Begin the transition from INI to YAML
lib/mudpy/data.py (mudpy.git)
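This change adds a YAML loader alongside the legacy INI-style .mpy loader, with DataFile.load() choosing between them by file extension. Below is a minimal standalone sketch (not taken from the repository) of the same tiny data file in each format; the "example" section and its "foo" option are invented for illustration, while "__control__" and "read_only" are keys the module really reads.

# A standalone sketch, not part of data.py: the same data in the legacy
# .mpy (INI) form and in YAML.  Only "__control__" and "read_only" mirror
# names this module actually consumes; "example" and "foo" are made up.
import configparser

import yaml

MPY_SAMPLE = """
[__control__]
read_only = yes

[example]
foo = "bar"
"""

YAML_SAMPLE = """
__control__:
    read_only: yes

example:
    foo: "bar"
"""

ini = configparser.RawConfigParser()
ini.read_string(MPY_SAMPLE)
assert ini.getboolean("__control__", "read_only") is True
# the INI value keeps its literal quotes; makelist()/makedict() strip them
assert ini.get("example", "foo") == '"bar"'

data = yaml.safe_load(YAML_SAMPLE)
assert data["__control__"]["read_only"] is True
assert data["example"]["foo"] == "bar"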
# -*- coding: utf-8 -*-
"""Data interface functions for the mudpy engine."""

# Copyright (c) 2004-2014 Jeremy Stanley <fungi@yuggoth.org>. Permission
# to use, copy, modify, and distribute this software is granted under
# terms provided in the LICENSE file distributed with this software.

import codecs
import configparser
import os
import re
import stat
import sys

import mudpy
import yaml


class DataFile:

    """A file containing universe elements."""

    def __init__(self, filename, universe):
        self.filename = filename
        self.universe = universe
        self.load()

    def load(self):
        """Read a file and create elements accordingly."""
        # TODO(fungi): remove this indirection after the YAML transition
        if self.filename.endswith('.yaml'):
            self.load_yaml()
        else:
            self.load_mpy()

    def load_yaml(self):
        """Read a file and create elements accordingly."""
        # TODO(fungi): remove this attribute after the YAML transition
        self._format = 'yaml'
        self.modified = False
        # default to an empty dict so a missing or empty file still yields
        # usable data
        self.data = {}
        try:
            with open(self.filename) as yaml_file:
                self.data = yaml.safe_load(yaml_file) or {}
        except FileNotFoundError:
            # it's normal if the file is one which doesn't exist yet
            try:
                mudpy.misc.log("Couldn't read %s file." % self.filename, 6)
            except NameError:
                # happens when we're not far enough along in the init process
                pass
        if not hasattr(self.universe, "files"):
            self.universe.files = {}
        self.universe.files[self.filename] = self
        includes = []
        # the optional __control__ section names other data files to pull in
        if "__control__" in self.data:
            if "include_files" in self.data["__control__"]:
                for included in makelist(
                        self.data["__control__"]["include_files"]):
                    included = find_file(
                        included,
                        relative=self.filename,
                        universe=self.universe)
                    if included not in includes:
                        includes.append(included)
            if "include_dirs" in self.data["__control__"]:
                for included in [
                    os.path.join(x, "__init__.mpy") for x in makelist(
                        self.data["__control__"]["include_dirs"]
                    )
                ]:
                    included = find_file(
                        included,
                        relative=self.filename,
                        universe=self.universe
                    )
                    if included not in includes:
                        includes.append(included)
            if "default_files" in self.data["__control__"]:
                origins = makedict(
                    self.data["__control__"]["default_files"]
                )
                for key in origins.keys():
                    origins[key] = find_file(
                        origins[key],
                        relative=self.filename,
                        universe=self.universe
                    )
                    if origins[key] not in includes:
                        includes.append(origins[key])
                    self.universe.default_origins[key] = origins[key]
                    if key not in self.universe.categories:
                        self.universe.categories[key] = {}
            if "private_files" in self.data["__control__"]:
                for item in makelist(
                    self.data["__control__"]["private_files"]
                ):
                    item = find_file(
                        item,
                        relative=self.filename,
                        universe=self.universe
                    )
                    if item not in includes:
                        includes.append(item)
                    if item not in self.universe.private_files:
                        self.universe.private_files.append(item)
        # every other top-level key defines a universe element
        for element in self.data:
            if element != "__control__":
                mudpy.misc.Element(element, self.universe, self.filename)
        # load any included files which aren't already loaded as writeable
        for include_file in includes:
            if not os.path.isabs(include_file):
                include_file = find_file(
                    include_file,
                    relative=self.filename,
                    universe=self.universe
                )
            if (include_file not in self.universe.files or not
                    self.universe.files[include_file].is_writeable()):
                DataFile(include_file, self.universe)

    # TODO(fungi): remove this method after the YAML transition
    def load_mpy(self):
        """Read a file and create elements accordingly."""
        self._format = 'mpy'
        self.modified = False
        self.data = configparser.RawConfigParser()
        if os.access(self.filename, os.R_OK):
            self.data.read(self.filename)
        if not hasattr(self.universe, "files"):
            self.universe.files = {}
        self.universe.files[self.filename] = self
        includes = []
        if self.data.has_option("__control__", "include_files"):
            for included in makelist(
                self.data.get("__control__", "include_files")
            ):
                included = find_file(
                    included,
                    relative=self.filename,
                    universe=self.universe
                )
                if included not in includes:
                    includes.append(included)
        if self.data.has_option("__control__", "include_dirs"):
            for included in [
                os.path.join(x, "__init__.mpy") for x in makelist(
                    self.data.get("__control__", "include_dirs")
                )
            ]:
                included = find_file(
                    included,
                    relative=self.filename,
                    universe=self.universe
                )
                if included not in includes:
                    includes.append(included)
        if self.data.has_option("__control__", "default_files"):
            origins = makedict(
                self.data.get("__control__", "default_files")
            )
            for key in origins.keys():
                origins[key] = find_file(
                    origins[key],
                    relative=self.filename,
                    universe=self.universe
                )
                if origins[key] not in includes:
                    includes.append(origins[key])
                self.universe.default_origins[key] = origins[key]
                if key not in self.universe.categories:
                    self.universe.categories[key] = {}
        if self.data.has_option("__control__", "private_files"):
            for item in makelist(
                self.data.get("__control__", "private_files")
            ):
                item = find_file(
                    item,
                    relative=self.filename,
                    universe=self.universe
                )
                if item not in includes:
                    includes.append(item)
                if item not in self.universe.private_files:
                    self.universe.private_files.append(item)
        for section in self.data.sections():
            if section != "__control__":
                mudpy.misc.Element(section, self.universe, self.filename)
        for include_file in includes:
            if not os.path.isabs(include_file):
                include_file = find_file(
                    include_file,
                    relative=self.filename,
                    universe=self.universe
                )
            if (include_file not in self.universe.files or not
                    self.universe.files[include_file].is_writeable()):
                DataFile(include_file, self.universe)

    # TODO(fungi): this should support writing YAML
    def save(self):
        """Write the data, if necessary."""

        # when modified, writeable and has content or the file exists
        if self.modified and self.is_writeable() and (
           self.data.sections() or os.path.exists(self.filename)
           ):

            # make parent directories if necessary
            if not os.path.exists(os.path.dirname(self.filename)):
                os.makedirs(os.path.dirname(self.filename))

            # backup the file
            if self.data.has_option("__control__", "backup_count"):
                max_count = self.data.getint(
                    "__control__", "backup_count")
            else:
                max_count = self.universe.categories[
                    "internal"
                ][
                    "limits"
                ].getint("default_backup_count")
            if os.path.exists(self.filename) and max_count:
                backups = []
                for candidate in os.listdir(os.path.dirname(self.filename)):
                    if re.match(
                       re.escape(os.path.basename(self.filename)) +
                       r"\.\d+$", candidate
                       ):
                        backups.append(int(candidate.split(".")[-1]))
                backups.sort()
                backups.reverse()
                for old_backup in backups:
                    if old_backup >= max_count - 1:
                        os.remove("%s.%s" % (self.filename, old_backup))
                    elif not os.path.exists(
                        "%s.%s" % (self.filename, old_backup + 1)
                    ):
                        os.rename(
                            "%s.%s" % (self.filename, old_backup),
                            "%s.%s" % (self.filename, old_backup + 1)
                        )
                if not os.path.exists(self.filename + ".0"):
                    os.rename(self.filename, self.filename + ".0")

            # our data file
            file_descriptor = codecs.open(self.filename, "w", "utf-8")

            # if it's marked private, chmod it appropriately
            if self.filename in self.universe.private_files and stat.S_IMODE(
               os.stat(self.filename)[stat.ST_MODE]
               ) != 0o0600:
                os.chmod(self.filename, 0o0600)

            # write it back sorted, instead of using configparser
            sections = self.data.sections()
            sections.sort()
            for section in sections:
                file_descriptor.write("[" + section + "]\n")
                options = self.data.options(section)
                options.sort()
                for option in options:
                    file_descriptor.write(
                        option + " = " +
                        self.data.get(section, option) + "\n"
                    )
                file_descriptor.write("\n")

            # flush and close the file
            file_descriptor.flush()
            file_descriptor.close()

            # unset the modified flag
            self.modified = False

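    # For example (not in the original module), with backup_count = 3 a
    # hypothetical "account.mpy" is rotated on save: anything already at
    # "account.mpy.2" or higher is removed, ".1" becomes ".2", ".0"
    # becomes ".1", and the file being replaced becomes "account.mpy.0".
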
    # TODO(fungi): this should support writing YAML
    def is_writeable(self):
        """Return True unless the __control__ read_only option is set."""
        # TODO(fungi): remove this indirection after the YAML transition
        if self._format == 'yaml':
            # any truthy read_only value counts as read-only here
            return not self.data.get("__control__", {}).get(
                "read_only", False)
        return not self.data.has_option(
            "__control__", "read_only"
        ) or not self.data.getboolean(
            "__control__", "read_only"
        )

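# Illustrative use of the class above (not in the original module): the
# constructor loads immediately, and load() dispatches on the filename
# extension, so a hypothetical universe object would pull in both formats
# like this:
#
#     DataFile("etc/mudpy.yaml", universe)   # handled by load_yaml()
#     DataFile("etc/mudpy.conf", universe)   # any other name uses load_mpy()
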

def find_file(
    file_name=None,
    root_path=None,
    search_path=None,
    default_dir=None,
    relative=None,
    universe=None
):
    """Return an absolute file path based on configuration."""

    # make sure to get rid of any surrounding quotes first thing
    if file_name:
        file_name = file_name.strip("\"'")

    # this is all unnecessary if it's already absolute
    if file_name and os.path.isabs(file_name):
        return os.path.realpath(file_name)

    # when no file name is specified, look for <argv[0]>.conf
    elif not file_name:
        file_name = os.path.basename(sys.argv[0]) + ".conf"

    # if a universe was provided, try to get some defaults from there
    if universe:

        if hasattr(
           universe,
           "contents"
           ) and "internal:storage" in universe.contents:
            storage = universe.categories["internal"]["storage"]
            if not root_path:
                root_path = storage.get("root_path").strip("\"'")
            if not search_path:
                search_path = storage.getlist("search_path")
            if not default_dir:
                default_dir = storage.get("default_dir").strip("\"'")

        # if there's only one file loaded, work around a chicken-and-egg issue
        elif hasattr(universe, "files") and len(
            universe.files
        ) == 1 and not universe.files[
                list(universe.files.keys())[0]].is_writeable():
            data_file = universe.files[list(universe.files.keys())[0]].data

            # try for a fallback default directory
            if not default_dir and data_file.has_option(
               "internal:storage",
               "default_dir"
               ):
                default_dir = data_file.get(
                    "internal:storage",
                    "default_dir"
                ).strip("\"'")

            # try for a fallback root path
            if not root_path and data_file.has_option(
               "internal:storage",
               "root_path"
               ):
                root_path = data_file.get(
                    "internal:storage",
                    "root_path"
                ).strip("\"'")

            # try for a fallback search path
            if not search_path and data_file.has_option(
               "internal:storage",
               "search_path"
               ):
                search_path = makelist(
                    data_file.get("internal:storage",
                                  "search_path").strip("\"'")
                )

        # another fallback root path, this time from the universe startdir
        if not root_path and hasattr(universe, "startdir"):
            root_path = universe.startdir

    # when no root path is specified, assume the current working directory
    if not root_path:
        root_path = os.getcwd()

    # otherwise, make sure it's absolute
    elif not os.path.isabs(root_path):
        root_path = os.path.realpath(root_path)

    # if there's no search path, just use the root path and etc
    if not search_path:
        search_path = [root_path, "etc"]

    # work on a copy of the search path, to avoid modifying the caller's
    else:
        search_path = search_path[:]

    # if there's no default path, use the last element of the search path
    if not default_dir:
        default_dir = search_path[-1]

    # if an existing file or directory reference was supplied, prepend it
    if relative:
        relative = relative.strip("\"'")
        if os.path.isdir(relative):
            search_path = [relative] + search_path
        else:
            search_path = [os.path.dirname(relative)] + search_path

    # make the search path entries absolute and throw away any dupes
    clean_search_path = []
    for each_path in search_path:
        each_path = each_path.strip("\"'")
        if not os.path.isabs(each_path):
            each_path = os.path.realpath(os.path.join(root_path, each_path))
        if each_path not in clean_search_path:
            clean_search_path.append(each_path)

    # start hunting for the file now
    for each_path in clean_search_path:

        # if the file exists and is readable, we're done
        if os.path.isfile(os.path.join(each_path, file_name)):
            file_name = os.path.realpath(os.path.join(each_path, file_name))
            break

    # it didn't exist after all, so use the default path instead
    if not os.path.isabs(file_name):
        file_name = os.path.join(default_dir, file_name)
    if not os.path.isabs(file_name):
        file_name = os.path.join(root_path, file_name)

    # normalize the resulting file path and hand it back
    return os.path.realpath(file_name)

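# An illustrative call of find_file() (not in the original module; the
# paths are assumptions and no universe is passed, so only the defaults
# above apply):
#
#     find_file("theme.mpy", root_path="/usr/share/mudpy")
#
# searches /usr/share/mudpy and /usr/share/mudpy/etc and, if the file is
# found in neither, falls back to the default_dir and returns
# "/usr/share/mudpy/etc/theme.mpy".
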

def makelist(value):
    """Turn a string into a list."""
    if value[0] + value[-1] == "[]":
        # a bracketed list literal, so evaluate it as one
        return eval(value)
    elif value[0] + value[-1] == "\"\"":
        # a quoted string, so strip the quotes and wrap it
        return [value[1:-1]]
    else:
        # otherwise, wrap the bare value in a single-item list
        return [value]


def makedict(value):
    """Turn a string into a dict."""
    if value[0] + value[-1] == "{}":
        # a braced dict literal, so evaluate it as one
        return eval(value)
    elif value.find(":") > 0:
        # a colon-separated pair (or pairs), so wrap it in braces first
        return eval("{" + value + "}")
    else:
        # otherwise, treat the bare value as a key with no value
        return {value: None}
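

# Illustrative results (not part of the original module) for the kinds of
# quoted .mpy option values these helpers are written for:
#
#     makelist('["one", "two"]')          returns ['one', 'two']
#     makelist('"solo"')                  returns ['solo']
#     makedict('"actor": "actor.mpy"')    returns {'actor': 'actor.mpy'}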