lib/mudpy/data.py
# -*- coding: utf-8 -*-
"""Data interface functions for the mudpy engine."""

# Copyright (c) 2004-2014 Jeremy Stanley <fungi@yuggoth.org>. Permission
# to use, copy, modify, and distribute this software is granted under
# terms provided in the LICENSE file distributed with this software.

import codecs
import configparser
import os
import re
import stat
import sys

import mudpy
import yaml


class DataFile:

    """A file containing universe elements."""

    def __init__(self, filename, universe):
        self.filename = filename
        self.universe = universe
        self.data = {}
        self.load()

    def load(self):
        """Read a file and create elements accordingly."""
        # TODO(fungi): remove this indirection after the YAML transition
        if self.filename.endswith('.yaml'):
            self.load_yaml()
        else:
            self.load_mpy()

    def load_yaml(self):
        """Read a file and create elements accordingly."""
        # TODO(fungi): remove this parameter after the YAML transition
        self._format = 'yaml'
        self.modified = False
        try:
            # safe_load returns None for an empty file, so fall back to an
            # empty dict in that case
            with open(self.filename) as datafd:
                self.data = yaml.safe_load(datafd) or {}
        except FileNotFoundError:
            # it's normal if the file is one which doesn't exist yet
            try:
                mudpy.misc.log("Couldn't read %s file." % self.filename, 6)
            except NameError:
                # happens when we're not far enough along in the init process
                pass
        if not hasattr(self.universe, "files"):
            self.universe.files = {}
        self.universe.files[self.filename] = self
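        # build the list of additional files which the __control__ section
        # tells us to load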
        includes = []
        if "__control__" in self.data:
            if "include_files" in self.data["__control__"]:
                for included in self.data["__control__"]["include_files"]:
                    included = find_file(
                        included,
                        relative=self.filename,
                        universe=self.universe)
                    if included not in includes:
                        includes.append(included)
            if "include_dirs" in self.data["__control__"]:
                for included in [
                    os.path.join(x, "__init__.yaml") for x in
                        self.data["__control__"]["include_dirs"]
                ]:
                    included = find_file(
                        included,
                        relative=self.filename,
                        universe=self.universe
                    )
                    if included not in includes:
                        includes.append(included)
                # TODO(fungi): remove this loop after the YAML transition
                for included in [
                    os.path.join(x, "__init__.mpy") for x in
                        self.data["__control__"]["include_dirs"]
                ]:
                    included = find_file(
                        included,
                        relative=self.filename,
                        universe=self.universe
                    )
                    if included not in includes:
                        includes.append(included)
            if "default_files" in self.data["__control__"]:
                origins = self.data["__control__"]["default_files"]
                for key in origins.keys():
                    origins[key] = find_file(
                        origins[key],
                        relative=self.filename,
                        universe=self.universe
                    )
                    if origins[key] not in includes:
                        includes.append(origins[key])
                    self.universe.default_origins[key] = origins[key]
                    if key not in self.universe.categories:
                        self.universe.categories[key] = {}
            if "private_files" in self.data["__control__"]:
                for item in self.data["__control__"]["private_files"]:
                    item = find_file(
                        item,
                        relative=self.filename,
                        universe=self.universe
                    )
                    if item not in includes:
                        includes.append(item)
                    if item not in self.universe.private_files:
                        self.universe.private_files.append(item)
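        # everything other than the __control__ section describes a universe
        # element to create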
        for element in self.data:
            if element != "__control__":
                mudpy.misc.Element(element, self.universe, self.filename)
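        # load each included file unless it's already present and writeable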
        for include_file in includes:
            if not os.path.isabs(include_file):
                include_file = find_file(
                    include_file,
                    relative=self.filename,
                    universe=self.universe
                )
            if (include_file not in self.universe.files or not
                    self.universe.files[include_file].is_writeable()):
                DataFile(include_file, self.universe)

    # TODO(fungi): remove this method after the YAML transition
    def load_mpy(self):
        """Read a file and create elements accordingly."""
        self._format = 'mpy'
        self.modified = False
        self.data = configparser.RawConfigParser()
        if os.access(self.filename, os.R_OK):
            self.data.read(self.filename)
        if not hasattr(self.universe, "files"):
            self.universe.files = {}
        self.universe.files[self.filename] = self
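        # as in load_yaml, gather any additional files named by __control__
        # directives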
        includes = []
        if self.data.has_option("__control__", "include_files"):
            for included in makelist(
                self.data.get("__control__", "include_files")
            ):
                included = find_file(
                    included,
                    relative=self.filename,
                    universe=self.universe
                )
                if included not in includes:
                    includes.append(included)
        if self.data.has_option("__control__", "include_dirs"):
            for included in [
                os.path.join(x, "__init__.yaml") for x in makelist(
                    self.data["__control__"]["include_dirs"]
                )
            ]:
                included = find_file(
                    included,
                    relative=self.filename,
                    universe=self.universe
                )
                if included not in includes:
                    includes.append(included)
            for included in [
                os.path.join(x, "__init__.mpy") for x in makelist(
                    self.data.get("__control__", "include_dirs")
                )
            ]:
                included = find_file(
                    included,
                    relative=self.filename,
                    universe=self.universe
                )
                if included not in includes:
                    includes.append(included)
        if self.data.has_option("__control__", "default_files"):
            origins = makedict(
                self.data.get("__control__", "default_files")
            )
            for key in origins.keys():
                origins[key] = find_file(
                    origins[key],
                    relative=self.filename,
                    universe=self.universe
                )
                if origins[key] not in includes:
                    includes.append(origins[key])
                self.universe.default_origins[key] = origins[key]
                if key not in self.universe.categories:
                    self.universe.categories[key] = {}
        if self.data.has_option("__control__", "private_files"):
            for item in makelist(
                self.data.get("__control__", "private_files")
            ):
                item = find_file(
                    item,
                    relative=self.filename,
                    universe=self.universe
                )
                if item not in includes:
                    includes.append(item)
                if item not in self.universe.private_files:
                    self.universe.private_files.append(item)
        for section in self.data.sections():
            if section != "__control__":
                mudpy.misc.Element(section, self.universe, self.filename)
        for include_file in includes:
            if not os.path.isabs(include_file):
                include_file = find_file(
                    include_file,
                    relative=self.filename,
                    universe=self.universe
                )
            if (include_file not in self.universe.files or not
                    self.universe.files[include_file].is_writeable()):
                DataFile(include_file, self.universe)

    # TODO(fungi): this should support writing YAML
    def save(self):
        """Write the data, if necessary."""

        # when modified, writeable and has content or the file exists
        if self.modified and self.is_writeable() and (
           self.data.sections() or os.path.exists(self.filename)
           ):

            # make parent directories if necessary
            if not os.path.exists(os.path.dirname(self.filename)):
                os.makedirs(os.path.dirname(self.filename))

            # backup the file
            if self.data.has_option("__control__", "backup_count"):
                max_count = self.data.getint(
                    "__control__", "backup_count")
            else:
                max_count = self.universe.categories[
                    "internal"
                ][
                    "limits"
                ].getint("default_backup_count")
            if os.path.exists(self.filename) and max_count:
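                # rotate numbered backups (.0 is the most recent) so the
                # previous contents aren't lost when we overwrite the file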
                backups = []
                for candidate in os.listdir(os.path.dirname(self.filename)):
                    if re.match(
                       re.escape(os.path.basename(self.filename)) +
                       r"\.\d+$", candidate
                       ):
                        backups.append(int(candidate.split(".")[-1]))
                backups.sort()
                backups.reverse()
                for old_backup in backups:
                    if old_backup >= max_count - 1:
                        os.remove(self.filename + "." + str(old_backup))
                    elif not os.path.exists(
                        self.filename + "." + str(old_backup + 1)
                    ):
                        os.rename(
                            self.filename + "." + str(old_backup),
                            self.filename + "." + str(old_backup + 1)
                        )
                if not os.path.exists(self.filename + ".0"):
                    os.rename(self.filename, self.filename + ".0")

            # our data file
            file_descriptor = codecs.open(self.filename, "w", "utf-8")

            # if it's marked private, chmod it appropriately
            if self.filename in self.universe.private_files and stat.S_IMODE(
               os.stat(self.filename)[stat.ST_MODE]
               ) != 0o0600:
                os.chmod(self.filename, 0o0600)

            # write it back sorted, instead of using configparser
            sections = self.data.sections()
            sections.sort()
            for section in sections:
                file_descriptor.write("[" + section + "]\n")
                options = self.data.options(section)
                options.sort()
                for option in options:
                    file_descriptor.write(
                        option + " = " +
                        self.data.get(section, option) + "\n"
                    )
                file_descriptor.write("\n")

            # flush and close the file
            file_descriptor.flush()
            file_descriptor.close()

            # unset the modified flag
            self.modified = False

    def is_writeable(self):
        """Return True unless the file is marked read_only in __control__."""
        # TODO(fungi): remove this indirection after the YAML transition
        if self._format == "yaml":
            try:
                return not self.data["__control__"].get("read_only", False)
            except KeyError:
                return True
        else:
            return not self.data.has_option(
                "__control__", "read_only"
            ) or not self.data.getboolean(
                "__control__", "read_only"
            )


def find_file(
    file_name=None,
    root_path=None,
    search_path=None,
    default_dir=None,
    relative=None,
    universe=None
):
    """Return an absolute file path based on configuration."""

    # make sure to get rid of any surrounding quotes first thing
    if file_name:
        file_name = file_name.strip("\"'")

    # this is all unnecessary if it's already absolute
    if file_name and os.path.isabs(file_name):
        return os.path.realpath(file_name)

    # when no file name is specified, look for <argv[0]>.conf
    elif not file_name:
        file_name = os.path.basename(sys.argv[0]) + ".conf"

    # if a universe was provided, try to get some defaults from there
    if universe:

        if hasattr(
           universe,
           "contents"
           ) and "internal:storage" in universe.contents:
            storage = universe.categories["internal"]["storage"]
            if not root_path:
                root_path = storage.get("root_path").strip("\"'")
            if not search_path:
                search_path = storage.getlist("search_path")
            if not default_dir:
                default_dir = storage.get("default_dir").strip("\"'")

        # if only one file is loaded, work around a chicken-and-egg problem
        elif hasattr(universe, "files") and len(
            universe.files
        ) == 1 and not universe.files[
                list(universe.files.keys())[0]].is_writeable():
            data_file = universe.files[list(universe.files.keys())[0]].data

            # try for a fallback default directory
            if not default_dir and data_file.has_option(
               "internal:storage",
               "default_dir"
               ):
                default_dir = data_file.get(
                    "internal:storage",
                    "default_dir"
                ).strip("\"'")

            # try for a fallback root path
            if not root_path and data_file.has_option(
               "internal:storage",
               "root_path"
               ):
                root_path = data_file.get(
                    "internal:storage",
                    "root_path"
                ).strip("\"'")

            # try for a fallback search path
            if not search_path and data_file.has_option(
               "internal:storage",
               "search_path"
               ):
                search_path = makelist(
                    data_file.get("internal:storage",
                                  "search_path").strip("\"'")
                )

        # another fallback root path, this time from the universe startdir
        if not root_path and hasattr(universe, "startdir"):
            root_path = universe.startdir

    # when no root path is specified, assume the current working directory
    if not root_path:
        root_path = os.getcwd()

    # otherwise, make sure it's absolute
    elif not os.path.isabs(root_path):
        root_path = os.path.realpath(root_path)

    # if there's no search path, just use the root path and etc
    if not search_path:
        search_path = [root_path, "etc"]

    # work on a copy of the search path, to avoid modifying the caller's
    else:
        search_path = search_path[:]

    # if there's no default path, use the last element of the search path
    if not default_dir:
        default_dir = search_path[-1]

    # if an existing file or directory reference was supplied, prepend it
    if relative:
        relative = relative.strip("\"'")
        if os.path.isdir(relative):
            search_path = [relative] + search_path
        else:
            search_path = [os.path.dirname(relative)] + search_path

    # make the search path entries absolute and throw away any dupes
    clean_search_path = []
    for each_path in search_path:
        each_path = each_path.strip("\"'")
        if not os.path.isabs(each_path):
            each_path = os.path.realpath(os.path.join(root_path, each_path))
        if each_path not in clean_search_path:
            clean_search_path.append(each_path)

    # start hunting for the file now
    for each_path in clean_search_path:

        # if the file exists and is readable, we're done
        if os.path.isfile(os.path.join(each_path, file_name)):
            file_name = os.path.realpath(os.path.join(each_path, file_name))
            break

    # it didn't exist after all, so use the default path instead
    if not os.path.isabs(file_name):
        file_name = os.path.join(default_dir, file_name)
    if not os.path.isabs(file_name):
        file_name = os.path.join(root_path, file_name)

    # normalize the resulting file path and hand it back
    file_name = os.path.realpath(file_name)

    return file_name


def makelist(value):
    """Turn string into list type."""
    if value[0] + value[-1] == "[]":
        return eval(value)
    elif value[0] + value[-1] == "\"\"":
        return [value[1:-1]]
    else:
        return [value]


def makedict(value):
    """Turn string into dict type."""
    if value[0] + value[-1] == "{}":
        return eval(value)
    elif value.find(":") > 0:
        return eval("{" + value + "}")
    else:
        return {value: None}