Initialize data file contents on creation
lib/mudpy/data.py
# -*- coding: utf-8 -*-
"""Data interface functions for the mudpy engine."""

# Copyright (c) 2004-2014 Jeremy Stanley <fungi@yuggoth.org>. Permission
# to use, copy, modify, and distribute this software is granted under
# terms provided in the LICENSE file distributed with this software.

import codecs
import configparser
import os
import re
import stat
import sys

import mudpy
import yaml


class DataFile:

    """A file containing universe elements."""

    def __init__(self, filename, universe):
        self.filename = filename
        self.universe = universe
        self.data = {}
        self.load()

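    # Illustrative usage (not called from this module; the universe object
    # and path below are hypothetical): a DataFile reads and registers its
    # contents as soon as it is constructed, so callers simply do
    #
    #     mudpy.data.DataFile("/path/to/sample.yaml", universe)
    #
    # which records the object in universe.files and turns every section
    # other than __control__ into a mudpy.misc.Element.
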
    def load(self):
        """Read a file and create elements accordingly."""
        # TODO(fungi): remove this indirection after the YAML transition
        if self.filename.endswith('.yaml'):
            self.load_yaml()
        else:
            self.load_mpy()

    def load_yaml(self):
        """Read a file and create elements accordingly."""
        # TODO(fungi): remove this attribute after the YAML transition
        self._format = 'yaml'
        self.modified = False
        try:
            # safe_load avoids constructing arbitrary Python objects, and
            # an empty file parses to None, hence the fallback to {}
            with open(self.filename) as data_fd:
                self.data = yaml.safe_load(data_fd) or {}
        except FileNotFoundError:
            # it's normal if the file is one which doesn't exist yet
            try:
                mudpy.misc.log("Couldn't read %s file." % self.filename, 6)
            except (AttributeError, NameError):
                # happens when we're not far enough along in the init process
                pass
        if not hasattr(self.universe, "files"):
            self.universe.files = {}
        self.universe.files[self.filename] = self
        includes = []
        if "__control__" in self.data:
            if "include_files" in self.data["__control__"]:
                for included in makelist(
                        self.data["__control__"]["include_files"]):
                    included = find_file(
                        included,
                        relative=self.filename,
                        universe=self.universe)
                    if included not in includes:
                        includes.append(included)
            if "include_dirs" in self.data["__control__"]:
                for included in [
                    os.path.join(x, "__init__.mpy") for x in makelist(
                        self.data["__control__"]["include_dirs"]
                    )
                ]:
                    included = find_file(
                        included,
                        relative=self.filename,
                        universe=self.universe
                    )
                    if included not in includes:
                        includes.append(included)
            if "default_files" in self.data["__control__"]:
                origins = makedict(
                    self.data["__control__"]["default_files"]
                )
                for key in origins.keys():
                    origins[key] = find_file(
                        origins[key],
                        relative=self.filename,
                        universe=self.universe
                    )
                    if origins[key] not in includes:
                        includes.append(origins[key])
                    self.universe.default_origins[key] = origins[key]
                    if key not in self.universe.categories:
                        self.universe.categories[key] = {}
            if "private_files" in self.data["__control__"]:
                for item in makelist(
                    self.data["__control__"]["private_files"]
                ):
                    item = find_file(
                        item,
                        relative=self.filename,
                        universe=self.universe
                    )
                    if item not in includes:
                        includes.append(item)
                    if item not in self.universe.private_files:
                        self.universe.private_files.append(item)
        for element in self.data:
            if element != "__control__":
                mudpy.misc.Element(element, self.universe, self.filename)
        for include_file in includes:
            if not os.path.isabs(include_file):
                include_file = find_file(
                    include_file,
                    relative=self.filename,
                    universe=self.universe
                )
            if (include_file not in self.universe.files or not
                    self.universe.files[include_file].is_writeable()):
                DataFile(include_file, self.universe)

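    # A hypothetical YAML data file showing the __control__ keys consumed
    # above (key names come from this loader; the file names and values are
    # invented).  Note that the values still pass through makelist() and
    # makedict(), so they keep the same string forms used by the mpy format
    # during the transition:
    #
    #     __control__:
    #         include_files: command.yaml
    #         include_dirs: account
    #         default_files: '"actor": "actor.yaml"'
    #         private_files: account.yaml
    #         read_only: true
    #
    # Every other top-level key becomes a universe Element.
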
    # TODO(fungi): remove this method after the YAML transition
    def load_mpy(self):
        """Read a file and create elements accordingly."""
        self._format = 'mpy'
        self.modified = False
        self.data = configparser.RawConfigParser()
        if os.access(self.filename, os.R_OK):
            self.data.read(self.filename)
        if not hasattr(self.universe, "files"):
            self.universe.files = {}
        self.universe.files[self.filename] = self
        includes = []
        if self.data.has_option("__control__", "include_files"):
            for included in makelist(
                self.data.get("__control__", "include_files")
            ):
                included = find_file(
                    included,
                    relative=self.filename,
                    universe=self.universe
                )
                if included not in includes:
                    includes.append(included)
        if self.data.has_option("__control__", "include_dirs"):
            for included in [
                os.path.join(x, "__init__.mpy") for x in makelist(
                    self.data.get("__control__", "include_dirs")
                )
            ]:
                included = find_file(
                    included,
                    relative=self.filename,
                    universe=self.universe
                )
                if included not in includes:
                    includes.append(included)
        if self.data.has_option("__control__", "default_files"):
            origins = makedict(
                self.data.get("__control__", "default_files")
            )
            for key in origins.keys():
                origins[key] = find_file(
                    origins[key],
                    relative=self.filename,
                    universe=self.universe
                )
                if origins[key] not in includes:
                    includes.append(origins[key])
                self.universe.default_origins[key] = origins[key]
                if key not in self.universe.categories:
                    self.universe.categories[key] = {}
        if self.data.has_option("__control__", "private_files"):
            for item in makelist(
                self.data.get("__control__", "private_files")
            ):
                item = find_file(
                    item,
                    relative=self.filename,
                    universe=self.universe
                )
                if item not in includes:
                    includes.append(item)
                if item not in self.universe.private_files:
                    self.universe.private_files.append(item)
        for section in self.data.sections():
            if section != "__control__":
                mudpy.misc.Element(section, self.universe, self.filename)
        for include_file in includes:
            if not os.path.isabs(include_file):
                include_file = find_file(
                    include_file,
                    relative=self.filename,
                    universe=self.universe
                )
            if (include_file not in self.universe.files or not
                    self.universe.files[include_file].is_writeable()):
                DataFile(include_file, self.universe)

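    # The equivalent __control__ section in the legacy ini-style mpy format
    # parsed by RawConfigParser above (option names come from the code, the
    # values are invented):
    #
    #     [__control__]
    #     include_files = "command.mpy"
    #     include_dirs = "account"
    #     default_files = "actor": "actor.mpy"
    #     private_files = "account.mpy"
    #     read_only = yes
    #
    # is_writeable() reads read_only with getboolean(), so yes/no, on/off,
    # true/false and 1/0 all work there.
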
    # TODO(fungi): this should support writing YAML
    def save(self):
        """Write the data, if necessary."""

        # when modified, writeable and has content or the file exists
        if self.modified and self.is_writeable() and (
           self.data.sections() or os.path.exists(self.filename)
           ):

            # make parent directories if necessary
            if not os.path.exists(os.path.dirname(self.filename)):
                os.makedirs(os.path.dirname(self.filename))

            # backup the file
            if self.data.has_option("__control__", "backup_count"):
                max_count = self.data.getint(
                    "__control__", "backup_count")
            else:
                max_count = self.universe.categories[
                    "internal"
                ][
                    "limits"
                ].getint("default_backup_count")
            if os.path.exists(self.filename) and max_count:
                backups = []
                for candidate in os.listdir(os.path.dirname(self.filename)):
                    if re.match(
                       re.escape(os.path.basename(self.filename)) +
                       r"\.\d+$", candidate
                       ):
                        backups.append(int(candidate.split(".")[-1]))
                backups.sort()
                backups.reverse()
                for old_backup in backups:
                    if old_backup >= max_count - 1:
                        os.remove(self.filename + "." + str(old_backup))
                    elif not os.path.exists(
                        self.filename + "." + str(old_backup + 1)
                    ):
                        os.rename(
                            self.filename + "." + str(old_backup),
                            self.filename + "." + str(old_backup + 1)
                        )
                if not os.path.exists(self.filename + ".0"):
                    os.rename(self.filename, self.filename + ".0")

            # our data file
            file_descriptor = codecs.open(self.filename, "w", "utf-8")

            # if it's marked private, chmod it appropriately
            if self.filename in self.universe.private_files and stat.S_IMODE(
               os.stat(self.filename)[stat.ST_MODE]
               ) != 0o0600:
                os.chmod(self.filename, 0o0600)

            # write it back sorted, instead of using configparser
            sections = self.data.sections()
            sections.sort()
            for section in sections:
                file_descriptor.write("[" + section + "]\n")
                options = self.data.options(section)
                options.sort()
                for option in options:
                    file_descriptor.write(
                        option + " = " +
                        self.data.get(section, option) + "\n"
                    )
                file_descriptor.write("\n")

            # flush and close the file
            file_descriptor.flush()
            file_descriptor.close()

            # unset the modified flag
            self.modified = False

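    # Backup rotation sketch: with a backup_count of 3, repeated saves of a
    # hypothetical "account.mpy" leave behind
    #
    #     account.mpy      (what was just written)
    #     account.mpy.0    (the previous contents)
    #     account.mpy.1    (the save before that)
    #     account.mpy.2    (the oldest copy kept; older ones are removed)
    #
    # Only the numeric ".N" suffix scheme comes from the code above; the
    # file name is made up for the example.
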
    # TODO(fungi): this should support writing YAML
    def is_writeable(self):
        """Returns True unless the __control__ read_only option is set."""
        # handle both dict (yaml) and RawConfigParser (mpy) data during the
        # format transition
        if self._format == 'yaml':
            return not (
                self.data.get("__control__") or {}).get("read_only", False)
        return not self.data.has_option(
            "__control__", "read_only"
        ) or not self.data.getboolean(
            "__control__", "read_only"
        )


def find_file(
    file_name=None,
    root_path=None,
    search_path=None,
    default_dir=None,
    relative=None,
    universe=None
):
    """Return an absolute file path based on configuration."""

    # make sure to get rid of any surrounding quotes first thing
    if file_name:
        file_name = file_name.strip("\"'")

    # this is all unnecessary if it's already absolute
    if file_name and os.path.isabs(file_name):
        return os.path.realpath(file_name)

    # when no file name is specified, look for <argv[0]>.conf
    elif not file_name:
        file_name = os.path.basename(sys.argv[0]) + ".conf"

    # if a universe was provided, try to get some defaults from there
    if universe:

        if hasattr(
           universe,
           "contents"
           ) and "internal:storage" in universe.contents:
            storage = universe.categories["internal"]["storage"]
            if not root_path:
                root_path = storage.get("root_path").strip("\"'")
            if not search_path:
                search_path = storage.getlist("search_path")
            if not default_dir:
                default_dir = storage.get("default_dir").strip("\"'")

        # if there's only one file loaded, try to work around the
        # chicken-and-egg problem by reading defaults from that file
        elif hasattr(universe, "files") and len(
            universe.files
        ) == 1 and not universe.files[
                list(universe.files.keys())[0]].is_writeable():
            data_file = universe.files[list(universe.files.keys())[0]].data

            # try for a fallback default directory
            if not default_dir and data_file.has_option(
               "internal:storage",
               "default_dir"
               ):
                default_dir = data_file.get(
                    "internal:storage",
                    "default_dir"
                ).strip("\"'")

            # try for a fallback root path
            if not root_path and data_file.has_option(
               "internal:storage",
               "root_path"
               ):
                root_path = data_file.get(
                    "internal:storage",
                    "root_path"
                ).strip("\"'")

            # try for a fallback search path
            if not search_path and data_file.has_option(
               "internal:storage",
               "search_path"
               ):
                search_path = makelist(
                    data_file.get("internal:storage",
                                  "search_path").strip("\"'")
                )

        # another fallback root path, this time from the universe startdir
        if not root_path and hasattr(universe, "startdir"):
            root_path = universe.startdir

    # when no root path is specified, assume the current working directory
    if not root_path:
        root_path = os.getcwd()

    # otherwise, make sure it's absolute
    elif not os.path.isabs(root_path):
        root_path = os.path.realpath(root_path)

    # if there's no search path, just use the root path plus "etc"
    if not search_path:
        search_path = [root_path, "etc"]

    # work on a copy of the search path, to avoid modifying the caller's
    else:
        search_path = search_path[:]

    # if there's no default directory, use the last search path element
    if not default_dir:
        default_dir = search_path[-1]

    # if an existing file or directory reference was supplied, prepend it
    if relative:
        relative = relative.strip("\"'")
        if os.path.isdir(relative):
            search_path = [relative] + search_path
        else:
            search_path = [os.path.dirname(relative)] + search_path

    # make the search path entries absolute and throw away any dupes
    clean_search_path = []
    for each_path in search_path:
        each_path = each_path.strip("\"'")
        if not os.path.isabs(each_path):
            each_path = os.path.realpath(os.path.join(root_path, each_path))
        if each_path not in clean_search_path:
            clean_search_path.append(each_path)

    # start hunting for the file now
    for each_path in clean_search_path:

        # if the file exists here, we're done
        if os.path.isfile(os.path.join(each_path, file_name)):
            file_name = os.path.realpath(os.path.join(each_path, file_name))
            break

    # it didn't exist after all, so use the default path instead
    if not os.path.isabs(file_name):
        file_name = os.path.join(default_dir, file_name)
    if not os.path.isabs(file_name):
        file_name = os.path.join(root_path, file_name)

    # normalize the resulting file path and hand it back
    return os.path.realpath(file_name)

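# A worked example of the search logic above, using invented paths: with
# root_path "/usr/local/mudpy" and no explicit search path,
#
#     find_file("sample.yaml", root_path="/usr/local/mudpy")
#
# checks /usr/local/mudpy/sample.yaml, then /usr/local/mudpy/etc/sample.yaml,
# and returns the first which exists (falling back to the default_dir
# location under etc when neither does).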

def makelist(value):
    """Turn string into list type."""
    if value[0] + value[-1] == "[]":
        return eval(value)
    elif value[0] + value[-1] == "\"\"":
        return [value[1:-1]]
    else:
        return [value]

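# makelist() examples covering the three branches above (inputs invented);
# bracketed values are passed to eval(), so they must be valid Python list
# literals from trusted configuration:
#
#     makelist('["wizard", "admin"]')  returns  ['wizard', 'admin']
#     makelist('"wizard"')             returns  ['wizard']
#     makelist('wizard')               returns  ['wizard']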

def makedict(value):
    """Turn string into dict type."""
    if value[0] + value[-1] == "{}":
        return eval(value)
    elif value.find(":") > 0:
        return eval("{" + value + "}")
    else:
        return {value: None}
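
# makedict() examples mirroring the three branches above (inputs invented);
# as with makelist(), the strings go through eval() and so must come from
# trusted configuration:
#
#     makedict('{"actor": "actor.mpy"}')  returns  {'actor': 'actor.mpy'}
#     makedict('"actor": "actor.mpy"')    returns  {'actor': 'actor.mpy'}
#     makedict('actor')                   returns  {'actor': None}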