Overhaul data reloading
[mudpy.git] / mudpy / data.py
1 """Data interface functions for the mudpy engine."""
2
3 # Copyright (c) 2004-2018 Jeremy Stanley <fungi@yuggoth.org>. Permission
4 # to use, copy, modify, and distribute this software is granted under
5 # terms provided in the LICENSE file distributed with this software.
6
7 import os
8 import re
9 import stat
10
11 import mudpy
12 import yaml
13
14
class _IBSEmitter(yaml.emitter.Emitter):

    """Override the default YAML Emitter to indent block sequences."""

    def expect_block_sequence(self):
        """Match the expectations of the ``yamllint`` style checker.

        Unlike the stock pyyaml emitter, request a real indent level
        (``indentless=False``) before emitting a block sequence, so the
        sequence dashes are indented beneath their parent key.
        """

        # TODO(fungi) Get an option for this implemented upstream in
        # the pyyaml library
        self.increase_indent(flow=False, indentless=False)
        self.state = self.expect_first_block_sequence_item
26
27
class _IBSDumper(yaml.SafeDumper, _IBSEmitter):

    """SafeDumper variant using our _IBSEmitter.

    Because _IBSEmitter appears before yaml.emitter.Emitter in the
    method resolution order, its ``expect_block_sequence`` override
    takes precedence, so ``yaml.dump(..., Dumper=_IBSDumper)`` emits
    indented block sequences.
    """
33
34
class Data:

    """A file containing universe elements and their facets."""

    def __init__(self,
                 source,
                 universe,
                 flags=None,
                 relative=None,
                 ):
        """Remember the file's context, then load it immediately.

        :param source: path of the data file (may be relative)
        :param universe: the universe to which this file belongs
        :param flags: optional list of flags (e.g. "private")
        :param relative: existing path used to resolve source against
        """
        self.source = source
        self.universe = universe
        if flags is None:
            self.flags = []
        else:
            # copy so later changes to the caller's list have no effect
            self.flags = flags[:]
        self.relative = relative
        self.load()

    def load(self):
        """Read a file, create elements and populate facets accordingly."""
        self.modified = False
        self.source = find_file(
                self.source, relative=self.relative, universe=self.universe)
        try:
            # use a context manager so the descriptor is closed promptly
            # instead of leaking until garbage collection; an empty file
            # makes safe_load return None, so normalize that to {}
            with open(self.source) as file_descriptor:
                self.data = yaml.safe_load(file_descriptor) or {}
            log_entry = ("Loaded file %s into memory." % self.source, 5)
        except FileNotFoundError:
            # it's normal if the file is one which doesn't exist yet
            self.data = {}
            log_entry = ("File %s is unavailable." % self.source, 6)
        try:
            mudpy.misc.log(*log_entry)
        except NameError:
            # happens when we're not far enough along in the init process
            self.universe.setup_loglines.append(log_entry)
        if not hasattr(self.universe, "files"):
            self.universe.files = {}
        self.universe.files[self.source] = self
        includes = []
        for node in list(self.data):
            if node == "_load":
                # gather included files now; they are loaded after this
                # file's own nodes have been applied
                for included in self.data["_load"]:
                    included = find_file(
                        included,
                        relative=self.source,
                        universe=self.universe)
                    if included not in includes:
                        includes.append(included)
                continue
            if node.startswith("_"):
                # other underscore-prefixed nodes are metadata, not facets
                continue
            facet_pos = node.rfind(".") + 1
            prefix = node[:facet_pos].strip(".")
            try:
                element = self.universe.contents[prefix]
            except KeyError:
                # the element does not exist yet, so create it now
                element = mudpy.misc.Element(prefix, self.universe, self)
            element.set(node[facet_pos:], self.data[node])
            if prefix.startswith("mudpy.movement."):
                self.universe.directions.add(
                    prefix[prefix.rfind(".") + 1:])
        for include_file in includes:
            if not os.path.isabs(include_file):
                include_file = find_file(
                    include_file,
                    relative=self.source,
                    universe=self.universe
                )
            # load includes not yet seen, or reload ones marked read-only
            if (include_file not in self.universe.files or not
                    self.universe.files[include_file].is_writeable()):
                Data(include_file, self.universe)

    def save(self):
        """Write the data to disk, if modified and permitted."""
        normal_umask = 0o0022
        private_umask = 0o0077
        private_file_mode = 0o0600

        # when modified, writeable and has content or the file exists
        if self.modified and self.is_writeable() and (
           self.data or os.path.exists(self.source)
           ):

            # make parent directories if necessary
            if not os.path.exists(os.path.dirname(self.source)):
                old_umask = os.umask(normal_umask)
                os.makedirs(os.path.dirname(self.source))
                os.umask(old_umask)

            # how many rotated backup copies to keep, from configuration
            if "mudpy.limit" in self.universe.contents:
                max_count = self.universe.contents["mudpy.limit"].get(
                    "backups", 0)
            else:
                max_count = 0
            if os.path.exists(self.source) and max_count:
                backups = []
                # escape the basename so regex metacharacters in file
                # names cannot distort the backup-suffix match
                backup_pattern = re.compile(
                    re.escape(os.path.basename(self.source)) + r"\.\d+$")
                for candidate in os.listdir(os.path.dirname(self.source)):
                    if backup_pattern.match(candidate):
                        backups.append(int(candidate.split(".")[-1]))
                # rotate from the highest suffix downward so a rename
                # never clobbers a newer backup
                backups.sort()
                backups.reverse()
                for old_backup in backups:
                    if old_backup >= max_count - 1:
                        # too old to keep within the configured limit
                        os.remove(self.source + "." + str(old_backup))
                    elif not os.path.exists(
                        self.source + "." + str(old_backup + 1)
                    ):
                        os.rename(
                            self.source + "." + str(old_backup),
                            self.source + "." + str(old_backup + 1)
                        )
                if not os.path.exists(self.source + ".0"):
                    os.rename(self.source, self.source + ".0")

            # our data file
            if "private" in self.flags:
                old_umask = os.umask(private_umask)
                file_descriptor = open(self.source, "w")
                # compare numeric modes; the previous code compared the
                # oct() string against an int, which never matched and
                # so chmod'ed unconditionally
                if stat.S_IMODE(
                        os.stat(self.source).st_mode) != private_file_mode:
                    # if it's marked private, chmod it appropriately
                    os.chmod(self.source, private_file_mode)
            else:
                old_umask = os.umask(normal_umask)
                file_descriptor = open(self.source, "w")
            os.umask(old_umask)

            # write and close the file
            yaml.dump(self.data, Dumper=_IBSDumper, allow_unicode=True,
                      default_flow_style=False, explicit_start=True, indent=4,
                      stream=file_descriptor)
            file_descriptor.close()

            # unset the modified flag
            self.modified = False

    def is_writeable(self):
        """Return True unless a true _lock node marks this file locked."""
        try:
            return not self.data.get("_lock", False)
        except AttributeError:
            # self.data may be None if nothing was loaded; treat that as
            # writeable (the old KeyError guard was dead code, since
            # dict.get never raises KeyError)
            return True
182
183
def find_file(
    file_name=None,
    group=None,
    prefix=None,
    relative=None,
    search=None,
    stash=None,
    universe=None
):
    """Return an absolute file path based on configuration.

    :param file_name: the (possibly relative) file path to resolve
    :param group: unused here; accepted for call compatibility
    :param prefix: root path for absolutizing relative search entries
    :param relative: existing file or directory searched next to first
    :param search: list of directories to try, in order
    :param stash: default directory for files which do not yet exist
    :param universe: universe whose configuration supplies defaults
    """

    # this is all unnecessary if it's already absolute
    if file_name and os.path.isabs(file_name):
        return os.path.realpath(file_name)

    # if a universe was provided, try to get some defaults from there
    if universe:

        if hasattr(
                universe, "contents") and "mudpy.filing" in universe.contents:
            filing = universe.contents["mudpy.filing"]
            if not prefix:
                prefix = filing.get("prefix")
            if not search:
                search = filing.get("search")
            if not stash:
                stash = filing.get("stash")

        # if there's only one file loaded, try to work around a chicken<egg
        elif hasattr(universe, "files") and len(
                universe.files) == 1 and not universe.files[
                    next(iter(universe.files))].is_writeable():
            data_file = universe.files[next(iter(universe.files))].data

            # try for a fallback default directory
            if not stash:
                stash = data_file.get(".mudpy.filing.stash", "")

            # try for a fallback root path
            if not prefix:
                prefix = data_file.get(".mudpy.filing.prefix", "")

            # try for a fallback search path
            if not search:
                search = data_file.get(".mudpy.filing.search", "")

        # another fallback root path, this time from the universe startdir
        if hasattr(universe, "startdir"):
            if not prefix:
                prefix = universe.startdir
            elif not os.path.isabs(prefix):
                prefix = os.path.join(universe.startdir, prefix)

    # when no root path is specified, assume the current working directory
    if (not prefix or prefix == ".") and hasattr(universe, "startdir"):
        prefix = universe.startdir

    # guard against prefix still being unset (e.g. no universe supplied);
    # os.path.realpath(None) would raise TypeError, so default to the cwd
    if not prefix:
        prefix = "."

    # make sure it's absolute
    prefix = os.path.realpath(prefix)

    # if there's no search path, just use the root path and etc
    if not search:
        search = [prefix, "etc"]

    # work on a copy of the search path, to avoid modifying the caller's
    else:
        search = search[:]

    # if there's no default path, use the last component of the search path
    if not stash:
        stash = search[-1]

    # if an existing file or directory reference was supplied, prepend it
    if relative:
        if os.path.isdir(relative):
            search = [relative] + search
        else:
            search = [os.path.dirname(relative)] + search

    # make the search path entries absolute and throw away any dupes
    clean_search = []
    for each_path in search:
        if not os.path.isabs(each_path):
            each_path = os.path.realpath(os.path.join(prefix, each_path))
        if each_path not in clean_search:
            clean_search.append(each_path)

    # start hunting for the file now
    for each_path in clean_search:

        # construct the candidate path
        candidate = os.path.join(each_path, file_name)

        # if the file exists and is readable, we're done
        if os.path.isfile(candidate):
            file_name = os.path.realpath(candidate)
            break

        # if the path is a directory, look for an __init__ file
        if os.path.isdir(candidate):
            file_name = os.path.realpath(
                    os.path.join(candidate, "__init__.yaml"))
            break

    # it didn't exist after all, so use the default path instead
    if not os.path.isabs(file_name):
        file_name = os.path.join(stash, file_name)
    if not os.path.isabs(file_name):
        file_name = os.path.join(prefix, file_name)

    # and normalize it last thing before returning
    file_name = os.path.realpath(file_name)
    return file_name