mudpy/data.py
1 """Data interface functions for the mudpy engine."""
2
3 # Copyright (c) 2004-2017 Jeremy Stanley <fungi@yuggoth.org>. Permission
4 # to use, copy, modify, and distribute this software is granted under
5 # terms provided in the LICENSE file distributed with this software.
6
7 import os
8 import re
9 import stat
10
11 import mudpy
12 import yaml
13
14
15 class Data:
16
17     """A file containing universe elements and their facets."""
18
19     def __init__(self,
20                  source,
21                  universe,
22                  flags=None,
23                  relative=None,
24                  ):
25         self.source = source
26         self.universe = universe
27         if flags is None:
28             self.flags = []
29         else:
30             self.flags = flags[:]
31         self.relative = relative
32         self.load()
33
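    # A data file is a YAML mapping whose top-level keys either name whole
    # elements or dotted element.facet pairs; a hypothetical sketch (the
    # node names below are illustrative, not part of the engine):
    #
    #     _load:
    #         - extra.yaml
    #     account.fred.password: "..."
    #     area.lobby.description: A small, featureless room.
    #
    # load() walks those keys, creating an Element for each dotted prefix
    # and setting the trailing component as a facet on it, while "_load"
    # entries queue additional data files to pull in.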
    def load(self):
        """Read a file, create elements and populate facets accordingly."""
        self.modified = False
        self.source = find_file(
                self.source, relative=self.relative, universe=self.universe)
        try:
            with open(self.source) as datafd:
                self.data = yaml.safe_load(datafd)
        except FileNotFoundError:
            # it's normal if the file is one which doesn't exist yet
            self.data = {}
            log_entry = ("File %s is unavailable." % self.source, 6)
            try:
                mudpy.misc.log(*log_entry)
            except NameError:
                # happens when we're not far enough along in the init process
                self.universe.setup_loglines.append(log_entry)
        if not hasattr(self.universe, "files"):
            self.universe.files = {}
        self.universe.files[self.source] = self
        includes = []
        for node in list(self.data):
            if node == "_load":
                for included in self.data["_load"]:
                    included = find_file(
                        included,
                        relative=self.source,
                        universe=self.universe)
                    if included not in includes:
                        includes.append(included)
                continue
            if node.startswith("_"):
                continue
            facet_pos = node.rfind(".") + 1
            if not facet_pos:
                mudpy.misc.Element(node, self.universe, self, old_style=True)
            else:
                prefix = node[:facet_pos].strip(".")
                try:
                    element = self.universe.contents[prefix]
                except KeyError:
                    element = mudpy.misc.Element(prefix, self.universe, self)
                element.set(node[facet_pos:], self.data[node])
                if prefix.startswith("mudpy.movement."):
                    self.universe.directions.add(
                        prefix[prefix.rfind(".") + 1:])
        for include_file in includes:
            if not os.path.isabs(include_file):
                include_file = find_file(
                    include_file,
                    relative=self.source,
                    universe=self.universe
                )
            if (include_file not in self.universe.files or not
                    self.universe.files[include_file].is_writeable()):
                Data(include_file, self.universe)

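    # Before overwriting an existing file, save() rotates numbered backups
    # up to the count in the optional mudpy.limit element's "backups" facet:
    # a hypothetical account.yaml becomes account.yaml.0, an older
    # account.yaml.0 is shifted to account.yaml.1, and so on, discarding the
    # oldest copy once the limit is reached. Files carrying the "private"
    # flag are written under a restrictive umask and chmodded to 0600.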
    def save(self):
        """Write the data, if necessary."""
        normal_umask = 0o0022
        private_umask = 0o0077
        private_file_mode = 0o0600

        # when modified, writeable and has content or the file exists
        if self.modified and self.is_writeable() and (
           self.data or os.path.exists(self.source)
           ):

            # make parent directories if necessary
            if not os.path.exists(os.path.dirname(self.source)):
                old_umask = os.umask(normal_umask)
                os.makedirs(os.path.dirname(self.source))
                os.umask(old_umask)

            # backup the file
            if "mudpy.limit" in self.universe.contents:
                max_count = self.universe.contents["mudpy.limit"].get(
                    "backups", 0)
            else:
                max_count = 0
            if os.path.exists(self.source) and max_count:
                backups = []
                for candidate in os.listdir(os.path.dirname(self.source)):
                    if re.match(
                       re.escape(os.path.basename(self.source)) +
                       r"\.\d+$", candidate
                       ):
                        backups.append(int(candidate.split(".")[-1]))
                backups.sort()
                backups.reverse()
                for old_backup in backups:
                    if old_backup >= max_count - 1:
                        os.remove(self.source + "." + str(old_backup))
                    elif not os.path.exists(
                        self.source + "." + str(old_backup + 1)
                    ):
                        os.rename(
                            self.source + "." + str(old_backup),
                            self.source + "." + str(old_backup + 1)
                        )
                if not os.path.exists(self.source + ".0"):
                    os.rename(self.source, self.source + ".0")

            # our data file
            if "private" in self.flags:
                old_umask = os.umask(private_umask)
                file_descriptor = open(self.source, "w")
                if stat.S_IMODE(os.stat(
                        self.source)[stat.ST_MODE]) != private_file_mode:
                    # if it's marked private, chmod it appropriately
                    os.chmod(self.source, private_file_mode)
            else:
                old_umask = os.umask(normal_umask)
                file_descriptor = open(self.source, "w")
            os.umask(old_umask)

            # write and close the file
            yaml.safe_dump(self.data, allow_unicode=True,
                           default_flow_style=False, stream=file_descriptor)
            file_descriptor.close()

            # unset the modified flag
            self.modified = False

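    # A data file can opt out of ever being rewritten by carrying a
    # top-level "_lock" node with a true value; the check below reports
    # that state.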
    def is_writeable(self):
        """Return True unless the file's top-level _lock node is set."""
        try:
            return not self.data.get("_lock", False)
        except KeyError:
            return True


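# find_file() turns a (possibly relative) data file name into an absolute
# path. Absolute names are simply normalized and returned; otherwise a root
# path ("prefix"), a list of directories to try ("search") and a default
# write location ("stash") are taken from the arguments when supplied, else
# from the universe's mudpy.filing element, with the universe's startdir as
# a last-resort root. As a hypothetical example, with prefix /opt/mud and
# search ["etc", "share"], a request for "account.yaml" would try
# /opt/mud/etc/account.yaml and then /opt/mud/share/account.yaml, falling
# back to a path under the stash directory if neither exists.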
def find_file(
    file_name=None,
    category=None,
    prefix=None,
    relative=None,
    search=None,
    stash=None,
    universe=None
):
    """Return an absolute file path based on configuration."""

    # this is all unnecessary if it's already absolute
    if file_name and os.path.isabs(file_name):
        return os.path.realpath(file_name)

    # if a universe was provided, try to get some defaults from there
    if universe:

        if hasattr(
                universe, "contents") and "mudpy.filing" in universe.contents:
            filing = universe.contents["mudpy.filing"]
            if not prefix:
                prefix = filing.get("prefix")
            if not search:
                search = filing.get("search")
            if not stash:
                stash = filing.get("stash")

        # if there's only one file loaded, try to work around a
        # chicken-and-egg problem
        elif hasattr(universe, "files") and len(
            universe.files
        ) == 1 and not universe.files[
                list(universe.files.keys())[0]].is_writeable():
            data_file = universe.files[list(universe.files.keys())[0]].data

            # try for a fallback default directory
            if not stash:
                stash = data_file.get(".mudpy.filing.stash", "")

            # try for a fallback root path
            if not prefix:
                prefix = data_file.get(".mudpy.filing.prefix", "")

            # try for a fallback search path
            if not search:
                search = data_file.get(".mudpy.filing.search", "")

        # another fallback root path, this time from the universe startdir
        if hasattr(universe, "startdir"):
            if not prefix:
                prefix = universe.startdir
            elif not os.path.isabs(prefix):
                prefix = os.path.join(universe.startdir, prefix)

    # when no root path is specified, assume the universe's startup directory
    if (not prefix or prefix == ".") and hasattr(universe, "startdir"):
        prefix = universe.startdir

    # make sure it's absolute
    prefix = os.path.realpath(prefix)

    # if there's no search path, just use the root path and etc
    if not search:
        search = [prefix, "etc"]

    # work on a copy of the search path, to avoid modifying the caller's
    else:
        search = search[:]

    # if there's no default path, use the last component of the search path
    if not stash:
        stash = search[-1]

    # if an existing file or directory reference was supplied, prepend it
    if relative:
        if os.path.isdir(relative):
            search = [relative] + search
        else:
            search = [os.path.dirname(relative)] + search

    # make the search path entries absolute and throw away any dupes
    clean_search = []
    for each_path in search:
        if not os.path.isabs(each_path):
            each_path = os.path.realpath(os.path.join(prefix, each_path))
        if each_path not in clean_search:
            clean_search.append(each_path)

    # start hunting for the file now
    for each_path in clean_search:

        # construct the candidate path
        candidate = os.path.join(each_path, file_name)

        # if the file exists and is readable, we're done
        if os.path.isfile(candidate):
            file_name = os.path.realpath(candidate)
            break

        # if the path is a directory, look for an __init__ file
        if os.path.isdir(candidate):
            file_name = os.path.realpath(
                    os.path.join(candidate, "__init__.yaml"))
            break

    # it didn't exist after all, so use the default path instead
    if not os.path.isabs(file_name):
        file_name = os.path.join(stash, file_name)
    if not os.path.isabs(file_name):
        file_name = os.path.join(prefix, file_name)

    # and normalize it last thing before returning
    file_name = os.path.realpath(file_name)
    return file_name
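
# A minimal usage sketch (hypothetical file and facet names; in normal
# operation the mudpy.misc.Universe object is expected to drive these calls
# itself), assuming an already-initialized universe object:
#
#     record = Data("account.yaml", universe, flags=["private"])
#     record.data["account.fred.password"] = "..."
#     record.modified = True
#     record.save()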