mudpy/data.py
1 """Data interface functions for the mudpy engine."""
2
3 # Copyright (c) 2004-2016 Jeremy Stanley <fungi@yuggoth.org>. Permission
4 # to use, copy, modify, and distribute this software is granted under
5 # terms provided in the LICENSE file distributed with this software.
6
7 import os
8 import re
9 import stat
10
11 import mudpy
12 import yaml
13
14
class DataFile:

    """A file containing universe elements and their facets."""

    def __init__(self, filename, universe):
        self.filename = filename
        self.universe = universe
        self.data = {}
        self.load()

    def load(self):
        """Read a file, create elements and populate facets accordingly."""
        self.modified = False
        try:
            with open(self.filename) as datafd:
                self.data = yaml.safe_load(datafd)
        except FileNotFoundError:
            # it's normal if the file is one which doesn't exist yet
            log_entry = ("File %s is unavailable." % self.filename, 6)
            try:
                mudpy.misc.log(*log_entry)
            except NameError:
                # happens when we're not far enough along in the init process
                self.universe.setup_loglines.append(log_entry)
        if not hasattr(self.universe, "files"):
            self.universe.files = {}
        self.universe.files[self.filename] = self
        includes = []
        if "__control__" in self.data:
            if "include_files" in self.data["__control__"]:
                for included in self.data["__control__"]["include_files"]:
                    included = find_file(
                        included,
                        relative=self.filename,
                        universe=self.universe)
                    if included not in includes:
                        includes.append(included)
            if "include_dirs" in self.data["__control__"]:
                for included in [
                    os.path.join(x, "__init__.yaml") for x in
                        self.data["__control__"]["include_dirs"]
                ]:
                    included = find_file(
                        included,
                        relative=self.filename,
                        universe=self.universe
                    )
                    if included not in includes:
                        includes.append(included)
            if "default_files" in self.data["__control__"]:
                origins = self.data["__control__"]["default_files"]
                for key in origins.keys():
                    origins[key] = find_file(
                        origins[key],
                        relative=self.filename,
                        universe=self.universe
                    )
                    if origins[key] not in includes:
                        includes.append(origins[key])
                    self.universe.default_origins[key] = origins[key]
                    if key not in self.universe.categories:
                        self.universe.categories[key] = {}
            if "private_files" in self.data["__control__"]:
                for item in self.data["__control__"]["private_files"]:
                    item = find_file(
                        item,
                        relative=self.filename,
                        universe=self.universe
                    )
                    if item not in includes:
                        includes.append(item)
                    if item not in self.universe.private_files:
                        self.universe.private_files.append(item)
        for node in list(self.data):
            if node == "__control__":
                continue
            facet_pos = node.rfind(".") + 1
            if not facet_pos:
                mudpy.misc.Element(node, self.universe, self.filename,
                                   old_style=True)
            else:
                prefix = node[:facet_pos].strip(".")
                try:
                    element = self.universe.contents[prefix]
                except KeyError:
                    element = mudpy.misc.Element(prefix, self.universe,
                                                 self.filename)
                element.set(node[facet_pos:], self.data[node])
                if prefix.startswith("mudpy.movement."):
                    self.universe.directions.add(
                        prefix[prefix.rfind(".") + 1:])
        for include_file in includes:
            if not os.path.isabs(include_file):
                include_file = find_file(
                    include_file,
                    relative=self.filename,
                    universe=self.universe
                )
            if (include_file not in self.universe.files or not
                    self.universe.files[include_file].is_writeable()):
                DataFile(include_file, self.universe)

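    # A minimal sketch (illustrative only; the file and element names are
    # hypothetical) of the YAML layout load() expects:
    #
    #   __control__:
    #     include_files:
    #       - other.yaml
    #     private_files:
    #       - account.yaml
    #     backup_count: 3
    #   actor.sample_wizard.gender: neuter
    #
    # Keys other than __control__ are flat "element.facet" nodes: everything
    # up to the last "." names the element and the remainder names the facet,
    # while a key containing no "." is created as an old_style element.
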
    def save(self):
        """Write the data, if necessary."""
        normal_umask = 0o0022
        private_umask = 0o0077
        private_file_mode = 0o0600

        # only write when modified and writeable, and there is either data
        # to write or an existing file to update
        if self.modified and self.is_writeable() and (
           self.data or os.path.exists(self.filename)
           ):

            # make parent directories if necessary
            if not os.path.exists(os.path.dirname(self.filename)):
                old_umask = os.umask(normal_umask)
                os.makedirs(os.path.dirname(self.filename))
                os.umask(old_umask)

            # backup the file
            if "__control__" in self.data and "backup_count" in self.data[
                    "__control__"]:
                max_count = self.data["__control__"]["backup_count"]
            elif "mudpy.limit" in self.universe.contents:
                max_count = self.universe.contents["mudpy.limit"].get(
                    "backups", 0)
            else:
                max_count = 0
            if os.path.exists(self.filename) and max_count:
                backups = []
                for candidate in os.listdir(os.path.dirname(self.filename)):
                    if re.match(
                       os.path.basename(self.filename) +
                       r"\.\d+$", candidate
                       ):
                        backups.append(int(candidate.split(".")[-1]))
                backups.sort()
                backups.reverse()
                for old_backup in backups:
                    if old_backup >= max_count - 1:
                        os.remove(self.filename + "." + str(old_backup))
                    elif not os.path.exists(
                        self.filename + "." + str(old_backup + 1)
                    ):
                        os.rename(
                            self.filename + "." + str(old_backup),
                            self.filename + "." + str(old_backup + 1)
                        )
                if not os.path.exists(self.filename + ".0"):
                    os.rename(self.filename, self.filename + ".0")

            # our data file
            if self.filename in self.universe.private_files:
                old_umask = os.umask(private_umask)
                file_descriptor = open(self.filename, "w")
                if stat.S_IMODE(os.stat(
                        self.filename)[stat.ST_MODE]) != private_file_mode:
                    # if it's marked private, chmod it appropriately
                    os.chmod(self.filename, private_file_mode)
            else:
                old_umask = os.umask(normal_umask)
                file_descriptor = open(self.filename, "w")
            os.umask(old_umask)

            # write and close the file
            yaml.safe_dump(self.data, allow_unicode=True,
                           default_flow_style=False, stream=file_descriptor)
            file_descriptor.close()

            # unset the modified flag
            self.modified = False

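    # Backup rotation sketch (using a hypothetical file name): with a
    # backup_count of 3 and an existing account.yaml, save() first removes
    # any backups numbered 2 or higher, shifts account.yaml.1 to .2 and
    # account.yaml.0 to .1, then renames the current account.yaml to
    # account.yaml.0 before writing the fresh copy, so at most three
    # numbered backups remain.
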
    def is_writeable(self):
        """Return True unless the file is marked read_only in __control__."""
        try:
            return not self.data["__control__"].get("read_only", False)
        except KeyError:
            return True


def find_file(
    file_name=None,
    root_path=None,
    search_path=None,
    default_dir=None,
    relative=None,
    universe=None
):
    """Return an absolute file path based on configuration."""

    # make sure to get rid of any surrounding quotes first thing
    if file_name:
        file_name = file_name.strip("\"'")

    # this is all unnecessary if it's already absolute
    if file_name and os.path.isabs(file_name):
        return os.path.realpath(file_name)

    # if a universe was provided, try to get some defaults from there
    if universe:

        if hasattr(
           universe,
           "contents"
           ) and "internal:storage" in universe.contents:
            storage = universe.categories["internal"]["storage"]
            if not root_path:
                root_path = storage.get("root_path").strip("\"'")
            if not search_path:
                search_path = storage.get("search_path")
            if not default_dir:
                default_dir = storage.get("default_dir").strip("\"'")

        # if there's only one file loaded, try to work around a
        # chicken-and-egg problem
        elif hasattr(universe, "files") and len(
            universe.files
        ) == 1 and not universe.files[
                list(universe.files.keys())[0]].is_writeable():
            data_file = universe.files[list(universe.files.keys())[0]].data

            # try for a fallback default directory
            if not default_dir:
                default_dir = data_file.get(
                    "internal:storage", {}).get("default_dir", "")

            # try for a fallback root path
            if not root_path:
                root_path = data_file.get(
                    "internal:storage", {}).get("root_path", "")

            # try for a fallback search path
            if not search_path:
                search_path = data_file.get(
                    "internal:storage", {}).get("search_path", "")

        # another fallback root path, this time from the universe startdir
        if not root_path and hasattr(universe, "startdir"):
            root_path = universe.startdir

    # when no root path is specified, assume the current working directory
    if not root_path:
        root_path = os.getcwd()

    # otherwise, make sure it's absolute
    elif not os.path.isabs(root_path):
        root_path = os.path.realpath(root_path)

    # if there's no search path, just use the root path and etc
    if not search_path:
        search_path = [root_path, "etc"]

    # work on a copy of the search path, to avoid modifying the caller's list
    else:
        search_path = search_path[:]

    # if there's no default path, use the last component of the search path
    if not default_dir:
        default_dir = search_path[-1]

    # if an existing file or directory reference was supplied, prepend it
    if relative:
        relative = relative.strip("\"'")
        if os.path.isdir(relative):
            search_path = [relative] + search_path
        else:
            search_path = [os.path.dirname(relative)] + search_path

    # make the search path entries absolute and throw away any dupes
    clean_search_path = []
    for each_path in search_path:
        each_path = each_path.strip("\"'")
        if not os.path.isabs(each_path):
            each_path = os.path.realpath(os.path.join(root_path, each_path))
        if each_path not in clean_search_path:
            clean_search_path.append(each_path)

    # start hunting for the file now
    for each_path in clean_search_path:

        # if the file exists there, we're done
        if os.path.isfile(os.path.join(each_path, file_name)):
            file_name = os.path.realpath(os.path.join(each_path, file_name))
            break

    # it didn't exist after all, so use the default path instead
    if not os.path.isabs(file_name):
        file_name = os.path.join(default_dir, file_name)
    if not os.path.isabs(file_name):
        file_name = os.path.join(root_path, file_name)

    # normalize the resulting file path and hand it back
    file_name = os.path.realpath(file_name)
    return file_name
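

# Illustrative usage sketch (names here are hypothetical, not part of this
# module): with a universe whose internal:storage element sets root_path to
# "/srv/mud", search_path to ["sample", "etc"], and default_dir to "etc",
#
#   find_file("account.yaml", universe=universe)
#
# strips any surrounding quotes, tests /srv/mud/sample/account.yaml and then
# /srv/mud/etc/account.yaml, and if neither exists joins the name onto
# default_dir (and then root_path) instead, always returning an absolute,
# realpath-normalized result. A relative= file reference puts its directory
# at the front of the search path.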