1 """Data interface functions for the mudpy engine."""
2
3 # Copyright (c) 2004-2016 Jeremy Stanley <fungi@yuggoth.org>. Permission
4 # to use, copy, modify, and distribute this software is granted under
5 # terms provided in the LICENSE file distributed with this software.
6
7 import os
8 import re
9 import stat
10
11 import mudpy
12 import yaml
13
14
class DataFile:

    """A file containing universe elements and their facets."""

    def __init__(self, filename, universe):
        self.filename = filename
        self.universe = universe
        self.data = {}
        self.load()

    def load(self):
        """Read a file, create elements and populate facets accordingly."""
        self.modified = False
        try:
            with open(self.filename) as file_descriptor:
                self.data = yaml.safe_load(file_descriptor)
        except FileNotFoundError:
            # it's normal if the file is one which doesn't exist yet
            log_entry = ("File %s is unavailable." % self.filename, 6)
            try:
                mudpy.misc.log(*log_entry)
            except NameError:
                # happens when we're not far enough along in the init process
                self.universe.setup_loglines.append(log_entry)
        if not hasattr(self.universe, "files"):
            self.universe.files = {}
        self.universe.files[self.filename] = self
        includes = []
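        # the special __control__ node holds file-level directives
        # (include_files, include_dirs, default_files, private_files)
        # rather than universe elements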
        if "__control__" in self.data:
            if "include_files" in self.data["__control__"]:
                for included in self.data["__control__"]["include_files"]:
                    included = find_file(
                        included,
                        relative=self.filename,
                        universe=self.universe)
                    if included not in includes:
                        includes.append(included)
            if "include_dirs" in self.data["__control__"]:
                for included in [
                    os.path.join(x, "__init__.yaml") for x in
                        self.data["__control__"]["include_dirs"]
                ]:
                    included = find_file(
                        included,
                        relative=self.filename,
                        universe=self.universe
                    )
                    if included not in includes:
                        includes.append(included)
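            # default_files names a default origin file for each element
            # category; make sure those files are loaded and the
            # categories exist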
            if "default_files" in self.data["__control__"]:
                origins = self.data["__control__"]["default_files"]
                for key in origins.keys():
                    origins[key] = find_file(
                        origins[key],
                        relative=self.filename,
                        universe=self.universe
                    )
                    if origins[key] not in includes:
                        includes.append(origins[key])
                    self.universe.default_origins[key] = origins[key]
                    if key not in self.universe.categories:
                        self.universe.categories[key] = {}
            if "private_files" in self.data["__control__"]:
                for item in self.data["__control__"]["private_files"]:
                    item = find_file(
                        item,
                        relative=self.filename,
                        universe=self.universe
                    )
                    if item not in includes:
                        includes.append(item)
                    if item not in self.universe.private_files:
                        self.universe.private_files.append(item)
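        # every other top-level node is either an old-style element
        # definition or an "element.facet" entry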
        for node in list(self.data):
            if node == "__control__":
                continue
            facet_pos = node.rfind(".") + 1
            if not facet_pos:
                mudpy.misc.Element(node, self.universe, self.filename,
                                   old_style=True)
            else:
                prefix = node[:facet_pos].strip(".")
                try:
                    element = self.universe.contents[prefix]
                except KeyError:
                    element = mudpy.misc.Element(prefix, self.universe,
                                                 self.filename)
                element.set(node[facet_pos:], self.data[node])
                if prefix.startswith("mudpy.movement."):
                    self.universe.directions.add(
                        prefix[prefix.rfind(".") + 1:])
        for include_file in includes:
            if not os.path.isabs(include_file):
                include_file = find_file(
                    include_file,
                    relative=self.filename,
                    universe=self.universe
                )
            if (include_file not in self.universe.files or not
                    self.universe.files[include_file].is_writeable()):
                DataFile(include_file, self.universe)

    def save(self):
        """Write the data, if necessary."""
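        # private files are created with a restrictive umask and chmod'd
        # to owner-only permissions; everything else gets a normal umask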
        normal_umask = 0o0022
        private_umask = 0o0077
        private_file_mode = 0o0600

        # only save when modified, the file is writeable, and there is
        # either data to write or an existing file on disk
        if self.modified and self.is_writeable() and (
           self.data or os.path.exists(self.filename)
           ):

            # make parent directories if necessary
            if not os.path.exists(os.path.dirname(self.filename)):
                old_umask = os.umask(normal_umask)
                os.makedirs(os.path.dirname(self.filename))
                os.umask(old_umask)

            # backup the file
            if "__control__" in self.data and "backup_count" in self.data[
                    "__control__"]:
                max_count = self.data["__control__"]["backup_count"]
            else:
                max_count = self.universe.contents["mudpy.limit"].get(
                    "backups")
            if os.path.exists(self.filename) and max_count:
                backups = []
                for candidate in os.listdir(os.path.dirname(self.filename)):
                    if re.match(
                            re.escape(os.path.basename(self.filename)) +
                            r"\.\d+$", candidate):
                        backups.append(int(candidate.split(".")[-1]))
                backups.sort()
                backups.reverse()
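                # rotate existing backups: drop any at or above the
                # configured maximum, shift the rest up one numeric
                # suffix, and save the current file as the new .0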
                for old_backup in backups:
                    if old_backup >= max_count - 1:
                        os.remove(self.filename + "." + str(old_backup))
                    elif not os.path.exists(
                        self.filename + "." + str(old_backup + 1)
                    ):
                        os.rename(
                            self.filename + "." + str(old_backup),
                            self.filename + "." + str(old_backup + 1)
                        )
                if not os.path.exists(self.filename + ".0"):
                    os.rename(self.filename, self.filename + ".0")

            # open the data file itself, using a restrictive umask and
            # file mode if it's marked private
            if self.filename in self.universe.private_files:
                old_umask = os.umask(private_umask)
                file_descriptor = open(self.filename, "w")
                if stat.S_IMODE(
                        os.stat(self.filename).st_mode) != private_file_mode:
                    # fix the permissions if an existing file doesn't
                    # already match the private mode
                    os.chmod(self.filename, private_file_mode)
            else:
                old_umask = os.umask(normal_umask)
                file_descriptor = open(self.filename, "w")
            os.umask(old_umask)

            # write and close the file
            yaml.safe_dump(self.data, allow_unicode=True,
                           default_flow_style=False, stream=file_descriptor)
            file_descriptor.close()

            # unset the modified flag
            self.modified = False

    def is_writeable(self):
        """Return True unless __control__ marks the file read_only."""
        try:
            return not self.data["__control__"].get("read_only", False)
        except KeyError:
            return True


def find_file(
    file_name=None,
    root_path=None,
    search_path=None,
    default_dir=None,
    relative=None,
    universe=None
):
    """Return an absolute file path based on configuration."""
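    # Overall strategy: strip quotes, return absolute paths untouched,
    # fill in root_path/search_path/default_dir from the universe when
    # possible, search each candidate directory for the file, and fall
    # back to default_dir (then root_path) for files that don't exist yet.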

    # make sure to get rid of any surrounding quotes first thing
    if file_name:
        file_name = file_name.strip("\"'")

    # this is all unnecessary if it's already absolute
    if file_name and os.path.isabs(file_name):
        return os.path.realpath(file_name)

    # if a universe was provided, try to get some defaults from there
    if universe:

        if (hasattr(universe, "contents")
                and "internal:storage" in universe.contents):
            storage = universe.categories["internal"]["storage"]
            if not root_path:
                root_path = storage.get("root_path").strip("\"'")
            if not search_path:
                search_path = storage.get("search_path")
            if not default_dir:
                default_dir = storage.get("default_dir").strip("\"'")

        # if there's only one file loaded so far, try to work around the
        # chicken-and-egg problem by reading storage settings from it
        elif hasattr(universe, "files") and len(
            universe.files
        ) == 1 and not universe.files[
                list(universe.files.keys())[0]].is_writeable():
            data_file = universe.files[list(universe.files.keys())[0]].data

            # try for a fallback default directory
            if not default_dir:
                default_dir = data_file.get(
                    "internal:storage", {}).get("default_dir", "")

            # try for a fallback root path
            if not root_path:
                root_path = data_file.get(
                    "internal:storage", {}).get("root_path", "")

            # try for a fallback search path
            if not search_path:
                search_path = data_file.get(
                    "internal:storage", {}).get("search_path", "")

        # another fallback root path, this time from the universe startdir
        if not root_path and hasattr(universe, "startdir"):
            root_path = universe.startdir

    # when no root path is specified, assume the current working directory
    if not root_path:
        root_path = os.getcwd()

    # otherwise, make sure it's absolute
    elif not os.path.isabs(root_path):
        root_path = os.path.realpath(root_path)

    # if there's no search path, just use the root path and "etc"
    if not search_path:
        search_path = [root_path, "etc"]

    # work on a copy of the search path, to avoid modifying the caller's list
    else:
        search_path = search_path[:]

    # if there's no default directory, use the last component of the
    # search path
    if not default_dir:
        default_dir = search_path[-1]

    # if an existing file or directory reference was supplied, prepend it
    if relative:
        relative = relative.strip("\"'")
        if os.path.isdir(relative):
            search_path = [relative] + search_path
        else:
            search_path = [os.path.dirname(relative)] + search_path

    # make the search path entries absolute and throw away any dupes
    clean_search_path = []
    for each_path in search_path:
        each_path = each_path.strip("\"'")
        if not os.path.isabs(each_path):
            each_path = os.path.realpath(os.path.join(root_path, each_path))
        if each_path not in clean_search_path:
            clean_search_path.append(each_path)

    # start hunting for the file now
    for each_path in clean_search_path:

        # if the file exists here, we're done
        if os.path.isfile(os.path.join(each_path, file_name)):
            file_name = os.path.realpath(os.path.join(each_path, file_name))
            break

    # it didn't exist after all, so fall back to the default directory
    # (joining the root path as well if the result still isn't absolute)
    if not os.path.isabs(file_name):
        file_name = os.path.join(default_dir, file_name)
    if not os.path.isabs(file_name):
        file_name = os.path.join(root_path, file_name)

    # normalize the resulting file path and hand it back
    file_name = os.path.realpath(file_name)
    return file_name