Write files directly from yaml.dump
lib/mudpy/data.py (mudpy.git)
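As the commit title says, this module's save() passes its open file object straight to yaml.dump through the stream argument rather than dumping to a string and writing that string out in a separate step. A minimal sketch of the difference using plain PyYAML outside of mudpy (the data and filename here are made up for illustration):

    import yaml

    data = {"example": {"description": "hypothetical element data"}}

    # dump to a string, then write that string out in a second step
    with open("example.yaml", "w") as file_descriptor:
        file_descriptor.write(
            yaml.dump(data, allow_unicode=True, default_flow_style=False))

    # or let yaml.dump write directly to the open stream, as save() does below
    with open("example.yaml", "w") as file_descriptor:
        yaml.dump(data, allow_unicode=True, default_flow_style=False,
                  stream=file_descriptor)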
"""Data interface functions for the mudpy engine."""

# Copyright (c) 2004-2015 Jeremy Stanley <fungi@yuggoth.org>. Permission
# to use, copy, modify, and distribute this software is granted under
# terms provided in the LICENSE file distributed with this software.

import os
import re
import stat

import mudpy
import yaml


class DataFile:

    """A file containing universe elements."""

    def __init__(self, filename, universe):
        self.filename = filename
        self.universe = universe
        self.data = {}
        self.load()

    def load(self):
        """Read a file and create elements accordingly."""
        self.modified = False
        try:
            # the data is plain YAML, so the safe loader suffices; the
            # context manager makes sure the file handle gets closed
            with open(self.filename) as file_descriptor:
                self.data = yaml.safe_load(file_descriptor)
            if self.data is None:
                # an empty file parses to None, but the rest of the engine
                # expects a dict
                self.data = {}
        except FileNotFoundError:
            # it's normal if the file is one which doesn't exist yet
            try:
                mudpy.misc.log("Couldn't read %s file." % self.filename, 6)
            except NameError:
                # happens when we're not far enough along in the init process
                pass
        if not hasattr(self.universe, "files"):
            self.universe.files = {}
        self.universe.files[self.filename] = self
        includes = []
        if "__control__" in self.data:

            # queue any files explicitly listed for inclusion
            if "include_files" in self.data["__control__"]:
                for included in self.data["__control__"]["include_files"]:
                    included = find_file(
                        included,
                        relative=self.filename,
                        universe=self.universe)
                    if included not in includes:
                        includes.append(included)

            # queue the __init__.yaml of each included directory
            if "include_dirs" in self.data["__control__"]:
                for included in [
                        os.path.join(x, "__init__.yaml") for x in
                        self.data["__control__"]["include_dirs"]]:
                    included = find_file(
                        included,
                        relative=self.filename,
                        universe=self.universe
                    )
                    if included not in includes:
                        includes.append(included)

            # record the default origin file for each element category
            if "default_files" in self.data["__control__"]:
                origins = self.data["__control__"]["default_files"]
                for key in origins.keys():
                    origins[key] = find_file(
                        origins[key],
                        relative=self.filename,
                        universe=self.universe
                    )
                    if origins[key] not in includes:
                        includes.append(origins[key])
                    self.universe.default_origins[key] = origins[key]
                    if key not in self.universe.categories:
                        self.universe.categories[key] = {}

            # track files which should stay unreadable by other accounts
            if "private_files" in self.data["__control__"]:
                for item in self.data["__control__"]["private_files"]:
                    item = find_file(
                        item,
                        relative=self.filename,
                        universe=self.universe
                    )
                    if item not in includes:
                        includes.append(item)
                    if item not in self.universe.private_files:
                        self.universe.private_files.append(item)
        # create an element for everything in the file except __control__
        for element in self.data:
            if element != "__control__":
                mudpy.misc.Element(element, self.universe, self.filename)

        # load each included file unless it's already loaded and writeable
        for include_file in includes:
            if not os.path.isabs(include_file):
                include_file = find_file(
                    include_file,
                    relative=self.filename,
                    universe=self.universe
                )
            if (include_file not in self.universe.files or not
                    self.universe.files[include_file].is_writeable()):
                DataFile(include_file, self.universe)

    def save(self):
        """Write the data, if necessary."""

        # only write when modified and writeable, and there is either some
        # data to write or an existing file to update
        if self.modified and self.is_writeable() and (
                self.data or os.path.exists(self.filename)):

            # make parent directories if necessary
            if not os.path.exists(os.path.dirname(self.filename)):
                os.makedirs(os.path.dirname(self.filename))

            # determine how many backup generations to keep
            if "__control__" in self.data and "backup_count" in self.data[
                    "__control__"]:
                max_count = self.data["__control__"]["backup_count"]
            else:
                max_count = self.universe.categories["internal"][
                    "limits"].get("default_backup_count")

            # rotate the existing backups and back up the current file
            if os.path.exists(self.filename) and max_count:
                backups = []
                for candidate in os.listdir(os.path.dirname(self.filename)):
                    if re.match(
                            re.escape(os.path.basename(self.filename)) +
                            r"\.\d+$", candidate):
                        backups.append(int(candidate.split(".")[-1]))
                backups.sort()
                backups.reverse()
                for old_backup in backups:
                    if old_backup >= max_count - 1:
                        os.remove(self.filename + "." + str(old_backup))
                    elif not os.path.exists(
                        self.filename + "." + str(old_backup + 1)
                    ):
                        os.rename(
                            self.filename + "." + str(old_backup),
                            self.filename + "." + str(old_backup + 1)
                        )
                if not os.path.exists(self.filename + ".0"):
                    os.rename(self.filename, self.filename + ".0")

            # open our data file for writing
            file_descriptor = open(self.filename, "w")

            # if it's marked private, chmod it appropriately
            if self.filename in self.universe.private_files and stat.S_IMODE(
                    os.stat(self.filename)[stat.ST_MODE]) != 0o0600:
                os.chmod(self.filename, 0o0600)

            # hand the open stream straight to yaml.dump, then close the file
            yaml.dump(self.data, allow_unicode=True, default_flow_style=False,
                      stream=file_descriptor)
            file_descriptor.close()

            # unset the modified flag
            self.modified = False

    def is_writeable(self):
        """Return True unless __control__ marks the file read_only."""
        try:
            return not self.data["__control__"].get("read_only", False)
        except KeyError:
            return True


def find_file(
    file_name=None,
    root_path=None,
    search_path=None,
    default_dir=None,
    relative=None,
    universe=None
):
    """Return an absolute file path based on configuration."""

    # make sure to get rid of any surrounding quotes first thing
    if file_name:
        file_name = file_name.strip("\"'")

    # this is all unnecessary if it's already absolute
    if file_name and os.path.isabs(file_name):
        return os.path.realpath(file_name)

    # if a universe was provided, try to get some defaults from there
    if universe:

        if (hasattr(universe, "contents")
                and "internal:storage" in universe.contents):
            storage = universe.categories["internal"]["storage"]
            if not root_path:
                root_path = storage.get("root_path", "").strip("\"'")
            if not search_path:
                search_path = storage.get("search_path")
            if not default_dir:
                default_dir = storage.get("default_dir", "").strip("\"'")

        # if there's only one file loaded so far, work around the
        # chicken-and-egg problem by reading storage settings from it
        elif (hasattr(universe, "files") and len(universe.files) == 1
                and not universe.files[
                    list(universe.files.keys())[0]].is_writeable()):
            data_file = universe.files[list(universe.files.keys())[0]].data

            # try for a fallback default directory
            if not default_dir:
                default_dir = data_file.get(
                    "internal:storage", {}).get("default_dir", "")

            # try for a fallback root path
            if not root_path:
                root_path = data_file.get(
                    "internal:storage", {}).get("root_path", "")

            # try for a fallback search path
            if not search_path:
                search_path = data_file.get(
                    "internal:storage", {}).get("search_path", "")

    # another fallback root path, this time from the universe startdir
    if not root_path and hasattr(universe, "startdir"):
        root_path = universe.startdir

    # when no root path is specified, assume the current working directory
    if not root_path:
        root_path = os.getcwd()

    # otherwise, make sure it's absolute
    elif not os.path.isabs(root_path):
        root_path = os.path.realpath(root_path)

    # if there's no search path, use the root path plus an "etc" subdirectory
    if not search_path:
        search_path = [root_path, "etc"]

    # otherwise, work on a copy of it to avoid modifying the caller's list
    else:
        search_path = search_path[:]

    # if there's no default directory, use the last search path entry
    if not default_dir:
        default_dir = search_path[-1]

    # if an existing file or directory reference was supplied, prepend it
    if relative:
        relative = relative.strip("\"'")
        if os.path.isdir(relative):
            search_path = [relative] + search_path
        else:
            search_path = [os.path.dirname(relative)] + search_path

    # make the search path entries absolute and throw away any dupes
    clean_search_path = []
    for each_path in search_path:
        each_path = each_path.strip("\"'")
        if not os.path.isabs(each_path):
            each_path = os.path.realpath(os.path.join(root_path, each_path))
        if each_path not in clean_search_path:
            clean_search_path.append(each_path)

    # start hunting for the file now
    for each_path in clean_search_path:

        # if the file exists there, we're done
        if os.path.isfile(os.path.join(each_path, file_name)):
            file_name = os.path.realpath(os.path.join(each_path, file_name))
            break

    # it wasn't found, so fall back to the default directory, then the root
    if not os.path.isabs(file_name):
        file_name = os.path.join(default_dir, file_name)
    if not os.path.isabs(file_name):
        file_name = os.path.join(root_path, file_name)

    # normalize the resulting file path and hand it back
    file_name = os.path.realpath(file_name)
    return file_name
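For reference, find_file can also be exercised on its own, without a universe: with no root_path it falls back to the current working directory, relative search path entries are resolved against that root, and a file which isn't found anywhere is joined to the default directory (the last search path entry) and normalized. A small hedged sketch, assuming the mudpy package is importable; the paths are made up for illustration:

    from mudpy.data import find_file

    # checks ./etc/mudpy.yaml first, then /usr/local/mudpy/etc/mudpy.yaml;
    # if neither exists, the name is joined to the default directory
    # (/usr/local/mudpy/etc, the last search path entry) and returned as an
    # absolute, normalized path
    print(find_file("mudpy.yaml", search_path=["etc", "/usr/local/mudpy/etc"]))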