Fix type mismatches in save file rotation
[mudpy.git] / lib / mudpy / data.py
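The rotation logic in save() below sorts the numeric suffixes of existing backups and compares them against the configured backup_count. Those comparisons only behave once each suffix parsed out of a file name is cast to an integer; left as the string that split() returns, Python 3 refuses to order it against an int, which appears to be the kind of mismatch this commit addresses. A minimal standalone sketch (hypothetical names, not part of data.py):

    suffixes = [name.split(".")[-1]
                for name in ("account.yaml.0", "account.yaml.1")]   # ['0', '1']
    max_count = 3
    # max(suffixes) >= max_count - 1   # TypeError on Python 3: '>=' between str and int
    print(max(int(s) for s in suffixes) >= max_count - 1)   # False: newest backup is within the limit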
# -*- coding: utf-8 -*-
"""Data interface functions for the mudpy engine."""

# Copyright (c) 2004-2014 Jeremy Stanley <fungi@yuggoth.org>. Permission
# to use, copy, modify, and distribute this software is granted under
# terms provided in the LICENSE file distributed with this software.

import codecs
import os
import re
import stat

import mudpy
import yaml


class DataFile:

    """A file containing universe elements."""

    def __init__(self, filename, universe):
        self.filename = filename
        self.universe = universe
        self.data = {}
        self.load()

    def load(self):
        """Read a file and create elements accordingly."""
        self.modified = False
        try:
            with open(self.filename) as datafile:
                self.data = yaml.load(datafile)
        except FileNotFoundError:
            # it's normal if the file is one which doesn't exist yet
            try:
                mudpy.misc.log("Couldn't read %s file." % self.filename, 6)
            except NameError:
                # happens when we're not far enough along in the init process
                pass
        if not hasattr(self.universe, "files"):
            self.universe.files = {}
        self.universe.files[self.filename] = self
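        # collect any additional files named in this file's __control__
        # section so they can be loaded once this one has been processed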
        includes = []
        if "__control__" in self.data:
            if "include_files" in self.data["__control__"]:
                for included in self.data["__control__"]["include_files"]:
                    included = find_file(
                        included,
                        relative=self.filename,
                        universe=self.universe)
                    if included not in includes:
                        includes.append(included)
            if "include_dirs" in self.data["__control__"]:
                for included in [
                    os.path.join(x, "__init__.yaml") for x in
                        self.data["__control__"]["include_dirs"]
                ]:
                    included = find_file(
                        included,
                        relative=self.filename,
                        universe=self.universe
                    )
                    if included not in includes:
                        includes.append(included)
            if "default_files" in self.data["__control__"]:
                origins = self.data["__control__"]["default_files"]
                for key in origins.keys():
                    origins[key] = find_file(
                        origins[key],
                        relative=self.filename,
                        universe=self.universe
                    )
                    if origins[key] not in includes:
                        includes.append(origins[key])
                    self.universe.default_origins[key] = origins[key]
                    if key not in self.universe.categories:
                        self.universe.categories[key] = {}
            if "private_files" in self.data["__control__"]:
                for item in self.data["__control__"]["private_files"]:
                    item = find_file(
                        item,
                        relative=self.filename,
                        universe=self.universe
                    )
                    if item not in includes:
                        includes.append(item)
                    if item not in self.universe.private_files:
                        self.universe.private_files.append(item)
        for element in self.data:
            if element != "__control__":
                mudpy.misc.Element(element, self.universe, self.filename)
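        # load each included file that is not already present as a
        # writeable file in this universe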
        for include_file in includes:
            if not os.path.isabs(include_file):
                include_file = find_file(
                    include_file,
                    relative=self.filename,
                    universe=self.universe
                )
            if (include_file not in self.universe.files or not
                    self.universe.files[include_file].is_writeable()):
                DataFile(include_file, self.universe)

    def save(self):
        """Write the data, if necessary."""

        # when modified, writeable and has content or the file exists
        if self.modified and self.is_writeable() and (
           self.data or os.path.exists(self.filename)
           ):

            # make parent directories if necessary
            if not os.path.exists(os.path.dirname(self.filename)):
                os.makedirs(os.path.dirname(self.filename))

            # backup the file
            if "__control__" in self.data and "backup_count" in self.data[
                    "__control__"]:
                max_count = self.data["__control__"]["backup_count"]
            else:
                max_count = self.universe.categories[
                    "internal"
                ][
                    "limits"
                ].get("default_backup_count")
            if os.path.exists(self.filename) and max_count:
                backups = []
                for candidate in os.listdir(os.path.dirname(self.filename)):
                    if re.match(
                       os.path.basename(self.filename) +
                       r"\.\d+$", candidate
                       ):
                        backups.append(int(candidate.split(".")[-1]))
                backups.sort()
                backups.reverse()
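                # shift each existing backup up one slot, newest first,
                # discarding any that would reach the backup_count limit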
                for old_backup in backups:
                    if old_backup >= max_count - 1:
                        os.remove(self.filename + "." + str(old_backup))
                    elif not os.path.exists(
                        self.filename + "." + str(old_backup + 1)
                    ):
                        os.rename(
                            self.filename + "." + str(old_backup),
                            self.filename + "." + str(old_backup + 1)
                        )
                if not os.path.exists(self.filename + ".0"):
                    os.rename(self.filename, self.filename + ".0")

            # our data file
            file_descriptor = codecs.open(self.filename, "w", "utf-8")

            # if it's marked private, chmod it appropriately
            if self.filename in self.universe.private_files and stat.S_IMODE(
               os.stat(self.filename)[stat.ST_MODE]
               ) != 0o0600:
                os.chmod(self.filename, 0o0600)

            # write, flush and close the file
            file_descriptor.write(yaml.dump(self.data))
            file_descriptor.flush()
            file_descriptor.close()

            # unset the modified flag
            self.modified = False

    def is_writeable(self):
        """Return True unless the __control__ read_only flag is set."""
        try:
            return not self.data["__control__"].get("read_only", False)
        except KeyError:
            return True


def find_file(
    file_name=None,
    root_path=None,
    search_path=None,
    default_dir=None,
    relative=None,
    universe=None
):
    """Return an absolute file path based on configuration."""

    # make sure to get rid of any surrounding quotes first thing
    if file_name:
        file_name = file_name.strip("\"'")

    # this is all unnecessary if it's already absolute
    if file_name and os.path.isabs(file_name):
        return os.path.realpath(file_name)

    # if a universe was provided, try to get some defaults from there
    if universe:

        if hasattr(
           universe,
           "contents"
           ) and "internal:storage" in universe.contents:
            storage = universe.categories["internal"]["storage"]
            if not root_path:
                root_path = storage.get("root_path").strip("\"'")
            if not search_path:
                search_path = storage.get("search_path")
            if not default_dir:
                default_dir = storage.get("default_dir").strip("\"'")

        # if there's only one file loaded, try to work around a
        # chicken-and-egg problem
        elif hasattr(universe, "files") and len(
            universe.files
        ) == 1 and not universe.files[
                list(universe.files.keys())[0]].is_writeable():
            data_file = universe.files[list(universe.files.keys())[0]].data

            # try for a fallback default directory
            if not default_dir:
                default_dir = data_file.get(
                    "internal:storage", {}).get("default_dir", "")

            # try for a fallback root path
            if not root_path:
                root_path = data_file.get(
                    "internal:storage", {}).get("root_path", "")

            # try for a fallback search path
            if not search_path:
                search_path = data_file.get(
                    "internal:storage", {}).get("search_path", "")

        # another fallback root path, this time from the universe startdir
        if not root_path and hasattr(universe, "startdir"):
            root_path = universe.startdir

    # when no root path is specified, assume the current working directory
    if not root_path:
        root_path = os.getcwd()

    # otherwise, make sure it's absolute
    elif not os.path.isabs(root_path):
        root_path = os.path.realpath(root_path)

    # if there's no search path, just use the root path and etc
    if not search_path:
        search_path = [root_path, "etc"]

    # work on a copy of the search path, to avoid modifying the caller's
    else:
        search_path = search_path[:]

    # if there's no default path, use the last element of the search path
    if not default_dir:
        default_dir = search_path[-1]

    # if an existing file or directory reference was supplied, prepend it
    if relative:
        relative = relative.strip("\"'")
        if os.path.isdir(relative):
            search_path = [relative] + search_path
        else:
            search_path = [os.path.dirname(relative)] + search_path

    # make the search path entries absolute and throw away any dupes
    clean_search_path = []
    for each_path in search_path:
        each_path = each_path.strip("\"'")
        if not os.path.isabs(each_path):
            each_path = os.path.realpath(os.path.join(root_path, each_path))
        if each_path not in clean_search_path:
            clean_search_path.append(each_path)

    # start hunting for the file now
    for each_path in clean_search_path:

        # if the file exists and is readable, we're done
        if os.path.isfile(os.path.join(each_path, file_name)):
            file_name = os.path.realpath(os.path.join(each_path, file_name))
            break

    # it didn't exist after all, so use the default path instead
    if not os.path.isabs(file_name):
        file_name = os.path.join(default_dir, file_name)
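    # if the default directory was itself relative, anchor to the root path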
    if not os.path.isabs(file_name):
        file_name = os.path.join(root_path, file_name)

    # normalize the resulting file path and hand it back
    return os.path.realpath(file_name)