Drop support for Python 2.x
lib/mudpy/data.py
# -*- coding: utf-8 -*-
"""Data interface functions for the mudpy engine."""

# Copyright (c) 2004-2014 Jeremy Stanley <fungi@yuggoth.org>. Permission
# to use, copy, modify, and distribute this software is granted under
# terms provided in the LICENSE file distributed with this software.

import codecs
import configparser
import os
import re
import stat
import sys

import mudpy


class DataFile:

    """A file containing universe elements."""

    def __init__(self, filename, universe):
        self.filename = filename
        self.universe = universe
        self.load()

    def load(self):
        """Read a file and create elements accordingly."""
        self.data = configparser.RawConfigParser()
        self.modified = False
        if os.access(self.filename, os.R_OK):
            self.data.read(self.filename)

        # index this file in the universe by its name
        if not hasattr(self.universe, "files"):
            self.universe.files = {}
        self.universe.files[self.filename] = self
        includes = []

        # queue individually included files
        if self.data.has_option("__control__", "include_files"):
            for included in makelist(
                self.data.get("__control__", "include_files")
            ):
                included = find_file(
                    included,
                    relative=self.filename,
                    universe=self.universe
                )
                if included not in includes:
                    includes.append(included)

        # queue the __init__.mpy file of each included directory
        if self.data.has_option("__control__", "include_dirs"):
            for included in [
                os.path.join(x, "__init__.mpy") for x in makelist(
                    self.data.get("__control__", "include_dirs")
                )
            ]:
                included = find_file(
                    included,
                    relative=self.filename,
                    universe=self.universe
                )
                if included not in includes:
                    includes.append(included)

        # record the default origin file for each category and make
        # sure the category itself exists in the universe
        if self.data.has_option("__control__", "default_files"):
            origins = makedict(
                self.data.get("__control__", "default_files")
            )
            for key in origins.keys():
                origins[key] = find_file(
                    origins[key],
                    relative=self.filename,
                    universe=self.universe
                )
                if origins[key] not in includes:
                    includes.append(origins[key])
                self.universe.default_origins[key] = origins[key]
                if key not in self.universe.categories:
                    self.universe.categories[key] = {}

        # track files which should be kept private
        if self.data.has_option("__control__", "private_files"):
            for item in makelist(
                self.data.get("__control__", "private_files")
            ):
                item = find_file(
                    item,
                    relative=self.filename,
                    universe=self.universe
                )
                if item not in includes:
                    includes.append(item)
                if item not in self.universe.private_files:
                    self.universe.private_files.append(item)

        # every section other than __control__ becomes an element
        for section in self.data.sections():
            if section != "__control__":
                mudpy.misc.Element(section, self.universe, self.filename)

        # load queued includes, unless already loaded and writeable
        for include_file in includes:
            if not os.path.isabs(include_file):
                include_file = find_file(
                    include_file,
                    relative=self.filename,
                    universe=self.universe
                )
            if (include_file not in self.universe.files or not
                    self.universe.files[include_file].is_writeable()):
                DataFile(include_file, self.universe)

    def save(self):
        """Write the data, if necessary."""

        # when modified, writeable and has content or the file exists
        if self.modified and self.is_writeable() and (
           self.data.sections() or os.path.exists(self.filename)
           ):

            # make parent directories if necessary
            if not os.path.exists(os.path.dirname(self.filename)):
                os.makedirs(os.path.dirname(self.filename))

            # back up the file if a backup count is configured
            if self.data.has_option("__control__", "backup_count"):
                max_count = self.data.getint(
                    "__control__", "backup_count")
            else:
                max_count = self.universe.categories[
                    "internal"
                ][
                    "limits"
                ].getint("default_backup_count")

            # rotate existing backups: filename.0 is the most recent, and
            # anything numbered max_count - 1 or higher is discarded
            if os.path.exists(self.filename) and max_count:
                backups = []
                for candidate in os.listdir(os.path.dirname(self.filename)):
                    if re.match(
                       re.escape(os.path.basename(self.filename)) +
                       r"\.\d+$", candidate
                       ):
                        backups.append(int(candidate.split(".")[-1]))
                backups.sort()
                backups.reverse()
                for old_backup in backups:
                    if old_backup >= max_count - 1:
                        os.remove(self.filename + "." + str(old_backup))
                    elif not os.path.exists(
                        self.filename + "." + str(old_backup + 1)
                    ):
                        os.rename(
                            self.filename + "." + str(old_backup),
                            self.filename + "." + str(old_backup + 1)
                        )
                if not os.path.exists(self.filename + ".0"):
                    os.rename(self.filename, self.filename + ".0")

            # open the data file for writing
            file_descriptor = codecs.open(self.filename, "w", "utf-8")

            # if it's marked private, chmod it appropriately
            if self.filename in self.universe.private_files and stat.S_IMODE(
               os.stat(self.filename).st_mode
               ) != 0o0600:
                os.chmod(self.filename, 0o0600)

            # write it back sorted, instead of using configparser
            sections = self.data.sections()
            sections.sort()
            for section in sections:
                file_descriptor.write("[" + section + "]\n")
                options = self.data.options(section)
                options.sort()
                for option in options:
                    file_descriptor.write(
                        option + " = " +
                        self.data.get(section, option) + "\n"
                    )
                file_descriptor.write("\n")

            # flush and close the file
            file_descriptor.flush()
            file_descriptor.close()

            # unset the modified flag
            self.modified = False

    def is_writeable(self):
        """Return True unless the __control__ read_only option is true."""
        return not self.data.has_option(
            "__control__", "read_only"
        ) or not self.data.getboolean(
            "__control__", "read_only"
        )


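# An illustrative "__control__" section (file names, categories and values
# here are hypothetical; the option names are the ones DataFile.load()
# consumes above):
#
#     [__control__]
#     include_files = ["archetype.mpy", "command.mpy"]
#     include_dirs = ["area"]
#     default_files = {"account": "account.mpy", "prop": "prop.mpy"}
#     private_files = ["account.mpy"]
#     read_only = yes
#     backup_count = 3
#
# Given an existing universe object (as built elsewhere in mudpy), such a
# file would typically be loaded and written back roughly like this:
#
#     data_file = DataFile("/usr/local/mudpy/etc/mudpy.conf", universe)
#     if data_file.is_writeable():
#         data_file.modified = True
#         data_file.save()

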
def find_file(
    file_name=None,
    root_path=None,
    search_path=None,
    default_dir=None,
    relative=None,
    universe=None
):
    """Return an absolute file path based on configuration."""

    # make sure to get rid of any surrounding quotes first thing
    if file_name:
        file_name = file_name.strip("\"'")

    # this is all unnecessary if it's already absolute
    if file_name and os.path.isabs(file_name):
        return os.path.realpath(file_name)

    # when no file name is specified, look for <argv[0]>.conf
    elif not file_name:
        file_name = os.path.basename(sys.argv[0]) + ".conf"

    # if a universe was provided, try to get some defaults from there
    if universe:

        if hasattr(
           universe,
           "contents"
           ) and "internal:storage" in universe.contents:
            storage = universe.categories["internal"]["storage"]
            if not root_path:
                root_path = storage.get("root_path").strip("\"'")
            if not search_path:
                search_path = storage.getlist("search_path")
            if not default_dir:
                default_dir = storage.get("default_dir").strip("\"'")

        # if only one file is loaded so far, work around the chicken-and-egg
        # problem by pulling fallback settings directly from that file
        elif hasattr(universe, "files") and len(
            universe.files
        ) == 1 and not universe.files[
                list(universe.files.keys())[0]].is_writeable():
            data_file = universe.files[list(universe.files.keys())[0]].data

            # try for a fallback default directory
            if not default_dir and data_file.has_option(
               "internal:storage",
               "default_dir"
               ):
                default_dir = data_file.get(
                    "internal:storage",
                    "default_dir"
                ).strip("\"'")

            # try for a fallback root path
            if not root_path and data_file.has_option(
               "internal:storage",
               "root_path"
               ):
                root_path = data_file.get(
                    "internal:storage",
                    "root_path"
                ).strip("\"'")

            # try for a fallback search path
            if not search_path and data_file.has_option(
               "internal:storage",
               "search_path"
               ):
                search_path = makelist(
                    data_file.get("internal:storage",
                                  "search_path").strip("\"'")
                )

        # another fallback root path, this time from the universe startdir
        if not root_path and hasattr(universe, "startdir"):
            root_path = universe.startdir

    # when no root path is specified, assume the current working directory
    if not root_path:
        root_path = os.getcwd()

    # otherwise, make sure it's absolute
    elif not os.path.isabs(root_path):
        root_path = os.path.realpath(root_path)

    # with no search path, use the root path and its "etc" subdirectory
    if not search_path:
        search_path = [root_path, "etc"]

    # otherwise, copy the search path to avoid modifying the caller's list
    else:
        search_path = search_path[:]

    # if there's no default directory, use the last search path element
    if not default_dir:
        default_dir = search_path[-1]

    # if an existing file or directory reference was supplied, prepend it
    if relative:
        relative = relative.strip("\"'")
        if os.path.isdir(relative):
            search_path = [relative] + search_path
        else:
            search_path = [os.path.dirname(relative)] + search_path

    # make the search path entries absolute and throw away any dupes
    clean_search_path = []
    for each_path in search_path:
        each_path = each_path.strip("\"'")
        if not os.path.isabs(each_path):
            each_path = os.path.realpath(os.path.join(root_path, each_path))
        if each_path not in clean_search_path:
            clean_search_path.append(each_path)

    # start hunting for the file now
    for each_path in clean_search_path:

        # if the file exists, we're done
        if os.path.isfile(os.path.join(each_path, file_name)):
            file_name = os.path.realpath(os.path.join(each_path, file_name))
            break

    # it didn't exist after all, so use the default directory instead
    if not os.path.isabs(file_name):
        file_name = os.path.join(default_dir, file_name)
    if not os.path.isabs(file_name):
        file_name = os.path.join(root_path, file_name)

    # normalize the resulting file path and hand it back
    file_name = os.path.realpath(file_name)
    return file_name


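# An illustrative find_file() call (the paths and universe object here are
# hypothetical): a relative name is tried first alongside the file that
# referenced it, then along the search path, and finally falls back to a
# location under default_dir:
#
#     find_file(
#         "account.mpy",
#         relative="/usr/local/mudpy/etc/mudpy.conf",
#         universe=universe
#     )
#     # -> "/usr/local/mudpy/etc/account.mpy" if that file exists, otherwise
#     #    a path built from default_dir (or the root path)

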
def makelist(value):
    """Turn string into list type."""
    if value[0] + value[-1] == "[]":
        return eval(value)
    elif value[0] + value[-1] == "\"\"":
        return [value[1:-1]]
    else:
        return [value]


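# Illustrative makelist() results, following the branches above (bracketed
# values are passed to eval(), so they are expected to be trusted input):
#
#     makelist('["apple", "banana"]')  # -> ["apple", "banana"]
#     makelist('"apple"')              # -> ["apple"]
#     makelist('apple')                # -> ["apple"]

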
def makedict(value):
    """Turn string into dict type."""
    if value[0] + value[-1] == "{}":
        return eval(value)
    elif value.find(":") > 0:
        return eval("{" + value + "}")
    else:
        return {value: None}
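

# Illustrative makedict() results, mirroring makelist() above (braced or
# colon-containing values are passed to eval(), so they are trusted input):
#
#     makedict('{"height": 24}')  # -> {"height": 24}
#     makedict('"height": 24')    # -> {"height": 24}
#     makedict('height')          # -> {"height": None}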