lib/mudpy/data.py
# -*- coding: utf-8 -*-
"""Data interface functions for the mudpy engine."""

# Copyright (c) 2004-2013 Jeremy Stanley <fungi@yuggoth.org>. Permission
# to use, copy, modify, and distribute this software is granted under
# terms provided in the LICENSE file distributed with this software.

import codecs
import os
import re
import stat
import sys

# TODO: remove this check after the switch to py3k
try:
    import configparser
except ImportError:
    import ConfigParser as configparser

import mudpy


class DataFile:

    """A file containing universe elements."""

    def __init__(self, filename, universe):
        self.filename = filename
        self.universe = universe
        self.load()

    def load(self):
        """Read a file and create elements accordingly."""
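        # Directives recognized in the __control__ section, as handled
        # below: include_files names additional data files to load,
        # include_dirs names directories whose __init__.mpy files are
        # loaded, default_files maps element categories to the files new
        # elements of each category default to, and private_files lists
        # files which are also loaded and which save() restricts to mode
        # 0600.
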
        self.data = configparser.RawConfigParser()
        self.modified = False
        if os.access(self.filename, os.R_OK):
            self.data.read(self.filename)
        if not hasattr(self.universe, "files"):
            self.universe.files = {}
        self.universe.files[self.filename] = self
        includes = []
        if self.data.has_option("__control__", "include_files"):
            for included in makelist(
                self.data.get("__control__", "include_files")
            ):
                included = find_file(
                    included,
                    relative=self.filename,
                    universe=self.universe
                )
                if included not in includes:
                    includes.append(included)
        if self.data.has_option("__control__", "include_dirs"):
            for included in [
                os.path.join(x, "__init__.mpy") for x in makelist(
                    self.data.get("__control__", "include_dirs")
                )
            ]:
                included = find_file(
                    included,
                    relative=self.filename,
                    universe=self.universe
                )
                if included not in includes:
                    includes.append(included)
        if self.data.has_option("__control__", "default_files"):
            origins = makedict(
                self.data.get("__control__", "default_files")
            )
            for key in origins.keys():
                origins[key] = find_file(
                    origins[key],
                    relative=self.filename,
                    universe=self.universe
                )
                if origins[key] not in includes:
                    includes.append(origins[key])
                self.universe.default_origins[key] = origins[key]
                if key not in self.universe.categories:
                    self.universe.categories[key] = {}
        if self.data.has_option("__control__", "private_files"):
            for item in makelist(
                self.data.get("__control__", "private_files")
            ):
                item = find_file(
                    item,
                    relative=self.filename,
                    universe=self.universe
                )
                if item not in includes:
                    includes.append(item)
                if item not in self.universe.private_files:
                    self.universe.private_files.append(item)
        for section in self.data.sections():
            if section != "__control__":
                mudpy.misc.Element(section, self.universe, self.filename)
        for include_file in includes:
            if not os.path.isabs(include_file):
                include_file = find_file(
                    include_file,
                    relative=self.filename,
                    universe=self.universe
                )
            if (include_file not in self.universe.files or not
                    self.universe.files[include_file].is_writeable()):
                DataFile(include_file, self.universe)

    def save(self):
        """Write the data, if necessary."""

        # when modified, writeable and has content or the file exists
        if self.modified and self.is_writeable() and (
           self.data.sections() or os.path.exists(self.filename)
           ):

            # make parent directories if necessary
            if not os.path.exists(os.path.dirname(self.filename)):
                os.makedirs(os.path.dirname(self.filename))

            # backup the file
            if self.data.has_option("__control__", "backup_count"):
                max_count = self.data.getint(
                    "__control__", "backup_count")
            else:
                max_count = self.universe.categories[
                    "internal"
                ][
                    "limits"
                ].getint("default_backup_count")
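            # Rotation scheme for the block below: existing backups shift
            # one position (filename.N becomes filename.N+1), backups at
            # or beyond max_count - 1 are removed, and the current file is
            # renamed to filename.0 as the newest backup.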
            if os.path.exists(self.filename) and max_count:
                backups = []
                for candidate in os.listdir(os.path.dirname(self.filename)):
                    if re.match(
                       re.escape(os.path.basename(self.filename)) +
                       r"\.\d+$", candidate
                       ):
                        backups.append(int(candidate.split(".")[-1]))
                backups.sort()
                backups.reverse()
                for old_backup in backups:
                    if old_backup >= max_count - 1:
                        os.remove(self.filename + "." + str(old_backup))
                    elif not os.path.exists(
                        self.filename + "." + str(old_backup + 1)
                    ):
                        os.rename(
                            self.filename + "." + str(old_backup),
                            self.filename + "." + str(old_backup + 1)
                        )
                if not os.path.exists(self.filename + ".0"):
                    os.rename(self.filename, self.filename + ".0")

            # our data file
            file_descriptor = codecs.open(self.filename, "w", "utf-8")

            # if it's marked private, chmod it appropriately
            if self.filename in self.universe.private_files and stat.S_IMODE(
               os.stat(self.filename)[stat.ST_MODE]
               ) != 0o0600:
                os.chmod(self.filename, 0o0600)

            # write it back sorted, instead of using configparser
            sections = self.data.sections()
            sections.sort()
            for section in sections:
                file_descriptor.write("[" + section + "]\n")
                options = self.data.options(section)
                options.sort()
                for option in options:
                    file_descriptor.write(
                        option + " = " +
                        self.data.get(section, option) + "\n"
                    )
                file_descriptor.write("\n")

            # flush and close the file
            file_descriptor.flush()
            file_descriptor.close()

            # unset the modified flag
            self.modified = False

    def is_writeable(self):
        """Return True unless the __control__ read_only option is true."""
        return not self.data.has_option(
            "__control__", "read_only"
        ) or not self.data.getboolean(
            "__control__", "read_only"
        )


def find_file(
    file_name=None,
    root_path=None,
    search_path=None,
    default_dir=None,
    relative=None,
    universe=None
):
    """Return an absolute file path based on configuration."""

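    # Resolution order, as implemented below: the search path entries (plus
    # the directory of any "relative" reference) are made absolute against
    # the root path and checked in turn; the first existing file wins, and
    # otherwise the name is joined onto the default directory. For example
    # (illustrative values only), find_file("archetype.mpy",
    # root_path="/srv/mud", search_path=["/srv/mud", "etc"]) would check
    # /srv/mud/archetype.mpy and then /srv/mud/etc/archetype.mpy.
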
    # make sure to get rid of any surrounding quotes first thing
    if file_name:
        file_name = file_name.strip("\"'")

    # this is all unnecessary if it's already absolute
    if file_name and os.path.isabs(file_name):
        return os.path.realpath(file_name)

    # when no file name is specified, look for <argv[0]>.conf
    elif not file_name:
        file_name = os.path.basename(sys.argv[0]) + ".conf"

    # if a universe was provided, try to get some defaults from there
    if universe:

        if hasattr(
           universe,
           "contents"
           ) and "internal:storage" in universe.contents:
            storage = universe.categories["internal"]["storage"]
            if not root_path:
                root_path = storage.get("root_path").strip("\"'")
            if not search_path:
                search_path = storage.getlist("search_path")
            if not default_dir:
                default_dir = storage.get("default_dir").strip("\"'")

        # if there's only one file loaded, try to work around a
        # chicken-and-egg problem
        elif hasattr(universe, "files") and len(
            universe.files
        ) == 1 and not universe.files[
                list(universe.files.keys())[0]].is_writeable():
            data_file = universe.files[list(universe.files.keys())[0]].data

            # try for a fallback default directory
            if not default_dir and data_file.has_option(
               "internal:storage",
               "default_dir"
               ):
                default_dir = data_file.get(
                    "internal:storage",
                    "default_dir"
                ).strip("\"'")

            # try for a fallback root path
            if not root_path and data_file.has_option(
               "internal:storage",
               "root_path"
               ):
                root_path = data_file.get(
                    "internal:storage",
                    "root_path"
                ).strip("\"'")

            # try for a fallback search path
            if not search_path and data_file.has_option(
               "internal:storage",
               "search_path"
               ):
                search_path = makelist(
                    data_file.get("internal:storage",
                                  "search_path").strip("\"'")
                )

        # another fallback root path, this time from the universe startdir
        if not root_path and hasattr(universe, "startdir"):
            root_path = universe.startdir

    # when no root path is specified, assume the current working directory
    if not root_path:
        root_path = os.getcwd()

    # otherwise, make sure it's absolute
    elif not os.path.isabs(root_path):
        root_path = os.path.realpath(root_path)

    # if there's no search path, just use the root path and the etc dir
    if not search_path:
        search_path = [root_path, "etc"]

    # work on a copy of the search path, to avoid modifying the caller's
    else:
        search_path = search_path[:]

    # if there's no default path, use the last element of the search path
    if not default_dir:
        default_dir = search_path[-1]

    # if an existing file or directory reference was supplied, prepend it
    if relative:
        relative = relative.strip("\"'")
        if os.path.isdir(relative):
            search_path = [relative] + search_path
        else:
            search_path = [os.path.dirname(relative)] + search_path

    # make the search path entries absolute and throw away any dupes
    clean_search_path = []
    for each_path in search_path:
        each_path = each_path.strip("\"'")
        if not os.path.isabs(each_path):
            each_path = os.path.realpath(os.path.join(root_path, each_path))
        if each_path not in clean_search_path:
            clean_search_path.append(each_path)

    # start hunting for the file now
    for each_path in clean_search_path:

        # if the file exists, we're done
        if os.path.isfile(os.path.join(each_path, file_name)):
            file_name = os.path.realpath(os.path.join(each_path, file_name))
            break

    # it didn't exist after all, so use the default path instead
    if not os.path.isabs(file_name):
        file_name = os.path.join(default_dir, file_name)
    if not os.path.isabs(file_name):
        file_name = os.path.join(root_path, file_name)

    # normalize the resulting file path and hand it back
    return os.path.realpath(file_name)


def makelist(value):
    """Turn string into list type."""
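    # Illustrative examples of the three forms handled below:
    #   makelist('["a", "b"]')  returns  ["a", "b"]  (bracketed list)
    #   makelist('"foo"')       returns  ["foo"]     (quoted scalar)
    #   makelist('foo')         returns  ["foo"]     (bare scalar)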
    if value[0] + value[-1] == "[]":
        return eval(value)
    elif value[0] + value[-1] == "\"\"":
        return [value[1:-1]]
    else:
        return [value]


def makedict(value):
    """Turn string into dict type."""
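    # Illustrative examples of the three forms handled below:
    #   makedict('{"actor": "actor.mpy"}')  returns  {"actor": "actor.mpy"}
    #   makedict('"actor": "actor.mpy"')    returns  {"actor": "actor.mpy"}
    #   makedict('flags')                   returns  {"flags": None}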
    if value[0] + value[-1] == "{}":
        return eval(value)
    elif value.find(":") > 0:
        return eval("{" + value + "}")
    else:
        return {value: None}
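

# A minimal usage sketch (assumes a universe object exposing the attributes
# used above, such as files, categories and private_files; illustrative
# file name, not part of this module):
#
#     config = DataFile("etc/mudpy.conf", universe)
#     ...
#     config.save()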