Import a correct configparser for the interpreter
lib/mudpy/data.py
# -*- coding: utf-8 -*-
"""Data interface functions for the mudpy engine."""

# Copyright (c) 2004-2013 Jeremy Stanley <fungi@yuggoth.org>. Permission
# to use, copy, modify, and distribute this software is granted under
# terms provided in the LICENSE file distributed with this software.


class DataFile:

    """A file containing universe elements."""

    def __init__(self, filename, universe):
        self.filename = filename
        self.universe = universe
        self.load()

    def load(self):
        """Read a file and create elements accordingly."""
        import mudpy.misc
        import os
        import os.path
        # TODO: remove this check after the switch to py3k
        try:
            import configparser
        except ImportError:
            import ConfigParser as configparser
        self.data = configparser.RawConfigParser()
        self.modified = False
        if os.access(self.filename, os.R_OK):
            self.data.read(self.filename)
        if not hasattr(self.universe, "files"):
            self.universe.files = {}
        self.universe.files[self.filename] = self
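        # gather any additional data files this one pulls in through its
        # __control__ section; the include_files, include_dirs,
        # default_files and private_files options below all contribute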
        includes = []
        if self.data.has_option("__control__", "include_files"):
            for included in makelist(
                self.data.get("__control__", "include_files")
            ):
                included = find_file(
                    included,
                    relative=self.filename,
                    universe=self.universe
                )
                if included not in includes:
                    includes.append(included)
        if self.data.has_option("__control__", "include_dirs"):
            for included in [
                os.path.join(x, "__init__.mpy") for x in makelist(
                    self.data.get("__control__", "include_dirs")
                )
            ]:
                included = find_file(
                    included,
                    relative=self.filename,
                    universe=self.universe
                )
                if included not in includes:
                    includes.append(included)
        if self.data.has_option("__control__", "default_files"):
            origins = makedict(
                self.data.get("__control__", "default_files")
            )
            for key in origins.keys():
                origins[key] = find_file(
                    origins[key],
                    relative=self.filename,
                    universe=self.universe
                )
                if origins[key] not in includes:
                    includes.append(origins[key])
                self.universe.default_origins[key] = origins[key]
                if key not in self.universe.categories:
                    self.universe.categories[key] = {}
        if self.data.has_option("__control__", "private_files"):
            for item in makelist(
                self.data.get("__control__", "private_files")
            ):
                item = find_file(
                    item,
                    relative=self.filename,
                    universe=self.universe
                )
                if item not in includes:
                    includes.append(item)
                if item not in self.universe.private_files:
                    self.universe.private_files.append(item)
        for section in self.data.sections():
            if section != "__control__":
                mudpy.misc.Element(section, self.universe, self.filename)
        for include_file in includes:
            if not os.path.isabs(include_file):
                include_file = find_file(
                    include_file,
                    relative=self.filename,
                    universe=self.universe
                )
            if (include_file not in self.universe.files or not
                    self.universe.files[include_file].is_writeable()):
                DataFile(include_file, self.universe)

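    # For orientation, a sketch (illustrative values only) of a __control__
    # section using the option names this class consults, written in the
    # same format the save() method produces:
    #
    #   [__control__]
    #   include_files = "archetype.mpy"
    #   include_dirs = "account"
    #   default_files = "account": "account.mpy"
    #   private_files = "account.mpy"
    #   backup_count = 3
    #   read_only = yes
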
    def save(self):
        """Write the data, if necessary."""
        import codecs
        import os
        import os.path
        import re
        import stat

        # only write when modified and writeable, and when there is
        # either content to save or an existing file to replace
        if self.modified and self.is_writeable() and (
           self.data.sections() or os.path.exists(self.filename)
           ):

            # make parent directories if necessary
            if not os.path.exists(os.path.dirname(self.filename)):
                os.makedirs(os.path.dirname(self.filename))

            # backup the file: the most recent copy is <filename>.0 and
            # older copies shift up to .1, .2, ... below backup_count
            if self.data.has_option("__control__", "backup_count"):
                max_count = self.data.getint(
                    "__control__", "backup_count")
            else:
                max_count = self.universe.categories[
                    "internal"
                ][
                    "limits"
                ].getint("default_backup_count")
            if os.path.exists(self.filename) and max_count:
                backups = []
                for candidate in os.listdir(os.path.dirname(self.filename)):
                    if re.match(
                       os.path.basename(self.filename) +
                       r"\.\d+$", candidate
                       ):
                        backups.append(int(candidate.split(".")[-1]))
                backups.sort()
                backups.reverse()
                for old_backup in backups:
                    if old_backup >= max_count - 1:
                        os.remove(self.filename + "." + str(old_backup))
                    elif not os.path.exists(
                        self.filename + "." + str(old_backup + 1)
                    ):
                        os.rename(
                            self.filename + "." + str(old_backup),
                            self.filename + "." + str(old_backup + 1)
                        )
                if not os.path.exists(self.filename + ".0"):
                    os.rename(self.filename, self.filename + ".0")

            # open our data file for writing
            file_descriptor = codecs.open(self.filename, "w", "utf-8")

            # if it's marked private, chmod it appropriately
            if self.filename in self.universe.private_files and stat.S_IMODE(
               os.stat(self.filename)[stat.ST_MODE]
               ) != 0o0600:
                os.chmod(self.filename, 0o0600)

            # write it back sorted, instead of using configparser
            sections = self.data.sections()
            sections.sort()
            for section in sections:
                file_descriptor.write("[" + section + "]\n")
                options = self.data.options(section)
                options.sort()
                for option in options:
                    file_descriptor.write(
                        option + " = " +
                        self.data.get(section, option) + "\n"
                    )
                file_descriptor.write("\n")

            # flush and close the file
            file_descriptor.flush()
            file_descriptor.close()

            # unset the modified flag
            self.modified = False

    def is_writeable(self):
        """Return True unless the __control__ read_only option is set."""
        return not self.data.has_option(
            "__control__", "read_only"
        ) or not self.data.getboolean(
            "__control__", "read_only"
        )


def find_file(
    file_name=None,
    root_path=None,
    search_path=None,
    default_dir=None,
    relative=None,
    universe=None
):
    """Return an absolute file path based on configuration."""
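    # Summarizing the resolution implemented below: an absolute file_name
    # is returned immediately; otherwise the directory of relative (when
    # given) and each search_path entry, made absolute against root_path,
    # are checked in order, and if nothing matches the name is joined onto
    # default_dir (and root_path as a last resort) before normalization.
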
    import os
    import os.path
    import sys

    # make sure to get rid of any surrounding quotes first thing
    if file_name:
        file_name = file_name.strip("\"'")

    # this is all unnecessary if it's already absolute
    if file_name and os.path.isabs(file_name):
        return os.path.realpath(file_name)

    # when no file name is specified, look for <argv[0]>.conf
    elif not file_name:
        file_name = os.path.basename(sys.argv[0]) + ".conf"

    # if a universe was provided, try to get some defaults from there
    if universe:

        if hasattr(
           universe,
           "contents"
           ) and "internal:storage" in universe.contents:
            storage = universe.categories["internal"]["storage"]
            if not root_path:
                root_path = storage.get("root_path").strip("\"'")
            if not search_path:
                search_path = storage.getlist("search_path")
            if not default_dir:
                default_dir = storage.get("default_dir").strip("\"'")

        # if only one file is loaded so far, try to work around the
        # chicken-and-egg problem of bootstrapping configuration
        elif hasattr(universe, "files") and len(
            universe.files
        ) == 1 and not universe.files[
                list(universe.files.keys())[0]].is_writeable():
            data_file = universe.files[list(universe.files.keys())[0]].data

            # try for a fallback default directory
            if not default_dir and data_file.has_option(
               "internal:storage",
               "default_dir"
               ):
                default_dir = data_file.get(
                    "internal:storage",
                    "default_dir"
                ).strip("\"'")

            # try for a fallback root path
            if not root_path and data_file.has_option(
               "internal:storage",
               "root_path"
               ):
                root_path = data_file.get(
                    "internal:storage",
                    "root_path"
                ).strip("\"'")

            # try for a fallback search path
            if not search_path and data_file.has_option(
               "internal:storage",
               "search_path"
               ):
                search_path = makelist(
                    data_file.get("internal:storage",
                                  "search_path").strip("\"'")
                )

        # another fallback root path, this time from the universe startdir
        if not root_path and hasattr(universe, "startdir"):
            root_path = universe.startdir

    # when no root path is specified, assume the current working directory
    if not root_path:
        root_path = os.getcwd()

    # otherwise, make sure it's absolute
    elif not os.path.isabs(root_path):
        root_path = os.path.realpath(root_path)

    # if there's no search path, just use the root path and "etc"
    if not search_path:
        search_path = [root_path, "etc"]

    # otherwise, work on a copy so the caller's list isn't modified
    else:
        search_path = search_path[:]

    # if there's no default path, use the last element of the search path
    if not default_dir:
        default_dir = search_path[-1]

    # if an existing file or directory reference was supplied, prepend it
    if relative:
        relative = relative.strip("\"'")
        if os.path.isdir(relative):
            search_path = [relative] + search_path
        else:
            search_path = [os.path.dirname(relative)] + search_path

    # make the search path entries absolute and throw away any dupes
    clean_search_path = []
    for each_path in search_path:
        each_path = each_path.strip("\"'")
        if not os.path.isabs(each_path):
            each_path = os.path.realpath(os.path.join(root_path, each_path))
        if each_path not in clean_search_path:
            clean_search_path.append(each_path)

    # start hunting for the file now
    for each_path in clean_search_path:

        # if the file exists and is readable, we're done
        if os.path.isfile(os.path.join(each_path, file_name)):
            file_name = os.path.realpath(os.path.join(each_path, file_name))
            break

    # it didn't exist after all, so use the default path instead
    if not os.path.isabs(file_name):
        file_name = os.path.join(default_dir, file_name)
    if not os.path.isabs(file_name):
        file_name = os.path.join(root_path, file_name)

    # normalize the resulting file path and hand it back
    file_name = os.path.realpath(file_name)

    return file_name


def makelist(value):
    """Turn a string into a list."""
    if value[0] + value[-1] == "[]":
        return eval(value)
    elif value[0] + value[-1] == "\"\"":
        return [value[1:-1]]
    else:
        return [value]


def makedict(value):
    """Turn a string into a dict."""
    if value[0] + value[-1] == "{}":
        return eval(value)
    elif value.find(":") > 0:
        return eval("{" + value + "}")
    else:
        return {value: None}
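

# A minimal usage sketch (an illustrative addition, not part of the original
# module): the string-parsing helpers can be exercised directly, while
# DataFile and find_file need a mudpy universe and on-disk files to run.
if __name__ == "__main__":
    # a Python list literal, a quoted scalar and a bare scalar all make lists
    print(makelist('["foo.mpy", "bar.mpy"]'))   # ['foo.mpy', 'bar.mpy']
    print(makelist('"foo.mpy"'))                # ['foo.mpy']
    print(makelist('foo.mpy'))                  # ['foo.mpy']
    # "key: value" pairs (or a dict literal) become dicts
    print(makedict('"account": "account.mpy"'))  # {'account': 'account.mpy'}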