# -*- coding: utf-8 -*-
"""Data interface functions for the mudpy engine."""

# Copyright (c) 2004-2014 Jeremy Stanley <fungi@yuggoth.org>. Permission
# to use, copy, modify, and distribute this software is granted under
# terms provided in the LICENSE file distributed with this software.
21 """A file containing universe elements."""
23 def __init__(self, filename, universe):
24 self.filename = filename
25 self.universe = universe
30 """Read a file and create elements accordingly."""
31 # TODO(fungi): remove this indirection after the YAML transition
32 if self.filename.endswith('.yaml'):
38 """Read a file and create elements accordingly."""
39 # TODO(fungi): remove this parameter after the YAML transition
43 self.data = yaml.load(open(self.filename))
44 except FileNotFoundError:
45 # it's normal if the file is one which doesn't exist yet
47 mudpy.misc.log("Couldn't read %s file." % self.filename, 6)
49 # happens when we're not far enough along in the init process
51 if not hasattr(self.universe, "files"):
52 self.universe.files = {}
53 self.universe.files[self.filename] = self
55 if "__control__" in self.data:
56 if "include_files" in self.data["__control__"]:
57 for included in self.data["__control__"]["include_files"]:
60 relative=self.filename,
61 universe=self.universe)
62 if included not in includes:
63 includes.append(included)
64 if "include_dirs" in self.data["__control__"]:
66 os.path.join(x, "__init__.yaml") for x in
67 self.data["__control__"]["include_dirs"]
71 relative=self.filename,
72 universe=self.universe
74 if included not in includes:
75 includes.append(included)
76 # TODO(fungi): remove this loop after the YAML transition
78 os.path.join(x, "__init__.mpy") for x in
79 self.data["__control__"]["include_dirs"]
83 relative=self.filename,
84 universe=self.universe
86 if included not in includes:
87 includes.append(included)
88 if "default_files" in self.data["__control__"]:
89 origins = self.data["__control__"]["default_files"]
90 for key in origins.keys():
91 origins[key] = find_file(
93 relative=self.filename,
94 universe=self.universe
96 if origins[key] not in includes:
97 includes.append(origins[key])
98 self.universe.default_origins[key] = origins[key]
99 if key not in self.universe.categories:
100 self.universe.categories[key] = {}
101 if "private_files" in self.data["__control__"]:
102 for item in self.data["__control__"]["private_files"]:
105 relative=self.filename,
106 universe=self.universe
108 if item not in includes:
109 includes.append(item)
110 if item not in self.universe.private_files:
111 self.universe.private_files.append(item)
        for element in self.data:
            if element != "__control__":
                mudpy.misc.Element(element, self.universe, self.filename)
        for include_file in includes:
            if not os.path.isabs(include_file):
                include_file = find_file(
                    include_file,
                    relative=self.filename,
                    universe=self.universe
                )
            if (include_file not in self.universe.files or not
                    self.universe.files[include_file].is_writeable()):
                DataFile(include_file, self.universe)
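
    # For reference, the include logic above is driven by a __control__
    # record in the YAML data file. The keys below are the ones this module
    # reads; the file and directory names are purely illustrative:
    #
    #     __control__:
    #         include_files:
    #             - options.yaml
    #         include_dirs:
    #             - account
    #         default_files:
    #             account: account/account.yaml
    #         private_files:
    #             - account/account.yaml
    #         read_only: true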

    # TODO(fungi): remove this method after the YAML transition
    def load_mpy(self):
        """Read a file and create elements accordingly."""
        self._format = "mpy"
        self.modified = False
        self.data = configparser.RawConfigParser()
        if os.access(self.filename, os.R_OK):
            self.data.read(self.filename)
        if not hasattr(self.universe, "files"):
            self.universe.files = {}
        self.universe.files[self.filename] = self
        includes = []
        if self.data.has_option("__control__", "include_files"):
            for included in makelist(
                    self.data.get("__control__", "include_files")):
                included = find_file(
                    included,
                    relative=self.filename,
                    universe=self.universe
                )
                if included not in includes:
                    includes.append(included)
        if self.data.has_option("__control__", "include_dirs"):
            for included in [
                    os.path.join(x, "__init__.yaml") for x in makelist(
                        self.data.get("__control__", "include_dirs"))]:
                included = find_file(
                    included,
                    relative=self.filename,
                    universe=self.universe
                )
                if included not in includes:
                    includes.append(included)
            for included in [
                    os.path.join(x, "__init__.mpy") for x in makelist(
                        self.data.get("__control__", "include_dirs"))]:
                included = find_file(
                    included,
                    relative=self.filename,
                    universe=self.universe
                )
                if included not in includes:
                    includes.append(included)
        if self.data.has_option("__control__", "default_files"):
            origins = makedict(
                self.data.get("__control__", "default_files"))
            for key in origins.keys():
                origins[key] = find_file(
                    origins[key],
                    relative=self.filename,
                    universe=self.universe
                )
                if origins[key] not in includes:
                    includes.append(origins[key])
                self.universe.default_origins[key] = origins[key]
                if key not in self.universe.categories:
                    self.universe.categories[key] = {}
        if self.data.has_option("__control__", "private_files"):
            for item in makelist(
                    self.data.get("__control__", "private_files")):
                item = find_file(
                    item,
                    relative=self.filename,
                    universe=self.universe
                )
                if item not in includes:
                    includes.append(item)
                if item not in self.universe.private_files:
                    self.universe.private_files.append(item)
        for section in self.data.sections():
            if section != "__control__":
                mudpy.misc.Element(section, self.universe, self.filename)
        for include_file in includes:
            if not os.path.isabs(include_file):
                include_file = find_file(
                    include_file,
                    relative=self.filename,
                    universe=self.universe
                )
            if (include_file not in self.universe.files or not
                    self.universe.files[include_file].is_writeable()):
                DataFile(include_file, self.universe)
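
    # The legacy .mpy format expresses the same controls as an INI-style
    # __control__ section parsed by configparser; the values here are only
    # examples, written in the string forms that makelist() and makedict()
    # expect:
    #
    #     [__control__]
    #     include_files = ["options.mpy"]
    #     include_dirs = ["account"]
    #     default_files = "account": "account/account.mpy"
    #     private_files = ["account/account.mpy"]
    #     read_only = yes
    #     backup_count = 3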

    # TODO(fungi): this should support writing YAML
    def save(self):
        """Write the data, if necessary."""

        # when modified, writeable and has content or the file exists
        if self.modified and self.is_writeable() and (
                self.data.sections() or os.path.exists(self.filename)):

            # make parent directories if necessary
            if not os.path.exists(os.path.dirname(self.filename)):
                os.makedirs(os.path.dirname(self.filename))

            # figure out how many backup copies to keep
            if self.data.has_option("__control__", "backup_count"):
                max_count = self.data.getint(
                    "__control__", "backup_count")
            else:
                max_count = self.universe.categories[
                    "internal"]["limits"].getint("default_backup_count")

            # rotate any existing backup copies of the file
            if os.path.exists(self.filename) and max_count:
                backups = []
                for candidate in os.listdir(os.path.dirname(self.filename)):
                    if re.match(
                            re.escape(os.path.basename(self.filename)) +
                            r"\.\d+$", candidate):
                        backups.append(int(candidate.split(".")[-1]))
                backups.sort(reverse=True)
                for old_backup in backups:
                    if old_backup >= max_count - 1:
                        os.remove(self.filename + "." + str(old_backup))
                    elif not os.path.exists(
                            self.filename + "." + str(old_backup + 1)):
                        os.rename(
                            self.filename + "." + str(old_backup),
                            self.filename + "." + str(old_backup + 1))
                if not os.path.exists(self.filename + ".0"):
                    os.rename(self.filename, self.filename + ".0")
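
            # at this point the newest backup is <filename>.0 and the oldest
            # surviving one is <filename>.<max_count - 1>; for example, with
            # a backup_count of 3, saving foo.mpy shifts foo.mpy.1 to
            # foo.mpy.2, foo.mpy.0 to foo.mpy.1 and foo.mpy to foo.mpy.0,
            # discarding anything older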

            # open the file for writing
            file_descriptor = codecs.open(self.filename, "w", "utf-8")

            # if it's marked private, chmod it appropriately
            if self.filename in self.universe.private_files and stat.S_IMODE(
                    os.stat(self.filename)[stat.ST_MODE]) != 0o0600:
                os.chmod(self.filename, 0o0600)

            # write it back sorted, instead of using configparser
            sections = self.data.sections()
            sections.sort()
            for section in sections:
                file_descriptor.write("[" + section + "]\n")
                options = self.data.options(section)
                options.sort()
                for option in options:
                    file_descriptor.write(
                        option + " = " +
                        self.data.get(section, option) + "\n")
                file_descriptor.write("\n")

            # flush and close the file
            file_descriptor.flush()
            file_descriptor.close()

            # unset the modified flag
            self.modified = False

    def is_writeable(self):
        """Return True unless the __control__ read_only flag is set."""
        # TODO(fungi): remove this indirection after the YAML transition
        if self._format == "yaml":
            if "__control__" not in self.data:
                return True
            return not self.data["__control__"].get("read_only", False)
        else:
            return not self.data.has_option(
                "__control__", "read_only"
            ) or not self.data.getboolean(
                "__control__", "read_only"
            )
316 """Return an absolute file path based on configuration."""
318 # make sure to get rid of any surrounding quotes first thing
320 file_name = file_name.strip("\"'")
322 # this is all unnecessary if it's already absolute
323 if file_name and os.path.isabs(file_name):
324 return os.path.realpath(file_name)
326 # when no file name is specified, look for <argv[0]>.conf
328 file_name = os.path.basename(sys.argv[0]) + ".conf"

    # if a universe was provided, try to get some defaults from there
    if universe:

        if hasattr(
                universe, "contents"
        ) and "internal:storage" in universe.contents:
            storage = universe.categories["internal"]["storage"]
            if not root_path:
                root_path = storage.get("root_path").strip("\"'")
            if not search_path:
                search_path = storage.getlist("search_path")
            if not default_dir:
                default_dir = storage.get("default_dir").strip("\"'")

        # if there's only one file loaded, try to work around a
        # chicken-and-egg problem
        elif hasattr(universe, "files") and len(
                universe.files
        ) == 1 and not universe.files[
                list(universe.files.keys())[0]].is_writeable():
            data_file = universe.files[list(universe.files.keys())[0]].data

            # try for a fallback default directory
            if not default_dir and data_file.has_option(
                    "internal:storage", "default_dir"):
                default_dir = data_file.get(
                    "internal:storage", "default_dir").strip("\"'")

            # try for a fallback root path
            if not root_path and data_file.has_option(
                    "internal:storage", "root_path"):
                root_path = data_file.get(
                    "internal:storage", "root_path").strip("\"'")

            # try for a fallback search path
            if not search_path and data_file.has_option(
                    "internal:storage", "search_path"):
                search_path = makelist(
                    data_file.get("internal:storage",
                                  "search_path").strip("\"'"))

        # another fallback root path, this time from the universe startdir
        if not root_path and hasattr(universe, "startdir"):
            root_path = universe.startdir

    # when no root path is specified, assume the current working directory
    if not root_path:
        root_path = os.getcwd()

    # otherwise, make sure it's absolute
    elif not os.path.isabs(root_path):
        root_path = os.path.realpath(root_path)

    # if there's no search path, just use the root path and etc
    if not search_path:
        search_path = [root_path, "etc"]

    # work on a copy of the search path, to avoid modifying the caller's list
    else:
        search_path = search_path[:]

    # if there's no default path, use the last element of the search path
    if not default_dir:
        default_dir = search_path[-1]

    # if an existing file or directory reference was supplied, prepend it
    if relative:
        relative = relative.strip("\"'")
        if os.path.isdir(relative):
            search_path = [relative] + search_path
        else:
            search_path = [os.path.dirname(relative)] + search_path

    # make the search path entries absolute and throw away any dupes
    clean_search_path = []
    for each_path in search_path:
        each_path = each_path.strip("\"'")
        if not os.path.isabs(each_path):
            each_path = os.path.realpath(os.path.join(root_path, each_path))
        if each_path not in clean_search_path:
            clean_search_path.append(each_path)

    # start hunting for the file now
    for each_path in clean_search_path:

        # if the file exists and is readable, we're done
        if os.path.isfile(os.path.join(each_path, file_name)):
            file_name = os.path.realpath(os.path.join(each_path, file_name))
            break

    # it didn't exist after all, so use the default path instead
    if not os.path.isabs(file_name):
        file_name = os.path.join(default_dir, file_name)
    if not os.path.isabs(file_name):
        file_name = os.path.join(root_path, file_name)

    # normalize the resulting file path and hand it back
    file_name = os.path.realpath(file_name)
    return file_name
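
# Illustrative find_file() results (hypothetical paths, assuming no universe
# defaults are available):
#
#     find_file("/etc/mudpy/mudpy.yaml")
#         # already absolute, so its os.path.realpath() is returned directly
#     find_file("sample.yaml", root_path="/srv/mud")
#         # searched for in /srv/mud and /srv/mud/etc, falling back to
#         # /srv/mud/etc/sample.yaml if no existing copy is found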
445 """Turn string into list type."""
446 if value[0] + value[-1] == "[]":
448 elif value[0] + value[-1] == "\"\"":
455 """Turn string into dict type."""
456 if value[0] + value[-1] == "{}":
458 elif value.find(":") > 0:
459 return eval("{" + value + "}")
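

# Sample conversions for the two helpers above; note that they rely on
# eval(), so the option strings are assumed to be trusted configuration data:
#
#     makelist('["high", "low"]')    # -> ['high', 'low']
#     makelist('"solo"')             # -> ['solo']
#     makelist('solo')               # -> ['solo']
#     makedict('{"hp": 10}')         # -> {'hp': 10}
#     makedict('"hp": 10, "mp": 4')  # -> {'hp': 10, 'mp': 4}
#     makedict('flag')               # -> {'flag': None}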