1 """Data interface functions for the mudpy engine."""
3 # Copyright (c) 2004-2016 Jeremy Stanley <fungi@yuggoth.org>. Permission
4 # to use, copy, modify, and distribute this software is granted under
5 # terms provided in the LICENSE file distributed with this software.
17 """A file containing universe elements and their facets."""
    def __init__(self, filename, universe):
        """Bind this data file to its path and owning universe."""
        # path of the YAML file backing this object
        self.filename = filename
        # the universe whose elements and facets this file stores
        self.universe = universe
26 """Read a file, create elements and poplulate facets accordingly."""
29 self.data = yaml.safe_load(open(self.filename))
30 except FileNotFoundError:
31 # it's normal if the file is one which doesn't exist yet
32 log_entry = ("File %s is unavailable." % self.filename, 6)
34 mudpy.misc.log(*log_entry)
36 # happens when we're not far enough along in the init process
37 self.universe.setup_loglines.append(log_entry)
38 if not hasattr(self.universe, "files"):
39 self.universe.files = {}
40 self.universe.files[self.filename] = self
42 if "__control__" in self.data:
43 if "include_files" in self.data["__control__"]:
44 for included in self.data["__control__"]["include_files"]:
47 relative=self.filename,
48 universe=self.universe)
49 if included not in includes:
50 includes.append(included)
51 if "include_dirs" in self.data["__control__"]:
53 os.path.join(x, "__init__.yaml") for x in
54 self.data["__control__"]["include_dirs"]
58 relative=self.filename,
59 universe=self.universe
61 if included not in includes:
62 includes.append(included)
63 if "default_files" in self.data["__control__"]:
64 origins = self.data["__control__"]["default_files"]
65 for key in origins.keys():
66 origins[key] = find_file(
68 relative=self.filename,
69 universe=self.universe
71 if origins[key] not in includes:
72 includes.append(origins[key])
73 self.universe.default_origins[key] = origins[key]
74 if key not in self.universe.categories:
75 self.universe.categories[key] = {}
76 if "private_files" in self.data["__control__"]:
77 for item in self.data["__control__"]["private_files"]:
80 relative=self.filename,
81 universe=self.universe
83 if item not in includes:
85 if item not in self.universe.private_files:
86 self.universe.private_files.append(item)
87 for node in list(self.data):
88 if node == "__control__":
90 facet_pos = node.rfind(".") + 1
92 mudpy.misc.Element(node, self.universe, self.filename,
95 prefix = node[:facet_pos].strip(".")
97 element = self.universe.contents[prefix]
99 element = mudpy.misc.Element(prefix, self.universe,
101 element.set(node[facet_pos:], self.data[node])
102 if prefix.startswith("mudpy.movement."):
103 self.universe.directions.add(
104 prefix[prefix.rfind(".") + 1:])
105 for include_file in includes:
106 if not os.path.isabs(include_file):
107 include_file = find_file(
109 relative=self.filename,
110 universe=self.universe
112 if (include_file not in self.universe.files or not
113 self.universe.files[include_file].is_writeable()):
114 DataFile(include_file, self.universe)
117 """Write the data, if necessary."""
118 normal_umask = 0o0022
119 private_umask = 0o0077
120 private_file_mode = 0o0600
122 # when modified, writeable and has content or the file exists
123 if self.modified and self.is_writeable() and (
124 self.data or os.path.exists(self.filename)
127 # make parent directories if necessary
128 if not os.path.exists(os.path.dirname(self.filename)):
129 old_umask = os.umask(normal_umask)
130 os.makedirs(os.path.dirname(self.filename))
134 if "__control__" in self.data and "backup_count" in self.data[
136 max_count = self.data["__control__"]["backup_count"]
138 max_count = self.universe.contents["mudpy.limit"].get(
140 if os.path.exists(self.filename) and max_count:
142 for candidate in os.listdir(os.path.dirname(self.filename)):
144 os.path.basename(self.filename) +
145 """\.\d+$""", candidate
147 backups.append(int(candidate.split(".")[-1]))
150 for old_backup in backups:
151 if old_backup >= max_count - 1:
152 os.remove(self.filename + "." + str(old_backup))
153 elif not os.path.exists(
154 self.filename + "." + str(old_backup + 1)
157 self.filename + "." + str(old_backup),
158 self.filename + "." + str(old_backup + 1)
160 if not os.path.exists(self.filename + ".0"):
161 os.rename(self.filename, self.filename + ".0")
164 if self.filename in self.universe.private_files:
165 old_umask = os.umask(private_umask)
166 file_descriptor = open(self.filename, "w")
167 if oct(stat.S_IMODE(os.stat(
168 self.filename)[stat.ST_MODE])) != private_file_mode:
169 # if it's marked private, chmod it appropriately
170 os.chmod(self.filename, private_file_mode)
172 old_umask = os.umask(normal_umask)
173 file_descriptor = open(self.filename, "w")
176 # write and close the file
177 yaml.safe_dump(self.data, allow_unicode=True,
178 default_flow_style=False, stream=file_descriptor)
179 file_descriptor.close()
181 # unset the modified flag
182 self.modified = False
184 def is_writeable(self):
185 """Returns True if the __control__ read_only is False."""
187 return not self.data["__control__"].get("read_only", False)
200 """Return an absolute file path based on configuration."""
202 # make sure to get rid of any surrounding quotes first thing
204 file_name = file_name.strip("\"'")
206 # this is all unnecessary if it's already absolute
207 if file_name and os.path.isabs(file_name):
208 return os.path.realpath(file_name)
210 # if a universe was provided, try to get some defaults from there
216 ) and "internal:storage" in universe.contents:
217 storage = universe.categories["internal"]["storage"]
219 root_path = storage.get("root_path").strip("\"'")
221 search_path = storage.get("search_path")
223 default_dir = storage.get("default_dir").strip("\"'")
225 # if there's only one file loaded, try to work around a chicken<egg
226 elif hasattr(universe, "files") and len(
228 ) == 1 and not universe.files[
229 list(universe.files.keys())[0]].is_writeable():
230 data_file = universe.files[list(universe.files.keys())[0]].data
232 # try for a fallback default directory
234 default_dir = data_file.get(
235 "internal:storage", "").get("default_dir", "")
237 # try for a fallback root path
239 root_path = data_file.get(
240 "internal:storage", "").get("root_path", "")
242 # try for a fallback search path
244 search_path = data_file.get(
245 "internal:storage", "").get("search_path", "")
247 # another fallback root path, this time from the universe startdir
248 if not root_path and hasattr(universe, "startdir"):
249 root_path = universe.startdir
251 # when no root path is specified, assume the current working directory
253 root_path = os.getcwd()
255 # otherwise, make sure it's absolute
256 elif not os.path.isabs(root_path):
257 root_path = os.path.realpath(root_path)
259 # if there's no search path, just use the root path and etc
261 search_path = [root_path, "etc"]
263 # work on a copy of the search path, to avoid modifying the caller's
265 search_path = search_path[:]
267 # if there's no default path, use the last component of the search path
269 default_dir = search_path[-1]
271 # if an existing file or directory reference was supplied, prepend it
273 relative = relative.strip("\"'")
274 if os.path.isdir(relative):
275 search_path = [relative] + search_path
277 search_path = [os.path.dirname(relative)] + search_path
279 # make the search path entries absolute and throw away any dupes
280 clean_search_path = []
281 for each_path in search_path:
282 each_path = each_path.strip("\"'")
283 if not os.path.isabs(each_path):
284 each_path = os.path.realpath(os.path.join(root_path, each_path))
285 if each_path not in clean_search_path:
286 clean_search_path.append(each_path)
288 # start hunting for the file now
289 for each_path in clean_search_path:
291 # if the file exists and is readable, we're done
292 if os.path.isfile(os.path.join(each_path, file_name)):
293 file_name = os.path.realpath(os.path.join(each_path, file_name))
296 # it didn't exist after all, so use the default path instead
297 if not os.path.isabs(file_name):
298 file_name = os.path.join(default_dir, file_name)
299 if not os.path.isabs(file_name):
300 file_name = os.path.join(root_path, file_name)
302 # and normalize it last thing before returning
303 file_name = os.path.realpath(file_name)
305 # normalize the resulting file path and hand it back