1 """Data interface functions for the mudpy engine."""
3 # Copyright (c) 2004-2018 mudpy authors. Permission to use, copy,
4 # modify, and distribute this software is granted under terms
5 # provided in the LICENSE file distributed with this software.
import os
import re
import stat

import mudpy
import yaml


class _IBSEmitter(yaml.emitter.Emitter):

    """Override the default YAML Emitter to indent block sequences."""

    def expect_block_sequence(self):
        """Match the expectations of the ``yamllint`` style checker."""

        # TODO(fungi) Get an option for this implemented upstream in
        # PyYAML
        self.increase_indent(flow=False, indentless=False)
        self.state = self.expect_first_block_sequence_item


class _IBSDumper(yaml.SafeDumper, _IBSEmitter):

    """Use our _IBSEmitter instead of the default implementation."""
37 """A file containing universe elements and their facets."""
46 self.universe = universe
51 self.relative = relative
55 """Read a file, create elements and poplulate facets accordingly."""
        self.modified = False
        self.source = find_file(
                self.source, relative=self.relative, universe=self.universe)
        try:
            self.data = yaml.safe_load(open(self.source))
            log_entry = ("Loaded file %s into memory." % self.source, 5)
        except FileNotFoundError:
            # it's normal if the file is one which doesn't exist yet
            self.data = {}
            log_entry = ("File %s is unavailable." % self.source, 6)
        try:
            mudpy.misc.log(*log_entry)
        except AttributeError:
            # happens when we're not far enough along in the init process
            self.universe.setup_loglines.append(log_entry)
        if not hasattr(self.universe, "files"):
            self.universe.files = {}
        self.universe.files[self.source] = self
        includes = []
        for node in list(self.data):
            if node == "_load":
                includes += self.data["_load"]
                continue
            if node.startswith("_"):
                continue
            facet_pos = node.rfind(".") + 1
            prefix = node[:facet_pos].strip(".")
            try:
                element = self.universe.contents[prefix]
            except KeyError:
                element = mudpy.misc.Element(prefix, self.universe, self)
            element.set(node[facet_pos:], self.data[node])
            if prefix.startswith("mudpy.movement."):
                self.universe.directions.add(
                        prefix[prefix.rfind(".") + 1:])
        for include_file in includes:
            if not os.path.isabs(include_file):
                include_file = find_file(
                        include_file,
                        relative=self.source,
                        universe=self.universe
                )
            if (include_file not in self.universe.files or not
                    self.universe.files[include_file].is_writeable()):
                Data(include_file, self.universe)
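
    # A sketch of the kind of YAML document load() consumes (element and
    # facet names here are invented for illustration): each "prefix.facet"
    # node sets one facet on the element named by the prefix, "_load"
    # lists further data files to pull in, and any other "_"-prefixed
    # node such as "_lock" is file metadata rather than element data.
    #
    #     _load:
    #         - areas/start.yaml
    #     actor.avatar_template.name: new arrival
    #     mudpy.movement.north.vector: [0, -1, 0]
    #
    # The second node sets facet "name" on element "actor.avatar_template";
    # the third sets "vector" on "mudpy.movement.north" and also registers
    # "north" as a direction.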
103 """Write the data, if necessary."""
104 normal_umask = 0o0022
105 private_umask = 0o0077
106 private_file_mode = 0o0600

        # only write when modified and writeable, and there is either
        # content or an existing file on disk
        if self.modified and self.is_writeable() and (
                self.data or os.path.exists(self.source)):

            # make parent directories if necessary
            old_umask = os.umask(normal_umask)
            os.makedirs(os.path.dirname(self.source), exist_ok=True)
            os.umask(old_umask)
119 if "mudpy.limit" in self.universe.contents:
120 max_count = self.universe.contents["mudpy.limit"].get(
124 if os.path.exists(self.source) and max_count:
                backups = []
                for candidate in os.listdir(os.path.dirname(self.source)):
                    if re.match(
                            os.path.basename(self.source) +
                            r"""\.\d+$""", candidate):
                        backups.append(int(candidate.split(".")[-1]))
                backups.sort()
                backups.reverse()
                for old_backup in backups:
                    if old_backup >= max_count - 1:
                        os.remove(self.source + "." + str(old_backup))
                    elif not os.path.exists(
                            self.source + "." + str(old_backup + 1)):
                        os.rename(
                            self.source + "." + str(old_backup),
                            self.source + "." + str(old_backup + 1))
                if not os.path.exists(self.source + ".0"):
                    os.rename(self.source, self.source + ".0")
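
            # Note on the rotation above: backups numbered max_count - 1
            # or higher are removed, each surviving backup N is shifted to
            # N + 1, and the current file then becomes <source>.0, so a
            # hypothetical limit of 3 leaves <source>.0 (newest) through
            # <source>.2 (oldest) beside the fresh copy written below.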
148 if "private" in self.flags:
149 old_umask = os.umask(private_umask)
150 file_descriptor = open(self.source, "w")
151 if oct(stat.S_IMODE(os.stat(
152 self.source)[stat.ST_MODE])) != private_file_mode:
153 # if it's marked private, chmod it appropriately
154 os.chmod(self.source, private_file_mode)
            else:
                old_umask = os.umask(normal_umask)
                file_descriptor = open(self.source, "w")
            os.umask(old_umask)

            # write and close the file
            yaml.dump(self.data, Dumper=_IBSDumper, allow_unicode=True,
                      default_flow_style=False, explicit_start=True, indent=4,
                      stream=file_descriptor)
            file_descriptor.close()

            # unset the modified flag
            self.modified = False

    def is_writeable(self):
        """Returns True if the _lock is False."""
        try:
            return not self.data.get("_lock", False)
        except KeyError:
            return True
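
# A minimal usage sketch (assumptions: "universe" is an existing
# mudpy.misc.Universe instance and "account.yaml" a data file reachable
# through the configured search path; neither is defined in this module):
#
#     data = Data("account.yaml", universe)
#     if data.is_writeable():
#         data.data["account.someone.name"] = "Someone"  # hypothetical node
#         data.modified = True
#         data.save()
#
# Files whose top-level "_lock" node is true report is_writeable() False,
# and save() quietly skips them.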
186 """Return an absolute file path based on configuration."""
188 # this is all unnecessary if it's already absolute
189 if file_name and os.path.isabs(file_name):
190 return os.path.realpath(file_name)
192 # if a universe was provided, try to get some defaults from there
196 universe, "contents") and "mudpy.filing" in universe.contents:
197 filing = universe.contents["mudpy.filing"]
199 prefix = filing.get("prefix")
201 search = filing.get("search")
203 stash = filing.get("stash")

        # if there's only one file loaded, work around a chicken-and-egg
        # problem by falling back to the data in that file
        elif hasattr(universe, "files") and len(
                universe.files
        ) == 1 and not universe.files[
                list(universe.files.keys())[0]].is_writeable():
            data_file = universe.files[list(universe.files.keys())[0]].data

            # try for a fallback default directory
            if not stash:
                stash = data_file.get(".mudpy.filing.stash", "")

            # try for a fallback root path
            if not prefix:
                prefix = data_file.get(".mudpy.filing.prefix", "")

            # try for a fallback search path
            if not search:
                search = data_file.get(".mudpy.filing.search", "")

        # another fallback root path, this time from the universe startdir
        if hasattr(universe, "startdir"):
            if not prefix:
                prefix = universe.startdir
            elif not os.path.isabs(prefix):
                prefix = os.path.join(universe.startdir, prefix)

    # when no root path is specified, assume the directory we started in
    if (not prefix or prefix == ".") and hasattr(universe, "startdir"):
        prefix = universe.startdir

    # make sure it's absolute
    prefix = os.path.realpath(prefix)

    # if there's no search path, just use the root path and etc
    if not search:
        search = [prefix, "etc"]

    # work on a copy of the search path, to avoid modifying the caller's
    # list in place
    search = search[:]

    # if there's no default path, use the last component of the search path
    if not stash:
        stash = search[-1]

    # if an existing file or directory reference was supplied, prepend it
    if relative:
        if os.path.isdir(relative):
            search = [relative] + search
        else:
            search = [os.path.dirname(relative)] + search

    # make the search path entries absolute and throw away any dupes
    clean_search = []
    for each_path in search:
        if not os.path.isabs(each_path):
            each_path = os.path.realpath(os.path.join(prefix, each_path))
        if each_path not in clean_search:
            clean_search.append(each_path)

    # start hunting for the file now
    for each_path in clean_search:

        # construct the candidate path
        candidate = os.path.join(each_path, file_name)

        # if the file exists and is readable, we're done
        if os.path.isfile(candidate):
            file_name = os.path.realpath(candidate)
            break

        # if the path is a directory, look for an __init__ file
        if os.path.isdir(candidate):
            file_name = os.path.realpath(
                    os.path.join(candidate, "__init__.yaml"))
            break

    # it didn't exist after all, so use the default path instead
    if not os.path.isabs(file_name):
        file_name = os.path.join(stash, file_name)
    if not os.path.isabs(file_name):
        file_name = os.path.join(prefix, file_name)

    # and normalize it last thing before returning
    file_name = os.path.realpath(file_name)

    return file_name
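
# A minimal usage sketch (assumptions: "universe" is a loaded
# mudpy.misc.Universe whose mudpy.filing element defines prefix, search
# and stash, and "archetype.yaml" is only an example file name):
#
#     path = find_file("archetype.yaml", universe=universe)
#
# walks the configured search path, substitutes <directory>/__init__.yaml
# when a match is a directory, and otherwise joins the name onto the stash
# (and prefix, if still relative), so callers always get an absolute path.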