- u"""A file containing universe elements."""
- def __init__(self, filename, universe):
- self.filename = filename
- self.universe = universe
- self.load()
- def load(self):
- u"""Read a file and create elements accordingly."""
- import ConfigParser, misc, os, os.path
- self.data = ConfigParser.RawConfigParser()
- self.modified = False
- if os.access(self.filename, os.R_OK): self.data.read(self.filename)
- if not hasattr(self.universe, u"files"): self.universe.files = {}
- self.universe.files[self.filename] = self
- includes = []
- if self.data.has_option(u"__control__", u"include_files"):
- for included in makelist(
- self.data.get(u"__control__", u"include_files")
- ):
- included = find_file(
- included,
- relative=self.filename,
- universe=self.universe
- )
- if included not in includes: includes.append(included)
- if self.data.has_option(u"__control__", u"include_dirs"):
- for included in [
- os.path.join(x, u"__init__.mpy") for x in makelist(
- self.data.get(u"__control__", u"include_dirs")
- )
- ]:
- included = find_file(
- included,
- relative=self.filename,
- universe=self.universe
- )
- if included not in includes: includes.append(included)
- if self.data.has_option(u"__control__", u"default_files"):
- origins = makedict(
- self.data.get(u"__control__", u"default_files")
- )
- for key in origins.keys():
- origins[key] = find_file(
- origins[key],
- relative=self.filename,
- universe=self.universe
- )
- if origins[key] not in includes: includes.append(origins[key])
- self.universe.default_origins[key] = origins[key]
- if key not in self.universe.categories:
- self.universe.categories[key] = {}
- if self.data.has_option(u"__control__", u"private_files"):
- for item in makelist(
- self.data.get(u"__control__", u"private_files")
- ):
- item = find_file(
- item,
- relative=self.filename,
- universe=self.universe
- )
- if item not in includes: includes.append(item)
- if item not in self.universe.private_files:
- self.universe.private_files.append(item)
- for section in self.data.sections():
- if section != u"__control__":
- misc.Element(section, self.universe, self.filename)
- for include_file in includes:
- if not os.path.isabs(include_file):
- include_file = find_file(
- include_file,
- relative=self.filename,
- universe=self.universe
- )
- if include_file not in self.universe.files or not self.universe.files[
- include_file
- ].is_writeable():
- DataFile(include_file, self.universe)
- def save(self):
- u"""Write the data, if necessary."""
- import codecs, os, os.path, re, stat
-
- # when modified, writeable and has content or the file exists
- if self.modified and self.is_writeable() and (
- self.data.sections() or os.path.exists(self.filename)
- ):
-
- # make parent directories if necessary
- if not os.path.exists(os.path.dirname(self.filename)):
- os.makedirs(os.path.dirname(self.filename))
-
- # backup the file
- if self.data.has_option(u"__control__", u"backup_count"):
- max_count = self.data.has_option(u"__control__", u"backup_count")
- else:
- max_count = self.universe.categories[
- u"internal"
- ][
- u"limits"
- ].getint(u"default_backup_count")
- if os.path.exists(self.filename) and max_count:
- backups = []
- for candidate in os.listdir(os.path.dirname(self.filename)):
- if re.match(
- os.path.basename(self.filename) + u"""\.\d+$""", candidate
- ):
- backups.append(int(candidate.split(u".")[-1]))
- backups.sort()
- backups.reverse()
- for old_backup in backups:
- if old_backup >= max_count-1:
- os.remove(self.filename+u"."+unicode(old_backup))
- elif not os.path.exists(
- self.filename+u"."+unicode(old_backup+1)
- ):
- os.rename(
- self.filename + u"."+unicode(old_backup),
- self.filename + u"."+unicode( old_backup + 1 )
- )
- if not os.path.exists(self.filename+u".0"):
- os.rename( self.filename, self.filename + u".0" )
-
- # our data file
- file_descriptor = codecs.open(self.filename, u"w", u"utf-8")
-
- # if it's marked private, chmod it appropriately
- if self.filename in self.universe.private_files and oct(
- stat.S_IMODE( os.stat(self.filename)[stat.ST_MODE] )
- ) != 0600:
- os.chmod(self.filename, 0600)
-
- # write it back sorted, instead of using ConfigParser
- sections = self.data.sections()
- sections.sort()
- for section in sections:
- file_descriptor.write(u"[" + section + u"]\n")
- options = self.data.options(section)
- options.sort()
- for option in options:
- file_descriptor.write(
- option + u" = " + self.data.get(section, option) + u"\n"
- )
- file_descriptor.write(u"\n")
-
- # flush and close the file
- file_descriptor.flush()
- file_descriptor.close()
-
- # unset the modified flag
- self.modified = False
- def is_writeable(self):
- u"""Returns True if the __control__ read_only is False."""
- return not self.data.has_option(
- u"__control__", u"read_only"
- ) or not self.data.getboolean(
- u"__control__", u"read_only"
- )
+
+ """A file containing universe elements and their facets."""
+
+ def __init__(self, filename, universe):
+ self.filename = filename
+ self.universe = universe
+ self.data = {}
+ self.load()
+
+ def load(self):
+ """Read a file, create elements and populate facets accordingly."""
+ self.modified = False
+ try:
+ self.data = yaml.safe_load(open(self.filename))
+ except FileNotFoundError:
+ # it's normal for the file not to exist yet
+ log_entry = ("File %s is unavailable." % self.filename, 6)
+ try:
+ mudpy.misc.log(*log_entry)
+ except NameError:
+ # happens when we're not far enough along in the init process
+ self.universe.setup_loglines.append(log_entry)
+ if not hasattr(self.universe, "files"):
+ self.universe.files = {}
+ self.universe.files[self.filename] = self
+ includes = []
+ if "__control__" in self.data:
+ if "include_files" in self.data["__control__"]:
+ for included in self.data["__control__"]["include_files"]:
+ included = find_file(
+ included,
+ relative=self.filename,
+ universe=self.universe)
+ if included not in includes:
+ includes.append(included)
+ if "include_dirs" in self.data["__control__"]:
+ for included in [
+ os.path.join(x, "__init__.yaml") for x in
+ self.data["__control__"]["include_dirs"]
+ ]:
+ included = find_file(
+ included,
+ relative=self.filename,
+ universe=self.universe
+ )
+ if included not in includes:
+ includes.append(included)
+ if "default_files" in self.data["__control__"]:
+ origins = self.data["__control__"]["default_files"]
+ for key in origins.keys():
+ origins[key] = find_file(
+ origins[key],
+ relative=self.filename,
+ universe=self.universe
+ )
+ if origins[key] not in includes:
+ includes.append(origins[key])
+ self.universe.default_origins[key] = origins[key]
+ if key not in self.universe.categories:
+ self.universe.categories[key] = {}
+ if "private_files" in self.data["__control__"]:
+ for item in self.data["__control__"]["private_files"]:
+ item = find_file(
+ item,
+ relative=self.filename,
+ universe=self.universe
+ )
+ if item not in includes:
+ includes.append(item)
+ if item not in self.universe.private_files:
+ self.universe.private_files.append(item)
+ for node in list(self.data):
+ if node == "__control__":
+ continue
+ facet_pos = node.rfind(".") + 1
+ if not facet_pos:
+ mudpy.misc.Element(node, self.universe, self.filename,
+ old_style=True)
+ else:
+ prefix = node[:facet_pos].strip(".")
+ try:
+ element = self.universe.contents[prefix]
+ except KeyError:
+ element = mudpy.misc.Element(prefix, self.universe,
+ self.filename)
+ element.set(node[facet_pos:], self.data[node])
+ for include_file in includes:
+ if not os.path.isabs(include_file):
+ include_file = find_file(
+ include_file,
+ relative=self.filename,
+ universe=self.universe
+ )
+ if (include_file not in self.universe.files or not
+ self.universe.files[include_file].is_writeable()):
+ DataFile(include_file, self.universe)
+
+ def save(self):
+ """Write the data, if necessary."""
+ normal_umask = 0o0022
+ private_umask = 0o0077
+ private_file_mode = 0o0600
+
+ # when modified, writeable and has content or the file exists
+ if self.modified and self.is_writeable() and (
+ self.data or os.path.exists(self.filename)
+ ):
+
+ # make parent directories if necessary
+ if not os.path.exists(os.path.dirname(self.filename)):
+ old_umask = os.umask(normal_umask)
+ os.makedirs(os.path.dirname(self.filename))
+ os.umask(old_umask)
+
+ # backup the file
+ if "__control__" in self.data and "backup_count" in self.data[
+ "__control__"]:
+ max_count = self.data["__control__"]["backup_count"]
+ else:
+ max_count = self.universe.contents["mudpy.limit"].get(
+ "backups")
+ if os.path.exists(self.filename) and max_count:
+ backups = []
+ for candidate in os.listdir(os.path.dirname(self.filename)):
+ if re.match(
+ os.path.basename(self.filename) +
+ """\.\d+$""", candidate
+ ):
+ backups.append(int(candidate.split(".")[-1]))
+ backups.sort()
+ backups.reverse()
+ for old_backup in backups:
+ if old_backup >= max_count - 1:
+ os.remove(self.filename + "." + str(old_backup))
+ elif not os.path.exists(
+ self.filename + "." + str(old_backup + 1)
+ ):
+ os.rename(
+ self.filename + "." + str(old_backup),
+ self.filename + "." + str(old_backup + 1)
+ )
+ if not os.path.exists(self.filename + ".0"):
+ os.rename(self.filename, self.filename + ".0")
+
+ # our data file
+ if self.filename in self.universe.private_files:
+ old_umask = os.umask(private_umask)
+ file_descriptor = open(self.filename, "w")
+ if oct(stat.S_IMODE(os.stat(
+ self.filename)[stat.ST_MODE])) != private_file_mode:
+ # if it's marked private, chmod it appropriately
+ os.chmod(self.filename, private_file_mode)
+ else:
+ old_umask = os.umask(normal_umask)
+ file_descriptor = open(self.filename, "w")
+ os.umask(old_umask)
+
+ # write and close the file
+ yaml.safe_dump(self.data, allow_unicode=True,
+ default_flow_style=False, stream=file_descriptor)
+ file_descriptor.close()
+
+ # unset the modified flag
+ self.modified = False
+
+ def is_writeable(self):
+ """Return True unless the __control__ read_only option is set true."""
+ try:
+ return not self.data["__control__"].get("read_only", False)
+ except KeyError:
+ return True
+