+ """A file containing universe elements."""
+
def __init__(self, filename, universe):
    """Remember the target file and owning universe, then parse the file."""
    self.universe = universe
    self.filename = filename
    self.load()
+
def load(self):
    """Read a file and create elements accordingly.

    Dispatches to the parser matching the file's extension.
    """
    # TODO(fungi): remove this indirection after the YAML transition
    if self.filename.endswith('.yaml'):
        loader = self.load_yaml
    else:
        loader = self.load_mpy
    loader()
+
def load_yaml(self):
    """Read a YAML file and create elements accordingly.

    Parses self.filename, registers this object in
    self.universe.files, honors the __control__ node's
    include_files/include_dirs/default_files/private_files
    directives, creates a mudpy.misc.Element for every other
    top-level node, and recursively loads referenced files.
    """
    # TODO(fungi): remove this parameter after the YAML transition
    self._format = 'yaml'
    self.modified = False
    # bug fix: start with an empty mapping so a missing (or empty)
    # file no longer leaves self.data unset, which made the
    # "__control__" lookup below raise AttributeError
    self.data = {}
    try:
        # safe_load refuses arbitrary-object construction, and the
        # with block closes the handle the old code leaked
        with open(self.filename) as file_descriptor:
            self.data = yaml.safe_load(file_descriptor) or {}
    except FileNotFoundError:
        # it's normal if the file is one which doesn't exist yet
        try:
            mudpy.misc.log("Couldn't read %s file." % self.filename, 6)
        except NameError:
            # happens when we're not far enough along in the init process
            pass
    # register this file in the universe-wide index
    if not hasattr(self.universe, "files"):
        self.universe.files = {}
    self.universe.files[self.filename] = self
    includes = []
    if "__control__" in self.data:
        if "include_files" in self.data["__control__"]:
            for included in makelist(
                    self.data["__control__"]["include_files"]):
                # resolve the path relative to this file's location
                included = find_file(
                    included,
                    relative=self.filename,
                    universe=self.universe)
                if included not in includes:
                    includes.append(included)
        if "include_dirs" in self.data["__control__"]:
            # each included directory is represented by its __init__.mpy
            for included in [
                os.path.join(x, "__init__.mpy") for x in makelist(
                    self.data["__control__"]["include_dirs"]
                )
            ]:
                included = find_file(
                    included,
                    relative=self.filename,
                    universe=self.universe
                )
                if included not in includes:
                    includes.append(included)
        if "default_files" in self.data["__control__"]:
            # mapping of category name to the file which stores new
            # elements of that category by default
            origins = makedict(
                self.data["__control__"]["default_files"]
            )
            for key in origins.keys():
                origins[key] = find_file(
                    origins[key],
                    relative=self.filename,
                    universe=self.universe
                )
                if origins[key] not in includes:
                    includes.append(origins[key])
                self.universe.default_origins[key] = origins[key]
                if key not in self.universe.categories:
                    self.universe.categories[key] = {}
        if "private_files" in self.data["__control__"]:
            # files whose permissions get restricted on save
            for item in makelist(
                self.data["__control__"]["private_files"]
            ):
                item = find_file(
                    item,
                    relative=self.filename,
                    universe=self.universe
                )
                if item not in includes:
                    includes.append(item)
                if item not in self.universe.private_files:
                    self.universe.private_files.append(item)
    # every non-control node defines one universe element
    for element in self.data:
        if element != "__control__":
            mudpy.misc.Element(element, self.universe, self.filename)
    # recurse into referenced files which aren't already loaded
    # (files flagged read-only are reloaded, since they can't lose changes)
    for include_file in includes:
        if not os.path.isabs(include_file):
            include_file = find_file(
                include_file,
                relative=self.filename,
                universe=self.universe
            )
        if (include_file not in self.universe.files or not
                self.universe.files[include_file].is_writeable()):
            DataFile(include_file, self.universe)
+
# TODO(fungi): remove this method after the YAML transition
def load_mpy(self):
    """Read a file and create elements accordingly.

    Parses self.filename as an INI-style .mpy file with
    configparser, registers this object in self.universe.files,
    honors the [__control__] section's include_files/include_dirs/
    default_files/private_files options, creates a
    mudpy.misc.Element for every other section, and recursively
    loads referenced files.
    """
    self._format = 'mpy'
    # freshly-loaded data has no unsaved changes yet
    self.modified = False
    self.data = configparser.RawConfigParser()
    # an unreadable or absent file is tolerated; self.data stays empty
    if os.access(self.filename, os.R_OK):
        self.data.read(self.filename)
    # register this file in the universe-wide index
    if not hasattr(self.universe, "files"):
        self.universe.files = {}
    self.universe.files[self.filename] = self
    # paths of additional files referenced by this one
    includes = []
    if self.data.has_option("__control__", "include_files"):
        for included in makelist(
            self.data.get("__control__", "include_files")
        ):
            # resolve the path relative to this file's location
            included = find_file(
                included,
                relative=self.filename,
                universe=self.universe
            )
            if included not in includes:
                includes.append(included)
    if self.data.has_option("__control__", "include_dirs"):
        # each included directory is represented by its __init__.mpy
        for included in [
            os.path.join(x, "__init__.mpy") for x in makelist(
                self.data.get("__control__", "include_dirs")
            )
        ]:
            included = find_file(
                included,
                relative=self.filename,
                universe=self.universe
            )
            if included not in includes:
                includes.append(included)
    if self.data.has_option("__control__", "default_files"):
        # mapping of category name to the file which stores new
        # elements of that category by default
        origins = makedict(
            self.data.get("__control__", "default_files")
        )
        for key in origins.keys():
            origins[key] = find_file(
                origins[key],
                relative=self.filename,
                universe=self.universe
            )
            if origins[key] not in includes:
                includes.append(origins[key])
            self.universe.default_origins[key] = origins[key]
            if key not in self.universe.categories:
                self.universe.categories[key] = {}
    if self.data.has_option("__control__", "private_files"):
        # files whose permissions get restricted on save
        for item in makelist(
            self.data.get("__control__", "private_files")
        ):
            item = find_file(
                item,
                relative=self.filename,
                universe=self.universe
            )
            if item not in includes:
                includes.append(item)
            if item not in self.universe.private_files:
                self.universe.private_files.append(item)
    # every non-control section defines one universe element
    for section in self.data.sections():
        if section != "__control__":
            mudpy.misc.Element(section, self.universe, self.filename)
    # recurse into referenced files which aren't already loaded
    # (files flagged read-only are reloaded, since they can't lose changes)
    for include_file in includes:
        if not os.path.isabs(include_file):
            include_file = find_file(
                include_file,
                relative=self.filename,
                universe=self.universe
            )
        if (include_file not in self.universe.files or not
                self.universe.files[include_file].is_writeable()):
            DataFile(include_file, self.universe)
+
# TODO(fungi): this should support writing YAML
def save(self):
    """Write the data, if necessary.

    Only acts when the data was modified, the file is not flagged
    read-only, and there is either content to write or an existing
    file to replace. Rotates numbered backups (filename.0 newest)
    before overwriting, and restricts permissions on private files.
    """

    # when modified, writeable and has content or the file exists
    if self.modified and self.is_writeable() and (
        self.data.sections() or os.path.exists(self.filename)
    ):

        # make parent directories if necessary
        if not os.path.exists(os.path.dirname(self.filename)):
            os.makedirs(os.path.dirname(self.filename))

        # how many backup generations to keep; the file's own
        # setting overrides the universe-wide default
        if self.data.has_option("__control__", "backup_count"):
            # bug fix: the original assigned has_option's boolean
            # result here instead of reading the option's value
            max_count = self.data.getint(
                "__control__", "backup_count")
        else:
            max_count = self.universe.categories[
                "internal"
            ][
                "limits"
            ].getint("default_backup_count")
        if os.path.exists(self.filename) and max_count:
            backups = []
            for candidate in os.listdir(os.path.dirname(self.filename)):
                # bug fix: use a raw, escaped pattern so backslash
                # sequences and dots in the filename match literally
                if re.match(
                    re.escape(os.path.basename(self.filename)) +
                    r"\.\d+$", candidate
                ):
                    backups.append(int(candidate.split(".")[-1]))
            backups.sort()
            backups.reverse()
            for old_backup in backups:
                # bug fix: the numeric suffixes are ints, so they
                # must be formatted into the path rather than
                # concatenated directly (which raised TypeError)
                if old_backup >= max_count - 1:
                    os.remove("%s.%s" % (self.filename, old_backup))
                elif not os.path.exists(
                    "%s.%s" % (self.filename, old_backup + 1)
                ):
                    os.rename(
                        "%s.%s" % (self.filename, old_backup),
                        "%s.%s" % (self.filename, old_backup + 1)
                    )
            if not os.path.exists(self.filename + ".0"):
                os.rename(self.filename, self.filename + ".0")

        # our data file
        file_descriptor = codecs.open(self.filename, "w", "utf-8")

        # if it's marked private, chmod it appropriately
        # bug fix: oct() returns a str, which never equals the int
        # 0o0600, so the mode was "fixed" on every save; compare
        # the numeric modes instead
        if self.filename in self.universe.private_files and stat.S_IMODE(
            os.stat(self.filename)[stat.ST_MODE]
        ) != 0o0600:
            os.chmod(self.filename, 0o0600)

        # write it back sorted, instead of using configparser
        sections = self.data.sections()
        sections.sort()
        for section in sections:
            file_descriptor.write("[" + section + "]\n")
            options = self.data.options(section)
            options.sort()
            for option in options:
                file_descriptor.write(
                    option + " = " +
                    self.data.get(section, option) + "\n"
                )
            file_descriptor.write("\n")

        # flush and close the file
        file_descriptor.flush()
        file_descriptor.close()

        # unset the modified flag
        self.modified = False
+
# TODO(fungi): remove the mpy branch after the YAML transition
def is_writeable(self):
    """Return True unless the __control__ read_only flag is set.

    Absence of the flag means the file is writeable. Supports both
    the YAML (dict) and legacy mpy (configparser) data formats;
    the original implementation crashed on YAML data, since a
    plain dict has no has_option method.
    """
    if self._format == 'yaml':
        control = self.data.get("__control__", {})
        return not control.get("read_only", False)
    return not self.data.has_option(
        "__control__", "read_only"
    ) or not self.data.getboolean(
        "__control__", "read_only"
    )