-# -*- coding: utf-8 -*-
"""Data interface functions for the mudpy engine."""
-# Copyright (c) 2004-2014 Jeremy Stanley <fungi@yuggoth.org>. Permission
+# Copyright (c) 2004-2016 Jeremy Stanley <fungi@yuggoth.org>. Permission
# to use, copy, modify, and distribute this software is granted under
# terms provided in the LICENSE file distributed with this software.
-import codecs
-import configparser
import os
import re
import stat
-import sys
import mudpy
import yaml
class DataFile:
- """A file containing universe elements."""
+ """A file containing universe elements and their facets."""
def __init__(self, filename, universe):
self.filename = filename
self.load()
def load(self):
- """Read a file and create elements accordingly."""
- # TODO(fungi): remove this indirection after the YAML transition
- if self.filename.endswith('.yaml'):
- self.load_yaml()
- else:
- self.load_mpy()
-
- def load_yaml(self):
- """Read a file and create elements accordingly."""
- # TODO(fungi): remove this parameter after the YAML transition
- self._format = 'yaml'
+        """Read a file, create elements and populate facets accordingly."""
self.modified = False
try:
- self.data = yaml.load(open(self.filename))
+ self.data = yaml.safe_load(open(self.filename))
except FileNotFoundError:
# it's normal if the file is one which doesn't exist yet
+ log_entry = ("File %s is unavailable." % self.filename, 6)
try:
- mudpy.misc.log("Couldn't read %s file." % self.filename, 6)
+ mudpy.misc.log(*log_entry)
except NameError:
# happens when we're not far enough along in the init process
- pass
+ self.universe.setup_loglines.append(log_entry)
if not hasattr(self.universe, "files"):
self.universe.files = {}
self.universe.files[self.filename] = self
includes = []
if "__control__" in self.data:
if "include_files" in self.data["__control__"]:
- for included in makelist(
- self.data["__control__"]["include_files"]):
+ for included in self.data["__control__"]["include_files"]:
included = find_file(
included,
relative=self.filename,
includes.append(included)
if "include_dirs" in self.data["__control__"]:
for included in [
- os.path.join(x, "__init__.mpy") for x in makelist(
+ os.path.join(x, "__init__.yaml") for x in
self.data["__control__"]["include_dirs"]
- )
]:
included = find_file(
included,
if included not in includes:
includes.append(included)
if "default_files" in self.data["__control__"]:
- origins = makedict(
- self.data["__control__"]["default_files"]
- )
+ origins = self.data["__control__"]["default_files"]
for key in origins.keys():
origins[key] = find_file(
origins[key],
if key not in self.universe.categories:
self.universe.categories[key] = {}
if "private_files" in self.data["__control__"]:
- for item in makelist(
- self.data["__control__"]["private_files"]
- ):
+ for item in self.data["__control__"]["private_files"]:
item = find_file(
item,
relative=self.filename,
includes.append(item)
if item not in self.universe.private_files:
self.universe.private_files.append(item)
- for element in self.data:
- if element != "__control__":
- mudpy.misc.Element(element, self.universe, self.filename)
+ for node in self.data:
+ if node != "__control__":
+ mudpy.misc.Element(node, self.universe, self.filename)
for include_file in includes:
if not os.path.isabs(include_file):
include_file = find_file(
self.universe.files[include_file].is_writeable()):
DataFile(include_file, self.universe)
- # TODO(fungi): remove this method after the YAML transition
- def load_mpy(self):
- """Read a file and create elements accordingly."""
- self._format = 'mpy'
- self.modified = False
- self.data = configparser.RawConfigParser()
- if os.access(self.filename, os.R_OK):
- self.data.read(self.filename)
- if not hasattr(self.universe, "files"):
- self.universe.files = {}
- self.universe.files[self.filename] = self
- includes = []
- if self.data.has_option("__control__", "include_files"):
- for included in makelist(
- self.data.get("__control__", "include_files")
- ):
- included = find_file(
- included,
- relative=self.filename,
- universe=self.universe
- )
- if included not in includes:
- includes.append(included)
- if self.data.has_option("__control__", "include_dirs"):
- for included in [
- os.path.join(x, "__init__.mpy") for x in makelist(
- self.data.get("__control__", "include_dirs")
- )
- ]:
- included = find_file(
- included,
- relative=self.filename,
- universe=self.universe
- )
- if included not in includes:
- includes.append(included)
- if self.data.has_option("__control__", "default_files"):
- origins = makedict(
- self.data.get("__control__", "default_files")
- )
- for key in origins.keys():
- origins[key] = find_file(
- origins[key],
- relative=self.filename,
- universe=self.universe
- )
- if origins[key] not in includes:
- includes.append(origins[key])
- self.universe.default_origins[key] = origins[key]
- if key not in self.universe.categories:
- self.universe.categories[key] = {}
- if self.data.has_option("__control__", "private_files"):
- for item in makelist(
- self.data.get("__control__", "private_files")
- ):
- item = find_file(
- item,
- relative=self.filename,
- universe=self.universe
- )
- if item not in includes:
- includes.append(item)
- if item not in self.universe.private_files:
- self.universe.private_files.append(item)
- for section in self.data.sections():
- if section != "__control__":
- mudpy.misc.Element(section, self.universe, self.filename)
- for include_file in includes:
- if not os.path.isabs(include_file):
- include_file = find_file(
- include_file,
- relative=self.filename,
- universe=self.universe
- )
- if (include_file not in self.universe.files or not
- self.universe.files[include_file].is_writeable()):
- DataFile(include_file, self.universe)
-
- # TODO(fungi): this should support writing YAML
def save(self):
"""Write the data, if necessary."""
+ normal_umask = 0o0022
+ private_umask = 0o0077
+ private_file_mode = 0o0600
# when modified, writeable and has content or the file exists
if self.modified and self.is_writeable() and (
- self.data.sections() or os.path.exists(self.filename)
+ self.data or os.path.exists(self.filename)
):
# make parent directories if necessary
if not os.path.exists(os.path.dirname(self.filename)):
+ old_umask = os.umask(normal_umask)
os.makedirs(os.path.dirname(self.filename))
+ os.umask(old_umask)
# backup the file
- if self.data.has_option("__control__", "backup_count"):
- max_count = self.data.has_option(
- "__control__", "backup_count")
+ if "__control__" in self.data and "backup_count" in self.data[
+ "__control__"]:
+ max_count = self.data["__control__"]["backup_count"]
else:
max_count = self.universe.categories[
"internal"
][
"limits"
- ].getint("default_backup_count")
+ ].get("default_backup_count")
if os.path.exists(self.filename) and max_count:
backups = []
for candidate in os.listdir(os.path.dirname(self.filename)):
backups.reverse()
for old_backup in backups:
if old_backup >= max_count - 1:
- os.remove(self.filename + "." + old_backup)
+ os.remove(self.filename + "." + str(old_backup))
elif not os.path.exists(
- self.filename + "." + old_backup + 1
+ self.filename + "." + str(old_backup + 1)
):
os.rename(
- self.filename + "." + old_backup,
- self.filename + "." + old_backup + 1
+ self.filename + "." + str(old_backup),
+ self.filename + "." + str(old_backup + 1)
)
if not os.path.exists(self.filename + ".0"):
os.rename(self.filename, self.filename + ".0")
# our data file
- file_descriptor = codecs.open(self.filename, "w", "utf-8")
-
- # if it's marked private, chmod it appropriately
- if self.filename in self.universe.private_files and oct(
- stat.S_IMODE(os.stat(self.filename)[stat.ST_MODE])
- ) != 0o0600:
- os.chmod(self.filename, 0o0600)
-
- # write it back sorted, instead of using configparser
- sections = self.data.sections()
- sections.sort()
- for section in sections:
- file_descriptor.write("[" + section + "]\n")
- options = self.data.options(section)
- options.sort()
- for option in options:
- file_descriptor.write(
- option + " = " +
- self.data.get(section, option) + "\n"
- )
- file_descriptor.write("\n")
+ if self.filename in self.universe.private_files:
+ old_umask = os.umask(private_umask)
+ file_descriptor = open(self.filename, "w")
+            if stat.S_IMODE(os.stat(
+                    self.filename)[stat.ST_MODE]) != private_file_mode:
+ # if it's marked private, chmod it appropriately
+ os.chmod(self.filename, private_file_mode)
+ else:
+ old_umask = os.umask(normal_umask)
+ file_descriptor = open(self.filename, "w")
+ os.umask(old_umask)
- # flush and close the file
- file_descriptor.flush()
+ # write and close the file
+ yaml.safe_dump(self.data, allow_unicode=True,
+ default_flow_style=False, stream=file_descriptor)
file_descriptor.close()
# unset the modified flag
def is_writeable(self):
"""Returns True if the __control__ read_only is False."""
- # TODO(fungi): remove this indirection after the YAML transition
- if self._format == "yaml":
- try:
- return not self.data["__control__"].get("read_only", False)
- except KeyError:
- return True
- else:
- return not self.data.has_option(
- "__control__", "read_only"
- ) or not self.data.getboolean(
- "__control__", "read_only"
- )
+ try:
+ return not self.data["__control__"].get("read_only", False)
+ except KeyError:
+ return True
def find_file(
if file_name and os.path.isabs(file_name):
return os.path.realpath(file_name)
- # when no file name is specified, look for <argv[0]>.conf
- elif not file_name:
- file_name = os.path.basename(sys.argv[0]) + ".conf"
-
# if a universe was provided, try to get some defaults from there
if universe:
if not root_path:
root_path = storage.get("root_path").strip("\"'")
if not search_path:
- search_path = storage.getlist("search_path")
+ search_path = storage.get("search_path")
if not default_dir:
default_dir = storage.get("default_dir").strip("\"'")
data_file = universe.files[list(universe.files.keys())[0]].data
# try for a fallback default directory
- if not default_dir and data_file.has_option(
- "internal:storage",
- "default_dir"
- ):
+ if not default_dir:
default_dir = data_file.get(
- "internal:storage",
- "default_dir"
- ).strip("\"'")
+                "internal:storage", {}).get("default_dir", "")
# try for a fallback root path
- if not root_path and data_file.has_option(
- "internal:storage",
- "root_path"
- ):
+ if not root_path:
root_path = data_file.get(
- "internal:storage",
- "root_path"
- ).strip("\"'")
+                "internal:storage", {}).get("root_path", "")
# try for a fallback search path
- if not search_path and data_file.has_option(
- "internal:storage",
- "search_path"
- ):
- search_path = makelist(
- data_file.get("internal:storage",
- "search_path").strip("\"'")
- )
+        if not search_path:
+            search_path = data_file.get(
+                "internal:storage", {}).get("search_path", "")
# another fallback root path, this time from the universe startdir
if not root_path and hasattr(universe, "startdir"):
else:
search_path = search_path[:]
- # if there's no default path, use the last element of the search path
+ # if there's no default path, use the last component of the search path
if not default_dir:
default_dir = search_path[-1]
# normalize the resulting file path and hand it back
return file_name
-
-
-def makelist(value):
- """Turn string into list type."""
- if value[0] + value[-1] == "[]":
- return eval(value)
- elif value[0] + value[-1] == "\"\"":
- return [value[1:-1]]
- else:
- return [value]
-
-
-def makedict(value):
- """Turn string into dict type."""
- if value[0] + value[-1] == "{}":
- return eval(value)
- elif value.find(":") > 0:
- return eval("{" + value + "}")
- else:
- return {value: None}