X-Git-Url: https://mudpy.org/gitweb?p=mudpy.git;a=blobdiff_plain;f=lib%2Fmudpy%2Fdata.py;h=02a913981f9b5b2ce5747e2e1503d288d2619c16;hp=79601769cf181334c27519ef40b73ced192a1f2f;hb=f2e017445df6026bcde0000b2490caee4ae1e7e3;hpb=ad484399f27d5f97fd879f0df794eb09416f4254

diff --git a/lib/mudpy/data.py b/lib/mudpy/data.py
index 7960176..02a9139 100644
--- a/lib/mudpy/data.py
+++ b/lib/mudpy/data.py
@@ -1,298 +1,303 @@
-# -*- coding: utf-8 -*-
-u"""Data interface functions for the mudpy engine."""
+"""Data interface functions for the mudpy engine."""
 
-# Copyright (c) 2004-2010 Jeremy Stanley <fungi@yuggoth.org>. Permission
+# Copyright (c) 2004-2016 Jeremy Stanley <fungi@yuggoth.org>. Permission
 # to use, copy, modify, and distribute this software is granted under
 # terms provided in the LICENSE file distributed with this software.
 
+import os
+import re
+import stat
+
+import mudpy
+import yaml
+
+
 class DataFile:
-    u"""A file containing universe elements."""
-    def __init__(self, filename, universe):
-        self.filename = filename
-        self.universe = universe
-        self.load()
-    def load(self):
-        u"""Read a file and create elements accordingly."""
-        import ConfigParser, misc, os, os.path
-        self.data = ConfigParser.RawConfigParser()
-        self.modified = False
-        if os.access(self.filename, os.R_OK): self.data.read(self.filename)
-        if not hasattr(self.universe, u"files"): self.universe.files = {}
-        self.universe.files[self.filename] = self
-        includes = []
-        if self.data.has_option(u"__control__", u"include_files"):
-            for included in makelist(
-                self.data.get(u"__control__", u"include_files")
-            ):
-                included = find_file(
-                    included,
-                    relative=self.filename,
-                    universe=self.universe
-                )
-                if included not in includes: includes.append(included)
-        if self.data.has_option(u"__control__", u"include_dirs"):
-            for included in [
-                os.path.join(x, u"__init__.mpy") for x in makelist(
-                    self.data.get(u"__control__", u"include_dirs")
-                )
-            ]:
-                included = find_file(
-                    included,
-                    relative=self.filename,
-                    universe=self.universe
-                )
-                if included not in includes: includes.append(included)
-        if self.data.has_option(u"__control__", u"default_files"):
-            origins = makedict(
-                self.data.get(u"__control__", u"default_files")
-            )
-            for key in origins.keys():
-                origins[key] = find_file(
-                    origins[key],
-                    relative=self.filename,
-                    universe=self.universe
-                )
-                if origins[key] not in includes: includes.append(origins[key])
-                self.universe.default_origins[key] = origins[key]
-                if key not in self.universe.categories:
-                    self.universe.categories[key] = {}
-        if self.data.has_option(u"__control__", u"private_files"):
-            for item in makelist(
-                self.data.get(u"__control__", u"private_files")
-            ):
-                item = find_file(
-                    item,
-                    relative=self.filename,
-                    universe=self.universe
-                )
-                if item not in includes: includes.append(item)
-                if item not in self.universe.private_files:
-                    self.universe.private_files.append(item)
-        for section in self.data.sections():
-            if section != u"__control__":
-                misc.Element(section, self.universe, self.filename)
-        for include_file in includes:
-            if not os.path.isabs(include_file):
-                include_file = find_file(
-                    include_file,
-                    relative=self.filename,
-                    universe=self.universe
-                )
-            if include_file not in self.universe.files or not self.universe.files[
-                include_file
-            ].is_writeable():
-                DataFile(include_file, self.universe)
-    def save(self):
-        u"""Write the data, if necessary."""
-        import codecs, os, os.path, re, stat
-
-        # when modified, writeable and has content or the file exists
-        if self.modified and self.is_writeable() and (
-            self.data.sections() or os.path.exists(self.filename)
-        ):
-
-            # make parent directories if necessary
-            if not os.path.exists(os.path.dirname(self.filename)):
-                os.makedirs(os.path.dirname(self.filename))
-
-            # backup the file
-            if self.data.has_option(u"__control__", u"backup_count"):
-                max_count = self.data.has_option(u"__control__", u"backup_count")
-            else:
-                max_count = self.universe.categories[
-                    u"internal"
-                ][
-                    u"limits"
-                ].getint(u"default_backup_count")
-            if os.path.exists(self.filename) and max_count:
-                backups = []
-                for candidate in os.listdir(os.path.dirname(self.filename)):
-                    if re.match(
-                        os.path.basename(self.filename) + u"""\.\d+$""", candidate
-                    ):
-                        backups.append(int(candidate.split(u".")[-1]))
-                backups.sort()
-                backups.reverse()
-                for old_backup in backups:
-                    if old_backup >= max_count-1:
-                        os.remove(self.filename+u"."+unicode(old_backup))
-                    elif not os.path.exists(
-                        self.filename+u"."+unicode(old_backup+1)
-                    ):
-                        os.rename(
-                            self.filename + u"."+unicode(old_backup),
-                            self.filename + u"."+unicode( old_backup + 1 )
-                        )
-                if not os.path.exists(self.filename+u".0"):
-                    os.rename( self.filename, self.filename + u".0" )
-
-            # our data file
-            file_descriptor = codecs.open(self.filename, u"w", u"utf-8")
-
-            # if it's marked private, chmod it appropriately
-            if self.filename in self.universe.private_files and oct(
-                stat.S_IMODE( os.stat(self.filename)[stat.ST_MODE] )
-            ) != 0600:
-                os.chmod(self.filename, 0600)
-
-            # write it back sorted, instead of using ConfigParser
-            sections = self.data.sections()
-            sections.sort()
-            for section in sections:
-                file_descriptor.write(u"[" + section + u"]\n")
-                options = self.data.options(section)
-                options.sort()
-                for option in options:
-                    file_descriptor.write(
-                        option + u" = " + self.data.get(section, option) + u"\n"
-                    )
-                file_descriptor.write(u"\n")
-
-            # flush and close the file
-            file_descriptor.flush()
-            file_descriptor.close()
-
-            # unset the modified flag
-            self.modified = False
-    def is_writeable(self):
-        u"""Returns True if the __control__ read_only is False."""
-        return not self.data.has_option(
-            u"__control__", u"read_only"
-        ) or not self.data.getboolean(
-            u"__control__", u"read_only"
-        )
+
+    """A file containing universe elements and their facets."""
+
+    def __init__(self, filename, universe):
+        self.filename = filename
+        self.universe = universe
+        self.data = {}
+        self.load()
+
+    def load(self):
+        """Read a file, create elements and poplulate facets accordingly."""
+        self.modified = False
+        try:
+            self.data = yaml.safe_load(open(self.filename))
+        except FileNotFoundError:
+            # it's normal if the file is one which doesn't exist yet
+            log_entry = ("File %s is unavailable." % self.filename, 6)
+            try:
+                mudpy.misc.log(*log_entry)
+            except NameError:
+                # happens when we're not far enough along in the init process
+                self.universe.setup_loglines.append(log_entry)
+        if not hasattr(self.universe, "files"):
+            self.universe.files = {}
+        self.universe.files[self.filename] = self
+        includes = []
+        if "__control__" in self.data:
+            if "include_files" in self.data["__control__"]:
+                for included in self.data["__control__"]["include_files"]:
+                    included = find_file(
+                        included,
+                        relative=self.filename,
+                        universe=self.universe)
+                    if included not in includes:
+                        includes.append(included)
+            if "include_dirs" in self.data["__control__"]:
+                for included in [
+                        os.path.join(x, "__init__.yaml") for x in
+                        self.data["__control__"]["include_dirs"]
+                ]:
+                    included = find_file(
+                        included,
+                        relative=self.filename,
+                        universe=self.universe
+                    )
+                    if included not in includes:
+                        includes.append(included)
+            if "default_files" in self.data["__control__"]:
+                origins = self.data["__control__"]["default_files"]
+                for key in origins.keys():
+                    origins[key] = find_file(
+                        origins[key],
+                        relative=self.filename,
+                        universe=self.universe
+                    )
+                    if origins[key] not in includes:
+                        includes.append(origins[key])
+                    self.universe.default_origins[key] = origins[key]
+                    if key not in self.universe.categories:
+                        self.universe.categories[key] = {}
+            if "private_files" in self.data["__control__"]:
+                for item in self.data["__control__"]["private_files"]:
+                    item = find_file(
+                        item,
+                        relative=self.filename,
+                        universe=self.universe
+                    )
+                    if item not in includes:
+                        includes.append(item)
+                    if item not in self.universe.private_files:
+                        self.universe.private_files.append(item)
+        for node in list(self.data):
+            if node == "__control__":
+                continue
+            facet_pos = node.rfind(".") + 1
+            if not facet_pos:
+                mudpy.misc.Element(node, self.universe, self.filename,
+                                   old_style=True)
+            else:
+                prefix = node[:facet_pos].strip(".")
+                try:
+                    element = self.universe.contents[prefix]
+                except KeyError:
+                    element = mudpy.misc.Element(prefix, self.universe,
+                                                 self.filename)
+                element.set(node[facet_pos:], self.data[node])
+        for include_file in includes:
+            if not os.path.isabs(include_file):
+                include_file = find_file(
+                    include_file,
+                    relative=self.filename,
+                    universe=self.universe
+                )
+            if (include_file not in self.universe.files or not
+                    self.universe.files[include_file].is_writeable()):
+                DataFile(include_file, self.universe)
+
+    def save(self):
+        """Write the data, if necessary."""
+        normal_umask = 0o0022
+        private_umask = 0o0077
+        private_file_mode = 0o0600
+
+        # when modified, writeable and has content or the file exists
+        if self.modified and self.is_writeable() and (
+                self.data or os.path.exists(self.filename)
+        ):
+
+            # make parent directories if necessary
+            if not os.path.exists(os.path.dirname(self.filename)):
+                old_umask = os.umask(normal_umask)
+                os.makedirs(os.path.dirname(self.filename))
+                os.umask(old_umask)
+
+            # backup the file
+            if "__control__" in self.data and "backup_count" in self.data[
+                    "__control__"]:
+                max_count = self.data["__control__"]["backup_count"]
+            else:
+                max_count = self.universe.contents["mudpy.limit"].get(
+                    "backups")
+            if os.path.exists(self.filename) and max_count:
+                backups = []
+                for candidate in os.listdir(os.path.dirname(self.filename)):
+                    if re.match(
+                            os.path.basename(self.filename) +
+                            """\.\d+$""", candidate
+                    ):
+                        backups.append(int(candidate.split(".")[-1]))
+                backups.sort()
+                backups.reverse()
+                for old_backup in backups:
+                    if old_backup >= max_count - 1:
+                        os.remove(self.filename + "." + str(old_backup))
+                    elif not os.path.exists(
+                            self.filename + "." + str(old_backup + 1)
+                    ):
+                        os.rename(
+                            self.filename + "." + str(old_backup),
+                            self.filename + "." + str(old_backup + 1)
+                        )
+                if not os.path.exists(self.filename + ".0"):
+                    os.rename(self.filename, self.filename + ".0")
+
+            # our data file
+            if self.filename in self.universe.private_files:
+                old_umask = os.umask(private_umask)
+                file_descriptor = open(self.filename, "w")
+                if oct(stat.S_IMODE(os.stat(
+                        self.filename)[stat.ST_MODE])) != private_file_mode:
+                    # if it's marked private, chmod it appropriately
+                    os.chmod(self.filename, private_file_mode)
+            else:
+                old_umask = os.umask(normal_umask)
+                file_descriptor = open(self.filename, "w")
+            os.umask(old_umask)
+
+            # write and close the file
+            yaml.safe_dump(self.data, allow_unicode=True,
+                           default_flow_style=False, stream=file_descriptor)
+            file_descriptor.close()
+
+            # unset the modified flag
+            self.modified = False
+
+    def is_writeable(self):
+        """Returns True if the __control__ read_only is False."""
+        try:
+            return not self.data["__control__"].get("read_only", False)
+        except KeyError:
+            return True
+
+
 def find_file(
-    file_name=None,
-    root_path=None,
-    search_path=None,
-    default_dir=None,
-    relative=None,
-    universe=None
+        file_name=None,
+        root_path=None,
+        search_path=None,
+        default_dir=None,
+        relative=None,
+        universe=None
 ):
-    u"""Return an absolute file path based on configuration."""
-    import os, os.path, sys
-
-    # make sure to get rid of any surrounding quotes first thing
-    if file_name: file_name = file_name.strip(u"\"'")
-
-    # this is all unnecessary if it's already absolute
-    if file_name and os.path.isabs(file_name):
-        return os.path.realpath(file_name)
-
-    # when no file name is specified, look for <argv[0]>.conf
-    elif not file_name: file_name = os.path.basename( sys.argv[0] ) + u".conf"
-
-    # if a universe was provided, try to get some defaults from there
-    if universe:
-
-        if hasattr(
-            universe,
-            u"contents"
-        ) and u"internal:storage" in universe.contents:
-            storage = universe.categories[u"internal"][u"storage"]
-            if not root_path: root_path = storage.get(u"root_path").strip("\"'")
-            if not search_path: search_path = storage.getlist(u"search_path")
-            if not default_dir:
-                default_dir = storage.get(u"default_dir").strip("\"'")
-
-        # if there's only one file loaded, try to work around a chicken 0: return eval(u"{" + value + u"}")
-    else: return { value: None }
+    """Return an absolute file path based on configuration."""
+
+    # make sure to get rid of any surrounding quotes first thing
+    if file_name:
+        file_name = file_name.strip("\"'")
+
+    # this is all unnecessary if it's already absolute
+    if file_name and os.path.isabs(file_name):
+        return os.path.realpath(file_name)
+
+    # if a universe was provided, try to get some defaults from there
+    if universe:
+
+        if hasattr(
+            universe,
+            "contents"
+        ) and "internal:storage" in universe.contents:
+            storage = universe.categories["internal"]["storage"]
+            if not root_path:
+                root_path = storage.get("root_path").strip("\"'")
+            if not search_path:
+                search_path = storage.get("search_path")
+            if not default_dir:
+                default_dir = storage.get("default_dir").strip("\"'")
+
+        # if there's only one file loaded, try to work around a chicken