Move lib/mudpy to mudpy for packaging preparation
diff --git a/lib/mudpy/data.py b/lib/mudpy/data.py
deleted file mode 100644 (file)
index 00ab923..0000000
+++ /dev/null
@@ -1,306 +0,0 @@
-"""Data interface functions for the mudpy engine."""
-
-# Copyright (c) 2004-2016 Jeremy Stanley <fungi@yuggoth.org>. Permission
-# to use, copy, modify, and distribute this software is granted under
-# terms provided in the LICENSE file distributed with this software.
-
-import os
-import re
-import stat
-
-import mudpy
-import yaml
-
-
-class DataFile:
-
-    """A file containing universe elements and their facets."""
-
-    def __init__(self, filename, universe):
-        self.filename = filename
-        self.universe = universe
-        self.data = {}
-        self.load()
-
-    def load(self):
-        """Read a file, create elements and poplulate facets accordingly."""
-        self.modified = False
-        try:
-            with open(self.filename) as file_descriptor:
-                self.data = yaml.safe_load(file_descriptor)
-        except FileNotFoundError:
-            # it's normal if the file is one which doesn't exist yet
-            log_entry = ("File %s is unavailable." % self.filename, 6)
-            try:
-                mudpy.misc.log(*log_entry)
-            except NameError:
-                # happens when we're not far enough along in the init process
-                self.universe.setup_loglines.append(log_entry)
-        if not hasattr(self.universe, "files"):
-            self.universe.files = {}
-        self.universe.files[self.filename] = self
-        includes = []
-        if "__control__" in self.data:
-            if "include_files" in self.data["__control__"]:
-                for included in self.data["__control__"]["include_files"]:
-                    included = find_file(
-                        included,
-                        relative=self.filename,
-                        universe=self.universe)
-                    if included not in includes:
-                        includes.append(included)
-            if "include_dirs" in self.data["__control__"]:
-                for included in [
-                    os.path.join(x, "__init__.yaml") for x in
-                        self.data["__control__"]["include_dirs"]
-                ]:
-                    included = find_file(
-                        included,
-                        relative=self.filename,
-                        universe=self.universe
-                    )
-                    if included not in includes:
-                        includes.append(included)
-            if "default_files" in self.data["__control__"]:
-                origins = self.data["__control__"]["default_files"]
-                for key in origins.keys():
-                    origins[key] = find_file(
-                        origins[key],
-                        relative=self.filename,
-                        universe=self.universe
-                    )
-                    if origins[key] not in includes:
-                        includes.append(origins[key])
-                    self.universe.default_origins[key] = origins[key]
-                    if key not in self.universe.categories:
-                        self.universe.categories[key] = {}
-            if "private_files" in self.data["__control__"]:
-                for item in self.data["__control__"]["private_files"]:
-                    item = find_file(
-                        item,
-                        relative=self.filename,
-                        universe=self.universe
-                    )
-                    if item not in includes:
-                        includes.append(item)
-                    if item not in self.universe.private_files:
-                        self.universe.private_files.append(item)
-        for node in list(self.data):
-            if node == "__control__":
-                continue
-            facet_pos = node.rfind(".") + 1
-            if not facet_pos:
-                mudpy.misc.Element(node, self.universe, self.filename,
-                                   old_style=True)
-            else:
-                prefix = node[:facet_pos].strip(".")
-                try:
-                    element = self.universe.contents[prefix]
-                except KeyError:
-                    element = mudpy.misc.Element(
-                        prefix, self.universe, self.filename)
-                element.set(node[facet_pos:], self.data[node])
-                if prefix.startswith("mudpy.movement."):
-                    self.universe.directions.add(
-                        prefix[prefix.rfind(".") + 1:])
-        for include_file in includes:
-            if not os.path.isabs(include_file):
-                include_file = find_file(
-                    include_file,
-                    relative=self.filename,
-                    universe=self.universe
-                )
-            if (include_file not in self.universe.files or not
-                    self.universe.files[include_file].is_writeable()):
-                DataFile(include_file, self.universe)
-
-    def save(self):
-        """Write the data, if necessary."""
-        normal_umask = 0o0022
-        private_umask = 0o0077
-        private_file_mode = 0o0600
-
-        # only write when the file is modified and writeable, and it
-        # either has data or already exists on disk
-        if self.modified and self.is_writeable() and (
-           self.data or os.path.exists(self.filename)
-           ):
-
-            # make parent directories if necessary
-            if not os.path.exists(os.path.dirname(self.filename)):
-                old_umask = os.umask(normal_umask)
-                os.makedirs(os.path.dirname(self.filename))
-                os.umask(old_umask)
-
-            # backup the file
-            if "__control__" in self.data and "backup_count" in self.data[
-                    "__control__"]:
-                max_count = self.data["__control__"]["backup_count"]
-            else:
-                max_count = self.universe.contents["mudpy.limit"].get(
-                    "backups")
-            if os.path.exists(self.filename) and max_count:
-                backups = []
-                for candidate in os.listdir(os.path.dirname(self.filename)):
-                    if re.match(
-                            os.path.basename(self.filename) + r"\.\d+$",
-                            candidate):
-                        backups.append(int(candidate.split(".")[-1]))
-                backups.sort()
-                backups.reverse()
-                for old_backup in backups:
-                    if old_backup >= max_count - 1:
-                        os.remove(self.filename + "." + str(old_backup))
-                    elif not os.path.exists(
-                        self.filename + "." + str(old_backup + 1)
-                    ):
-                        os.rename(
-                            self.filename + "." + str(old_backup),
-                            self.filename + "." + str(old_backup + 1)
-                        )
-                if not os.path.exists(self.filename + ".0"):
-                    os.rename(self.filename, self.filename + ".0")
-
-            # our data file
-            if self.filename in self.universe.private_files:
-                old_umask = os.umask(private_umask)
-                file_descriptor = open(self.filename, "w")
-                if stat.S_IMODE(os.stat(
-                        self.filename).st_mode) != private_file_mode:
-                    # if it's marked private, chmod it appropriately
-                    os.chmod(self.filename, private_file_mode)
-            else:
-                old_umask = os.umask(normal_umask)
-                file_descriptor = open(self.filename, "w")
-            os.umask(old_umask)
-
-            # write and close the file
-            yaml.safe_dump(self.data, allow_unicode=True,
-                           default_flow_style=False, stream=file_descriptor)
-            file_descriptor.close()
-
-            # unset the modified flag
-            self.modified = False
-
-    def is_writeable(self):
-        """Returns True if the __control__ read_only is False."""
-        try:
-            return not self.data["__control__"].get("read_only", False)
-        except KeyError:
-            return True
-
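For orientation, the sketch below loads a small hypothetical document with
PyYAML and applies the same read_only test that is_writeable() uses; the
file name, element name, and facet text are illustrative only, and no
universe object is involved.

import yaml

# Hypothetical document shaped like what DataFile.load() walks: a
# "__control__" section plus element.facet nodes; all names are made up.
sample = yaml.safe_load("""
__control__:
  include_files:
    - archetypes.yaml
  read_only: yes
actor:sample.description: A generic example actor.
""")

# Same test as is_writeable(): a read_only control flag blocks writes.
writeable = not sample.get("__control__", {}).get("read_only", False)
print(writeable)  # False, because read_only is set above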
-
-def find_file(
-    file_name=None,
-    root_path=None,
-    search_path=None,
-    default_dir=None,
-    relative=None,
-    universe=None
-):
-    """Return an absolute file path based on configuration."""
-
-    # make sure to get rid of any surrounding quotes first thing
-    if file_name:
-        file_name = file_name.strip("\"'")
-
-    # this is all unnecessary if it's already absolute
-    if file_name and os.path.isabs(file_name):
-        return os.path.realpath(file_name)
-
-    # if a universe was provided, try to get some defaults from there
-    if universe:
-
-        if (hasattr(universe, "contents")
-                and "internal:storage" in universe.contents):
-            storage = universe.categories["internal"]["storage"]
-            if not root_path:
-                root_path = storage.get("root_path").strip("\"'")
-            if not search_path:
-                search_path = storage.get("search_path")
-            if not default_dir:
-                default_dir = storage.get("default_dir").strip("\"'")
-
-        # if there's only one file loaded, try to work around a
-        # chicken-and-egg problem by pulling defaults from that file
-        elif (hasattr(universe, "files") and len(universe.files) == 1
-                and not list(universe.files.values())[0].is_writeable()):
-            data_file = list(universe.files.values())[0].data
-
-            # try for a fallback default directory
-            if not default_dir:
-                default_dir = data_file.get(
-                    "internal:storage", {}).get("default_dir", "")
-
-            # try for a fallback root path
-            if not root_path:
-                root_path = data_file.get(
-                    "internal:storage", {}).get("root_path", "")
-
-            # try for a fallback search path
-            if not search_path:
-                search_path = data_file.get(
-                    "internal:storage", {}).get("search_path", "")
-
-        # another fallback root path, this time from the universe startdir
-        if not root_path and hasattr(universe, "startdir"):
-            root_path = universe.startdir
-
-    # when no root path is specified, assume the current working directory
-    if not root_path:
-        root_path = os.getcwd()
-
-    # otherwise, make sure it's absolute
-    elif not os.path.isabs(root_path):
-        root_path = os.path.realpath(root_path)
-
-    # if there's no search path, fall back to the root path and an "etc" dir
-    if not search_path:
-        search_path = [root_path, "etc"]
-
-    # work on a copy of the search path, to avoid modifying the caller's
-    else:
-        search_path = search_path[:]
-
-    # if there's no default path, use the last component of the search path
-    if not default_dir:
-        default_dir = search_path[-1]
-
-    # if an existing file or directory reference was supplied, prepend it
-    if relative:
-        relative = relative.strip("\"'")
-        if os.path.isdir(relative):
-            search_path = [relative] + search_path
-        else:
-            search_path = [os.path.dirname(relative)] + search_path
-
-    # make the search path entries absolute and throw away any dupes
-    clean_search_path = []
-    for each_path in search_path:
-        each_path = each_path.strip("\"'")
-        if not os.path.isabs(each_path):
-            each_path = os.path.realpath(os.path.join(root_path, each_path))
-        if each_path not in clean_search_path:
-            clean_search_path.append(each_path)
-
-    # start hunting for the file now
-    for each_path in clean_search_path:
-
-        # if the file exists and is readable, we're done
-        if os.path.isfile(os.path.join(each_path, file_name)):
-            file_name = os.path.realpath(os.path.join(each_path, file_name))
-            break
-
-    # it didn't exist after all, so use the default path instead
-    if not os.path.isabs(file_name):
-        file_name = os.path.join(default_dir, file_name)
-    if not os.path.isabs(file_name):
-        file_name = os.path.join(root_path, file_name)
-
-    # normalize the resulting file path and hand it back
-    file_name = os.path.realpath(file_name)
-    return file_name
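A minimal usage sketch for find_file(), assuming the mudpy package is
importable and passing no universe, so only the generic root path, search
path, and default directory fallbacks above come into play; the directory
and file names are hypothetical.

import mudpy.data

# Made-up paths; with no universe, only the generic fallbacks apply.
path = mudpy.data.find_file(
    "account.yaml",
    root_path="/var/lib/mudpy",
    search_path=["data", "etc"],
)
print(path)  # e.g. /var/lib/mudpy/etc/account.yaml if no such file exists yet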