Begin the transition from INI to YAML
[mudpy.git] / lib / mudpy / data.py
index 3ed5e0f..db27d55 100644 (file)
@@ -1,14 +1,24 @@
 # -*- coding: utf-8 -*-
-u"""Data interface functions for the mudpy engine."""
+"""Data interface functions for the mudpy engine."""
 
-# Copyright (c) 2004-2011 Jeremy Stanley <fungi@yuggoth.org>. Permission
+# Copyright (c) 2004-2014 Jeremy Stanley <fungi@yuggoth.org>. Permission
 # to use, copy, modify, and distribute this software is granted under
 # terms provided in the LICENSE file distributed with this software.
 
+import codecs
+import configparser
+import os
+import re
+import stat
+import sys
+
+import mudpy
+import yaml
+
 
 class DataFile:
 
-    u"""A file containing universe elements."""
+    """A file containing universe elements."""
 
     def __init__(self, filename, universe):
         self.filename = filename
@@ -16,22 +26,111 @@ class DataFile:
         self.load()
 
     def load(self):
-        u"""Read a file and create elements accordingly."""
-        import ConfigParser
-        import misc
-        import os
-        import os.path
-        self.data = ConfigParser.RawConfigParser()
+        """Read a file and create elements accordingly."""
+        # TODO(fungi): remove this indirection after the YAML transition
+        if self.filename.endswith('.yaml'):
+            self.load_yaml()
+        else:
+            self.load_mpy()
+
+    def load_yaml(self):
+        """Read a file and create elements accordingly."""
+        # TODO(fungi): remove this parameter after the YAML transition
+        self._format = 'yaml'
+        self.modified = False
+        try:
+            with open(self.filename) as datafd:
+                # an empty file parses to None; normalize to an empty dict
+                self.data = yaml.load(datafd) or {}
+        except FileNotFoundError:
+            self.data = {}  # normal when the file doesn't exist yet
+            try:
+                mudpy.misc.log("Couldn't read %s file." % self.filename, 6)
+            except NameError:
+                pass  # logging isn't ready early in the init process
+        if not hasattr(self.universe, "files"):
+            self.universe.files = {}
+        self.universe.files[self.filename] = self
+        includes = []
+        if "__control__" in self.data:
+            if "include_files" in self.data["__control__"]:
+                for included in makelist(
+                        self.data["__control__"]["include_files"]):
+                    included = find_file(
+                        included,
+                        relative=self.filename,
+                        universe=self.universe)
+                    if included not in includes:
+                        includes.append(included)
+            if "include_dirs" in self.data["__control__"]:
+                for included in [
+                    os.path.join(x, "__init__.mpy") for x in makelist(
+                        self.data["__control__"]["include_dirs"]
+                    )
+                ]:
+                    included = find_file(
+                        included,
+                        relative=self.filename,
+                        universe=self.universe
+                    )
+                    if included not in includes:
+                        includes.append(included)
+            if "default_files" in self.data["__control__"]:
+                origins = makedict(
+                    self.data["__control__"]["default_files"]
+                )
+                for key in origins.keys():
+                    origins[key] = find_file(
+                        origins[key],
+                        relative=self.filename,
+                        universe=self.universe
+                    )
+                    if origins[key] not in includes:
+                        includes.append(origins[key])
+                    self.universe.default_origins[key] = origins[key]
+                    if key not in self.universe.categories:
+                        self.universe.categories[key] = {}
+            if "private_files" in self.data["__control__"]:
+                for item in makelist(
+                    self.data["__control__"]["private_files"]
+                ):
+                    item = find_file(
+                        item,
+                        relative=self.filename,
+                        universe=self.universe
+                    )
+                    if item not in includes:
+                        includes.append(item)
+                    if item not in self.universe.private_files:
+                        self.universe.private_files.append(item)
+        for element in self.data:
+            if element != "__control__":
+                mudpy.misc.Element(element, self.universe, self.filename)
+        for include_file in includes:
+            if not os.path.isabs(include_file):
+                include_file = find_file(
+                    include_file,
+                    relative=self.filename,
+                    universe=self.universe
+                )
+            if (include_file not in self.universe.files or not
+                    self.universe.files[include_file].is_writeable()):
+                DataFile(include_file, self.universe)
+
+    # TODO(fungi): remove this method after the YAML transition
+    def load_mpy(self):
+        """Read a file and create elements accordingly."""
+        self._format = 'mpy'
         self.modified = False
+        self.data = configparser.RawConfigParser()
         if os.access(self.filename, os.R_OK):
             self.data.read(self.filename)
-        if not hasattr(self.universe, u"files"):
+        if not hasattr(self.universe, "files"):
             self.universe.files = {}
         self.universe.files[self.filename] = self
         includes = []
-        if self.data.has_option(u"__control__", u"include_files"):
+        if self.data.has_option("__control__", "include_files"):
             for included in makelist(
-                self.data.get(u"__control__", u"include_files")
+                self.data.get("__control__", "include_files")
             ):
                 included = find_file(
                     included,
@@ -40,10 +139,10 @@ class DataFile:
                 )
                 if included not in includes:
                     includes.append(included)
-        if self.data.has_option(u"__control__", u"include_dirs"):
+        if self.data.has_option("__control__", "include_dirs"):
             for included in [
-                os.path.join(x, u"__init__.mpy") for x in makelist(
-                    self.data.get(u"__control__", u"include_dirs")
+                os.path.join(x, "__init__.mpy") for x in makelist(
+                    self.data.get("__control__", "include_dirs")
                 )
             ]:
                 included = find_file(
@@ -53,9 +152,9 @@ class DataFile:
                 )
                 if included not in includes:
                     includes.append(included)
-        if self.data.has_option(u"__control__", u"default_files"):
+        if self.data.has_option("__control__", "default_files"):
             origins = makedict(
-                self.data.get(u"__control__", u"default_files")
+                self.data.get("__control__", "default_files")
             )
             for key in origins.keys():
                 origins[key] = find_file(
@@ -68,9 +167,9 @@ class DataFile:
                 self.universe.default_origins[key] = origins[key]
                 if key not in self.universe.categories:
                     self.universe.categories[key] = {}
-        if self.data.has_option(u"__control__", u"private_files"):
+        if self.data.has_option("__control__", "private_files"):
             for item in makelist(
-                self.data.get(u"__control__", u"private_files")
+                self.data.get("__control__", "private_files")
             ):
                 item = find_file(
                     item,
@@ -82,8 +181,8 @@ class DataFile:
                 if item not in self.universe.private_files:
                     self.universe.private_files.append(item)
         for section in self.data.sections():
-            if section != u"__control__":
-                misc.Element(section, self.universe, self.filename)
+            if section != "__control__":
+                mudpy.misc.Element(section, self.universe, self.filename)
         for include_file in includes:
             if not os.path.isabs(include_file):
                 include_file = find_file(
@@ -95,13 +194,9 @@ class DataFile:
                     self.universe.files[include_file].is_writeable()):
                 DataFile(include_file, self.universe)
 
+    # TODO(fungi): this should support writing YAML
     def save(self):
-        u"""Write the data, if necessary."""
-        import codecs
-        import os
-        import os.path
-        import re
-        import stat
+        """Write the data, if necessary."""
 
         # when modified, writeable and has content or the file exists
         if self.modified and self.is_writeable() and (
@@ -113,60 +208,60 @@ class DataFile:
                 os.makedirs(os.path.dirname(self.filename))
 
             # backup the file
-            if self.data.has_option(u"__control__", u"backup_count"):
+            if self.data.has_option("__control__", "backup_count"):
                 max_count = self.data.has_option(
-                    u"__control__", u"backup_count")
+                    "__control__", "backup_count")
             else:
                 max_count = self.universe.categories[
-                    u"internal"
+                    "internal"
                 ][
-                    u"limits"
-                ].getint(u"default_backup_count")
+                    "limits"
+                ].getint("default_backup_count")
             if os.path.exists(self.filename) and max_count:
                 backups = []
                 for candidate in os.listdir(os.path.dirname(self.filename)):
                     if re.match(
                        os.path.basename(self.filename) +
-                       u"""\.\d+$""", candidate
+                       r"""\.\d+$""", candidate
                        ):
-                        backups.append(int(candidate.split(u".")[-1]))
+                        backups.append(int(candidate.split(".")[-1]))
                 backups.sort()
                 backups.reverse()
                 for old_backup in backups:
                     if old_backup >= max_count - 1:
-                        os.remove(self.filename + u"." + unicode(old_backup))
+                        os.remove(self.filename + "." + old_backup)
                     elif not os.path.exists(
-                        self.filename + u"." + unicode(old_backup + 1)
+                        self.filename + "." + old_backup + 1
                     ):
                         os.rename(
-                            self.filename + u"." + unicode(old_backup),
-                            self.filename + u"." + unicode(old_backup + 1)
+                            self.filename + "." + old_backup,
+                            self.filename + "." + old_backup + 1
                         )
-                if not os.path.exists(self.filename + u".0"):
-                    os.rename(self.filename, self.filename + u".0")
+                if not os.path.exists(self.filename + ".0"):
+                    os.rename(self.filename, self.filename + ".0")
 
             # our data file
-            file_descriptor = codecs.open(self.filename, u"w", u"utf-8")
+            file_descriptor = codecs.open(self.filename, "w", "utf-8")
 
             # if it's marked private, chmod it appropriately
             if self.filename in self.universe.private_files and oct(
                stat.S_IMODE(os.stat(self.filename)[stat.ST_MODE])
-               ) != 0600:
-                os.chmod(self.filename, 0600)
+               ) != 0o0600:
+                os.chmod(self.filename, 0o0600)
 
-            # write it back sorted, instead of using ConfigParser
+            # write it back sorted, instead of using configparser
             sections = self.data.sections()
             sections.sort()
             for section in sections:
-                file_descriptor.write(u"[" + section + u"]\n")
+                file_descriptor.write("[" + section + "]\n")
                 options = self.data.options(section)
                 options.sort()
                 for option in options:
                     file_descriptor.write(
-                        option + u" = " +
-                        self.data.get(section, option) + u"\n"
+                        option + " = " +
+                        self.data.get(section, option) + "\n"
                     )
-                file_descriptor.write(u"\n")
+                file_descriptor.write("\n")
 
             # flush and close the file
             file_descriptor.flush()
@@ -175,12 +270,13 @@ class DataFile:
             # unset the modified flag
             self.modified = False
 
+    # TODO(fungi): this should support writing YAML
     def is_writeable(self):
-        u"""Returns True if the __control__ read_only is False."""
+        """Returns True if the __control__ read_only is False."""
         return not self.data.has_option(
-            u"__control__", u"read_only"
+            "__control__", "read_only"
         ) or not self.data.getboolean(
-            u"__control__", u"read_only"
+            "__control__", "read_only"
         )
 
 
@@ -192,14 +288,11 @@ def find_file(
     relative=None,
     universe=None
 ):
-    u"""Return an absolute file path based on configuration."""
-    import os
-    import os.path
-    import sys
+    """Return an absolute file path based on configuration."""
 
     # make sure to get rid of any surrounding quotes first thing
     if file_name:
-        file_name = file_name.strip(u"\"'")
+        file_name = file_name.strip("\"'")
 
     # this is all unnecessary if it's already absolute
     if file_name and os.path.isabs(file_name):
@@ -207,57 +300,58 @@ def find_file(
 
     # when no file name is specified, look for <argv[0]>.conf
     elif not file_name:
-        file_name = os.path.basename(sys.argv[0]) + u".conf"
+        file_name = os.path.basename(sys.argv[0]) + ".conf"
 
     # if a universe was provided, try to get some defaults from there
     if universe:
 
         if hasattr(
            universe,
-           u"contents"
-           ) and u"internal:storage" in universe.contents:
-            storage = universe.categories[u"internal"][u"storage"]
+           "contents"
+           ) and "internal:storage" in universe.contents:
+            storage = universe.categories["internal"]["storage"]
             if not root_path:
-                root_path = storage.get(u"root_path").strip("\"'")
+                root_path = storage.get("root_path").strip("\"'")
             if not search_path:
-                search_path = storage.getlist(u"search_path")
+                search_path = storage.getlist("search_path")
             if not default_dir:
-                default_dir = storage.get(u"default_dir").strip("\"'")
+                default_dir = storage.get("default_dir").strip("\"'")
 
         # if there's only one file loaded, try to work around a chicken<egg
-        elif hasattr(universe, u"files") and len(
+        elif hasattr(universe, "files") and len(
             universe.files
-        ) == 1 and not universe.files[universe.files.keys()[0]].is_writeable():
-            data_file = universe.files[universe.files.keys()[0]].data
+        ) == 1 and not universe.files[
+                list(universe.files.keys())[0]].is_writeable():
+            data_file = universe.files[list(universe.files.keys())[0]].data
 
             # try for a fallback default directory
             if not default_dir and data_file.has_option(
-               u"internal:storage",
-               u"default_dir"
+               "internal:storage",
+               "default_dir"
                ):
                 default_dir = data_file.get(
-                    u"internal:storage",
-                    u"default_dir"
-                ).strip(u"\"'")
+                    "internal:storage",
+                    "default_dir"
+                ).strip("\"'")
 
             # try for a fallback root path
             if not root_path and data_file.has_option(
-               u"internal:storage",
-               u"root_path"
+               "internal:storage",
+               "root_path"
                ):
                 root_path = data_file.get(
-                    u"internal:storage",
-                    u"root_path"
-                ).strip(u"\"'")
+                    "internal:storage",
+                    "root_path"
+                ).strip("\"'")
 
             # try for a fallback search path
             if not search_path and data_file.has_option(
-               u"internal:storage",
-               u"search_path"
+               "internal:storage",
+               "search_path"
                ):
                 search_path = makelist(
-                    data_file.get(u"internal:storage",
-                                  u"search_path").strip(u"\"'")
+                    data_file.get("internal:storage",
+                                  "search_path").strip("\"'")
                 )
 
         # another fallback root path, this time from the universe startdir
@@ -274,7 +368,7 @@ def find_file(
 
     # if there's no search path, just use the root path and etc
     if not search_path:
-        search_path = [root_path, u"etc"]
+        search_path = [root_path, "etc"]
 
     # work on a copy of the search path, to avoid modifying the caller's
     else:
@@ -286,7 +380,7 @@ def find_file(
 
     # if an existing file or directory reference was supplied, prepend it
     if relative:
-        relative = relative.strip(u"\"'")
+        relative = relative.strip("\"'")
         if os.path.isdir(relative):
             search_path = [relative] + search_path
         else:
@@ -295,7 +389,7 @@ def find_file(
     # make the search path entries absolute and throw away any dupes
     clean_search_path = []
     for each_path in search_path:
-        each_path = each_path.strip(u"\"'")
+        each_path = each_path.strip("\"'")
         if not os.path.isabs(each_path):
             each_path = os.path.realpath(os.path.join(root_path, each_path))
         if each_path not in clean_search_path:
@@ -323,20 +417,20 @@ def find_file(
 
 
 def makelist(value):
-    u"""Turn string into list type."""
-    if value[0] + value[-1] == u"[]":
+    """Turn string into list type."""
+    if value[0] + value[-1] == "[]":
         return eval(value)
-    elif value[0] + value[-1] == u"\"\"":
+    elif value[0] + value[-1] == "\"\"":
         return [value[1:-1]]
     else:
         return [value]
 
 
 def makedict(value):
-    u"""Turn string into dict type."""
-    if value[0] + value[-1] == u"{}":
+    """Turn string into dict type."""
+    if value[0] + value[-1] == "{}":
         return eval(value)
-    elif value.find(u":") > 0:
-        return eval(u"{" + value + u"}")
+    elif value.find(":") > 0:
+        return eval("{" + value + "}")
     else:
         return {value: None}