Overhaul data management to get rid of __control__
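
Drop the magic "__control__" section from YAML data files in favor of
plain top-level keys: a "_load" list naming additional files to
include, and a "_lock" flag marking a file read-only. Per-file
behaviors such as private permissions now reach the renamed Data class
through its new flags parameter instead of a __control__ private_files
list, the per-file backup_count override gives way to the mudpy.limit
element's backups facet, and find_file() learns to resolve a directory
to the __init__.yaml inside it.

As a rough sketch of the new layout (the included file name below is
purely illustrative), a data file that previously opened with:

    __control__:
        include_files:
        - other.yaml
        read_only: true

would now open with:

    _load:
    - other.yaml
    _lock: true

Similarly, a caller needing private file permissions would request them
explicitly, along the lines of Data(path, universe, flags=["private"]).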
diff --git a/mudpy/data.py b/mudpy/data.py
index e9a1079..f113369 100644
@@ -12,24 +12,36 @@ import mudpy
 import yaml
 
 
-class DataFile:
+class Data:
 
     """A file containing universe elements and their facets."""
 
-    def __init__(self, filename, universe):
-        self.filename = filename
+    def __init__(self,
+                 source,
+                 universe,
+                 flags=None,
+                 relative=None,
+                 ):
+        self.source = source
         self.universe = universe
-        self.data = {}
+        if flags is None:
+            self.flags = []
+        else:
+            self.flags = flags[:]
+        self.relative = relative
         self.load()
 
     def load(self):
         """Read a file, create elements and poplulate facets accordingly."""
         self.modified = False
+        self.source = find_file(
+                self.source, relative=self.relative, universe=self.universe)
         try:
-            self.data = yaml.safe_load(open(self.filename))
+            self.data = yaml.safe_load(open(self.source))
         except FileNotFoundError:
             # it's normal if the file is one which doesn't exist yet
-            log_entry = ("File %s is unavailable." % self.filename, 6)
+            self.data = {}
+            log_entry = ("File %s is unavailable." % self.source, 6)
             try:
                 mudpy.misc.log(*log_entry)
             except NameError:
@@ -37,67 +49,27 @@ class DataFile:
                 self.universe.setup_loglines.append(log_entry)
         if not hasattr(self.universe, "files"):
             self.universe.files = {}
-        self.universe.files[self.filename] = self
+        self.universe.files[self.source] = self
         includes = []
-        if "__control__" in self.data:
-            if "include_files" in self.data["__control__"]:
-                for included in self.data["__control__"]["include_files"]:
+        for node in list(self.data):
+            if node == "_load":
+                for included in self.data["_load"]:
                     included = find_file(
                         included,
-                        relative=self.filename,
+                        relative=self.source,
                         universe=self.universe)
                     if included not in includes:
                         includes.append(included)
-            if "include_dirs" in self.data["__control__"]:
-                for included in [
-                    os.path.join(x, "__init__.yaml") for x in
-                        self.data["__control__"]["include_dirs"]
-                ]:
-                    included = find_file(
-                        included,
-                        relative=self.filename,
-                        universe=self.universe
-                    )
-                    if included not in includes:
-                        includes.append(included)
-            if "default_files" in self.data["__control__"]:
-                origins = self.data["__control__"]["default_files"]
-                for key in origins.keys():
-                    origins[key] = find_file(
-                        origins[key],
-                        relative=self.filename,
-                        universe=self.universe
-                    )
-                    if origins[key] not in includes:
-                        includes.append(origins[key])
-                    self.universe.default_origins[key] = origins[key]
-                    if key not in self.universe.categories:
-                        self.universe.categories[key] = {}
-            if "private_files" in self.data["__control__"]:
-                for item in self.data["__control__"]["private_files"]:
-                    item = find_file(
-                        item,
-                        relative=self.filename,
-                        universe=self.universe
-                    )
-                    if item not in includes:
-                        includes.append(item)
-                    if item not in self.universe.private_files:
-                        self.universe.private_files.append(item)
-        for node in list(self.data):
-            if node == "__control__":
                 continue
             facet_pos = node.rfind(".") + 1
             if not facet_pos:
-                mudpy.misc.Element(node, self.universe, self.filename,
-                                   old_style=True)
+                mudpy.misc.Element(node, self.universe, self, old_style=True)
             else:
                 prefix = node[:facet_pos].strip(".")
                 try:
                     element = self.universe.contents[prefix]
                 except KeyError:
-                    element = mudpy.misc.Element(prefix, self.universe,
-                                                 self.filename)
+                    element = mudpy.misc.Element(prefix, self.universe, self)
                 element.set(node[facet_pos:], self.data[node])
                 if prefix.startswith("mudpy.movement."):
                     self.universe.directions.add(
@@ -106,12 +78,12 @@ class DataFile:
             if not os.path.isabs(include_file):
                 include_file = find_file(
                     include_file,
-                    relative=self.filename,
+                    relative=self.source,
                     universe=self.universe
                 )
             if (include_file not in self.universe.files or not
                     self.universe.files[include_file].is_writeable()):
-                DataFile(include_file, self.universe)
+                Data(include_file, self.universe)
 
     def save(self):
         """Write the data, if necessary."""
@@ -121,29 +93,26 @@ class DataFile:
 
         # when modified, writeable and has content or the file exists
         if self.modified and self.is_writeable() and (
-           self.data or os.path.exists(self.filename)
+           self.data or os.path.exists(self.source)
            ):
 
             # make parent directories if necessary
-            if not os.path.exists(os.path.dirname(self.filename)):
+            if not os.path.exists(os.path.dirname(self.source)):
                 old_umask = os.umask(normal_umask)
-                os.makedirs(os.path.dirname(self.filename))
+                os.makedirs(os.path.dirname(self.source))
                 os.umask(old_umask)
 
             # backup the file
-            if "__control__" in self.data and "backup_count" in self.data[
-                    "__control__"]:
-                max_count = self.data["__control__"]["backup_count"]
-            elif "mudpy.limit" in self.universe.contents:
+            if "mudpy.limit" in self.universe.contents:
                 max_count = self.universe.contents["mudpy.limit"].get(
                     "backups", 0)
             else:
                 max_count = 0
-            if os.path.exists(self.filename) and max_count:
+            if os.path.exists(self.source) and max_count:
                 backups = []
-                for candidate in os.listdir(os.path.dirname(self.filename)):
+                for candidate in os.listdir(os.path.dirname(self.source)):
                     if re.match(
-                       os.path.basename(self.filename) +
+                       os.path.basename(self.source) +
                        r"""\.\d+$""", candidate
                        ):
                         backups.append(int(candidate.split(".")[-1]))
@@ -151,28 +120,28 @@ class DataFile:
                 backups.reverse()
                 for old_backup in backups:
                     if old_backup >= max_count - 1:
-                        os.remove(self.filename + "." + str(old_backup))
+                        os.remove(self.source + "." + str(old_backup))
                     elif not os.path.exists(
-                        self.filename + "." + str(old_backup + 1)
+                        self.source + "." + str(old_backup + 1)
                     ):
                         os.rename(
-                            self.filename + "." + str(old_backup),
-                            self.filename + "." + str(old_backup + 1)
+                            self.source + "." + str(old_backup),
+                            self.source + "." + str(old_backup + 1)
                         )
-                if not os.path.exists(self.filename + ".0"):
-                    os.rename(self.filename, self.filename + ".0")
+                if not os.path.exists(self.source + ".0"):
+                    os.rename(self.source, self.source + ".0")
 
             # our data file
-            if self.filename in self.universe.private_files:
+            if "private" in self.flags:
                 old_umask = os.umask(private_umask)
-                file_descriptor = open(self.filename, "w")
+                file_descriptor = open(self.source, "w")
                 if oct(stat.S_IMODE(os.stat(
-                        self.filename)[stat.ST_MODE])) != private_file_mode:
+                        self.source)[stat.ST_MODE])) != private_file_mode:
                     # if it's marked private, chmod it appropriately
-                    os.chmod(self.filename, private_file_mode)
+                    os.chmod(self.source, private_file_mode)
             else:
                 old_umask = os.umask(normal_umask)
-                file_descriptor = open(self.filename, "w")
+                file_descriptor = open(self.source, "w")
             os.umask(old_umask)
 
             # write and close the file
@@ -184,15 +153,16 @@ class DataFile:
             self.modified = False
 
     def is_writeable(self):
-        """Returns True if the __control__ read_only is False."""
+        """Returns True if the _lock is False."""
         try:
-            return not self.data["__control__"].get("read_only", False)
+            return not self.data.get("_lock", False)
         except KeyError:
             return True
 
 
 def find_file(
     file_name=None,
+    category=None,
     prefix=None,
     relative=None,
     search=None,
@@ -281,9 +251,18 @@ def find_file(
     # start hunting for the file now
     for each_path in clean_search:
 
+        # construct the candidate path
+        candidate = os.path.join(each_path, file_name)
+
         # if the file exists and is readable, we're done
-        if os.path.isfile(os.path.join(each_path, file_name)):
-            file_name = os.path.realpath(os.path.join(each_path, file_name))
+        if os.path.isfile(candidate):
+            file_name = os.path.realpath(candidate)
+            break
+
+        # if the path is a directory, look for an __init__ file
+        if os.path.isdir(candidate):
+            file_name = os.path.realpath(
+                    os.path.join(candidate, "__init__.yaml"))
             break
 
     # it didn't exist after all, so use the default path instead