PEP 8 conformance for data handling library
author: Jeremy Stanley <fungi@yuggoth.org>
Tue, 11 Jan 2011 10:49:21 +0000 (10:49 +0000)
committer: Jeremy Stanley <fungi@yuggoth.org>
Tue, 11 Jan 2011 10:49:21 +0000 (10:49 +0000)
* lib/mudpy/data.py: Conform to the PEP 8 style guide.

lib/mudpy/data.py

index 7960176..f94b02b 100644 (file)
 # -*- coding: utf-8 -*-
 u"""Data interface functions for the mudpy engine."""
 
-# Copyright (c) 2004-2010 Jeremy Stanley <fungi@yuggoth.org>. Permission
+# Copyright (c) 2004-2011 Jeremy Stanley <fungi@yuggoth.org>. Permission
 # to use, copy, modify, and distribute this software is granted under
 # terms provided in the LICENSE file distributed with this software.
 
+
 class DataFile:
-   u"""A file containing universe elements."""
-   def __init__(self, filename, universe):
-      self.filename = filename
-      self.universe = universe
-      self.load()
-   def load(self):
-      u"""Read a file and create elements accordingly."""
-      import ConfigParser, misc, os, os.path
-      self.data = ConfigParser.RawConfigParser()
-      self.modified = False
-      if os.access(self.filename, os.R_OK): self.data.read(self.filename)
-      if not hasattr(self.universe, u"files"): self.universe.files = {}
-      self.universe.files[self.filename] = self
-      includes = []
-      if self.data.has_option(u"__control__", u"include_files"):
-         for included in makelist(
-            self.data.get(u"__control__", u"include_files")
-         ):
-            included = find_file(
-               included,
-               relative=self.filename,
-               universe=self.universe
-            )
-            if included not in includes: includes.append(included)
-      if self.data.has_option(u"__control__", u"include_dirs"):
-         for included in [
-            os.path.join(x, u"__init__.mpy") for x in makelist(
-               self.data.get(u"__control__", u"include_dirs")
-            )
-         ]:
-            included = find_file(
-               included,
-               relative=self.filename,
-               universe=self.universe
-            )
-            if included not in includes: includes.append(included)
-      if self.data.has_option(u"__control__", u"default_files"):
-         origins = makedict(
-            self.data.get(u"__control__", u"default_files")
-         )
-         for key in origins.keys():
-            origins[key] = find_file(
-               origins[key],
-               relative=self.filename,
-               universe=self.universe
-            )
-            if origins[key] not in includes: includes.append(origins[key])
-            self.universe.default_origins[key] = origins[key]
-            if key not in self.universe.categories:
-               self.universe.categories[key] = {}
-      if self.data.has_option(u"__control__", u"private_files"):
-         for item in makelist(
-            self.data.get(u"__control__", u"private_files")
-         ):
-            item = find_file(
-               item,
-               relative=self.filename,
-               universe=self.universe
-            )
-            if item not in includes: includes.append(item)
-            if item not in self.universe.private_files:
-               self.universe.private_files.append(item)
-      for section in self.data.sections():
-         if section != u"__control__":
-            misc.Element(section, self.universe, self.filename)
-      for include_file in includes:
-         if not os.path.isabs(include_file):
-            include_file = find_file(
-               include_file,
-               relative=self.filename,
-               universe=self.universe
+
+    u"""A file containing universe elements."""
+
+    def __init__(self, filename, universe):
+        self.filename = filename
+        self.universe = universe
+        self.load()
+
+    def load(self):
+        u"""Read a file and create elements accordingly."""
+        import ConfigParser
+        import misc
+        import os
+        import os.path
+        self.data = ConfigParser.RawConfigParser()
+        self.modified = False
+        if os.access(self.filename, os.R_OK):
+            self.data.read(self.filename)
+        if not hasattr(self.universe, u"files"):
+            self.universe.files = {}
+        self.universe.files[self.filename] = self
+        includes = []
+        if self.data.has_option(u"__control__", u"include_files"):
+            for included in makelist(
+                self.data.get(u"__control__", u"include_files")
+            ):
+                included = find_file(
+                    included,
+                    relative=self.filename,
+                    universe=self.universe
+                )
+                if included not in includes:
+                    includes.append(included)
+        if self.data.has_option(u"__control__", u"include_dirs"):
+            for included in [
+                os.path.join(x, u"__init__.mpy") for x in makelist(
+                    self.data.get(u"__control__", u"include_dirs")
+                )
+            ]:
+                included = find_file(
+                    included,
+                    relative=self.filename,
+                    universe=self.universe
+                )
+                if included not in includes:
+                    includes.append(included)
+        if self.data.has_option(u"__control__", u"default_files"):
+            origins = makedict(
+                self.data.get(u"__control__", u"default_files")
             )
-         if include_file not in self.universe.files or not self.universe.files[
-            include_file
-         ].is_writeable():
-            DataFile(include_file, self.universe)
-   def save(self):
-      u"""Write the data, if necessary."""
-      import codecs, os, os.path, re, stat
-
-      # when modified, writeable and has content or the file exists
-      if self.modified and self.is_writeable() and (
-         self.data.sections() or os.path.exists(self.filename)
-      ):
-
-         # make parent directories if necessary
-         if not os.path.exists(os.path.dirname(self.filename)):
-            os.makedirs(os.path.dirname(self.filename))
-
-         # backup the file
-         if self.data.has_option(u"__control__", u"backup_count"):
-            max_count = self.data.has_option(u"__control__", u"backup_count")
-         else:
-            max_count = self.universe.categories[
-               u"internal"
-            ][
-               u"limits"
-            ].getint(u"default_backup_count")
-         if os.path.exists(self.filename) and max_count:
-            backups = []
-            for candidate in os.listdir(os.path.dirname(self.filename)):
-               if re.match(
-                  os.path.basename(self.filename) + u"""\.\d+$""", candidate
-               ):
-                  backups.append(int(candidate.split(u".")[-1]))
-            backups.sort()
-            backups.reverse()
-            for old_backup in backups:
-               if old_backup >= max_count-1:
-                  os.remove(self.filename+u"."+unicode(old_backup))
-               elif not os.path.exists(
-                  self.filename+u"."+unicode(old_backup+1)
-               ):
-                  os.rename(
-                     self.filename + u"."+unicode(old_backup),
-                     self.filename + u"."+unicode( old_backup + 1 )
-                  )
-            if not os.path.exists(self.filename+u".0"):
-               os.rename( self.filename, self.filename + u".0" )
-
-         # our data file
-         file_descriptor = codecs.open(self.filename, u"w", u"utf-8")
-
-         # if it's marked private, chmod it appropriately
-         if self.filename in self.universe.private_files and oct(
-            stat.S_IMODE( os.stat(self.filename)[stat.ST_MODE] )
-         ) != 0600:
-            os.chmod(self.filename, 0600)
-
-         # write it back sorted, instead of using ConfigParser
-         sections = self.data.sections()
-         sections.sort()
-         for section in sections:
-            file_descriptor.write(u"[" + section + u"]\n")
-            options = self.data.options(section)
-            options.sort()
-            for option in options:
-               file_descriptor.write(
-                  option + u" = " + self.data.get(section, option) + u"\n"
-               )
-            file_descriptor.write(u"\n")
-
-         # flush and close the file
-         file_descriptor.flush()
-         file_descriptor.close()
-
-         # unset the modified flag
-         self.modified = False
-   def is_writeable(self):
-      u"""Returns True if the __control__ read_only is False."""
-      return not self.data.has_option(
-         u"__control__", u"read_only"
-      ) or not self.data.getboolean(
-         u"__control__", u"read_only"
-      )
+            for key in origins.keys():
+                origins[key] = find_file(
+                    origins[key],
+                    relative=self.filename,
+                    universe=self.universe
+                )
+                if origins[key] not in includes:
+                    includes.append(origins[key])
+                self.universe.default_origins[key] = origins[key]
+                if key not in self.universe.categories:
+                    self.universe.categories[key] = {}
+        if self.data.has_option(u"__control__", u"private_files"):
+            for item in makelist(
+                self.data.get(u"__control__", u"private_files")
+            ):
+                item = find_file(
+                    item,
+                    relative=self.filename,
+                    universe=self.universe
+                )
+                if item not in includes:
+                    includes.append(item)
+                if item not in self.universe.private_files:
+                    self.universe.private_files.append(item)
+        for section in self.data.sections():
+            if section != u"__control__":
+                misc.Element(section, self.universe, self.filename)
+        for include_file in includes:
+            if not os.path.isabs(include_file):
+                include_file = find_file(
+                    include_file,
+                    relative=self.filename,
+                    universe=self.universe
+                )
+            if include_file not in self.universe.files or not self.universe.files[
+               include_file
+               ].is_writeable():
+                DataFile(include_file, self.universe)
+
+    def save(self):
+        u"""Write the data, if necessary."""
+        import codecs
+        import os
+        import os.path
+        import re
+        import stat
+
+        # when modified, writeable and has content or the file exists
+        if self.modified and self.is_writeable() and (
+           self.data.sections() or os.path.exists(self.filename)
+           ):
+
+            # make parent directories if necessary
+            if not os.path.exists(os.path.dirname(self.filename)):
+                os.makedirs(os.path.dirname(self.filename))
+
+            # backup the file
+            if self.data.has_option(u"__control__", u"backup_count"):
+                max_count = self.data.has_option(
+                    u"__control__", u"backup_count")
+            else:
+                max_count = self.universe.categories[
+                    u"internal"
+                ][
+                    u"limits"
+                ].getint(u"default_backup_count")
+            if os.path.exists(self.filename) and max_count:
+                backups = []
+                for candidate in os.listdir(os.path.dirname(self.filename)):
+                    if re.match(
+                       os.path.basename(self.filename) +
+                       u"""\.\d+$""", candidate
+                       ):
+                        backups.append(int(candidate.split(u".")[-1]))
+                backups.sort()
+                backups.reverse()
+                for old_backup in backups:
+                    if old_backup >= max_count - 1:
+                        os.remove(self.filename + u"." + unicode(old_backup))
+                    elif not os.path.exists(
+                        self.filename + u"." + unicode(old_backup + 1)
+                    ):
+                        os.rename(
+                            self.filename + u"." + unicode(old_backup),
+                            self.filename + u"." + unicode(old_backup + 1)
+                        )
+                if not os.path.exists(self.filename + u".0"):
+                    os.rename(self.filename, self.filename + u".0")
+
+            # our data file
+            file_descriptor = codecs.open(self.filename, u"w", u"utf-8")
+
+            # if it's marked private, chmod it appropriately
+            if self.filename in self.universe.private_files and oct(
+               stat.S_IMODE(os.stat(self.filename)[stat.ST_MODE])
+               ) != 0600:
+                os.chmod(self.filename, 0600)
+
+            # write it back sorted, instead of using ConfigParser
+            sections = self.data.sections()
+            sections.sort()
+            for section in sections:
+                file_descriptor.write(u"[" + section + u"]\n")
+                options = self.data.options(section)
+                options.sort()
+                for option in options:
+                    file_descriptor.write(
+                        option + u" = " +
+                        self.data.get(section, option) + u"\n"
+                    )
+                file_descriptor.write(u"\n")
+
+            # flush and close the file
+            file_descriptor.flush()
+            file_descriptor.close()
+
+            # unset the modified flag
+            self.modified = False
+
+    def is_writeable(self):
+        u"""Returns True if the __control__ read_only is False."""
+        return not self.data.has_option(
+            u"__control__", u"read_only"
+        ) or not self.data.getboolean(
+            u"__control__", u"read_only"
+        )
+
 
def find_file(
    file_name=None,
    root_path=None,
    search_path=None,
    default_dir=None,
    relative=None,
    universe=None
):
    u"""Return an absolute file path based on configuration."""
    import os
    import os.path
    import sys

    # strip any surrounding quotes from the supplied name up front
    if file_name:
        file_name = file_name.strip(u"\"'")

    # an absolute path needs no searching, just normalization
    if file_name and os.path.isabs(file_name):
        return os.path.realpath(file_name)

    # with no name at all, fall back on <argv[0]>.conf
    elif not file_name:
        file_name = os.path.basename(sys.argv[0]) + u".conf"

    # a universe can supply defaults for the remaining parameters
    if universe:

        if (hasattr(universe, u"contents")
                and u"internal:storage" in universe.contents):
            storage = universe.categories[u"internal"][u"storage"]
            if not root_path:
                root_path = storage.get(u"root_path").strip("\"'")
            if not search_path:
                search_path = storage.getlist(u"search_path")
            if not default_dir:
                default_dir = storage.get(u"default_dir").strip("\"'")

        # with exactly one read-only file loaded, dodge the chicken/egg
        # problem by pulling fallbacks straight from its raw data
        elif (hasattr(universe, u"files") and len(universe.files) == 1
                and not universe.files[
                    universe.files.keys()[0]].is_writeable()):
            data_file = universe.files[universe.files.keys()[0]].data

            # fallback default directory
            if not data_file.has_option(
                u"internal:storage", u"default_dir"
            ):
                pass
            elif not default_dir:
                default_dir = data_file.get(
                    u"internal:storage", u"default_dir").strip(u"\"'")

            # fallback root path
            if not root_path and data_file.has_option(
                u"internal:storage", u"root_path"
            ):
                root_path = data_file.get(
                    u"internal:storage", u"root_path").strip(u"\"'")

            # fallback search path
            if not search_path and data_file.has_option(
                u"internal:storage", u"search_path"
            ):
                search_path = makelist(data_file.get(
                    u"internal:storage", u"search_path").strip(u"\"'"))

        # the universe's startdir is one more root path fallback
        if not root_path and hasattr(universe, "startdir"):
            root_path = universe.startdir

    # default the root path to the current working directory...
    if not root_path:
        root_path = os.getcwd()

    # ...and otherwise force it absolute
    elif not os.path.isabs(root_path):
        root_path = os.path.realpath(root_path)

    # fall back on the root path plus etc when no search path was given
    if not search_path:
        search_path = [root_path, u"etc"]

    # otherwise copy it so the caller's list is never mutated
    else:
        search_path = search_path[:]

    # the default directory falls back on the search path's last entry
    if not default_dir:
        default_dir = search_path[-1]

    # a relative file or directory reference goes on the front
    if relative:
        relative = relative.strip(u"\"'")
        if os.path.isdir(relative):
            search_path = [relative] + search_path
        else:
            search_path = [os.path.dirname(relative)] + search_path

    # absolutize each search path entry, dropping duplicates
    candidates = []
    for entry in search_path:
        entry = entry.strip(u"\"'")
        if not os.path.isabs(entry):
            entry = os.path.realpath(os.path.join(root_path, entry))
        if entry not in candidates:
            candidates.append(entry)

    # the first directory containing the file wins
    for entry in candidates:
        if os.path.isfile(os.path.join(entry, file_name)):
            file_name = os.path.realpath(os.path.join(entry, file_name))
            break

    # no hits, so fall back on the default dir, then the root path
    if not os.path.isabs(file_name):
        file_name = os.path.join(default_dir, file_name)
    if not os.path.isabs(file_name):
        file_name = os.path.join(root_path, file_name)

    # normalize the resulting file path and hand it back
    return os.path.realpath(file_name)
 
def makelist(value):
    u"""Turn string into list type."""
    # classify the value by its first and last characters
    delimiters = value[0] + value[-1]
    # NOTE(review): eval() on file-sourced strings is dangerous if the
    # data files are ever untrusted; consider ast.literal_eval
    if delimiters == u"[]":
        return eval(value)
    if delimiters == u"\"\"":
        return [value[1:-1]]
    return [value]
 
def makedict(value):
    u"""Turn string into dict type."""
    # NOTE(review): eval() on file-sourced strings is dangerous if the
    # data files are ever untrusted; consider ast.literal_eval
    if value[0] + value[-1] == u"{}":
        return eval(value)
    # a colon past position zero marks a bare key: value pair
    if value.find(u":") > 0:
        return eval(u"{" + value + u"}")
    return {value: None}