Back end data storage separation.
lib/mudpy/data.py
# -*- coding: utf-8 -*-
u"""Data interface functions for the mudpy engine."""

# Copyright (c) 2004-2010 Jeremy Stanley <fungi@yuggoth.org>. Permission
# to use, copy, modify, and distribute this software is granted under
# terms provided in the LICENSE file distributed with this software.

class DataFile:
   u"""A file containing universe elements."""
   def __init__(self, filename, universe):
      self.filename = filename
      self.universe = universe
      self.load()
   def load(self):
      u"""Read a file and create elements accordingly."""
      import ConfigParser, misc, os, os.path
      self.data = ConfigParser.RawConfigParser()
      self.modified = False

      # read the file if it's accessible, then register it with the universe
      if os.access(self.filename, os.R_OK): self.data.read(self.filename)
      if not hasattr(self.universe, u"files"): self.universe.files = {}
      self.universe.files[self.filename] = self
      includes = []

      # gather any additional files named in the __control__ section
      if self.data.has_option(u"__control__", u"include_files"):
         for included in makelist(
            self.data.get(u"__control__", u"include_files")
         ):
            included = find_file(
               included,
               relative=self.filename,
               universe=self.universe
            )
            if included not in includes: includes.append(included)

      # include the __init__.mpy file from each listed directory
      if self.data.has_option(u"__control__", u"include_dirs"):
         for included in [
            os.path.join(x, u"__init__.mpy") for x in makelist(
               self.data.get(u"__control__", u"include_dirs")
            )
         ]:
            included = find_file(
               included,
               relative=self.filename,
               universe=self.universe
            )
            if included not in includes: includes.append(included)

      # record the default origin file for each element category
      if self.data.has_option(u"__control__", u"default_files"):
         origins = makedict(
            self.data.get(u"__control__", u"default_files")
         )
         for key in origins.keys():
            origins[key] = find_file(
               origins[key],
               relative=self.filename,
               universe=self.universe
            )
            if origins[key] not in includes: includes.append(origins[key])
            self.universe.default_origins[key] = origins[key]
            if key not in self.universe.categories:
               self.universe.categories[key] = {}

      # note files which should be saved with restrictive permissions
      if self.data.has_option(u"__control__", u"private_files"):
         for item in makelist(
            self.data.get(u"__control__", u"private_files")
         ):
            item = find_file(
               item,
               relative=self.filename,
               universe=self.universe
            )
            if item not in includes: includes.append(item)
            if item not in self.universe.private_files:
               self.universe.private_files.append(item)

      # create a universe element for every other section in the file
      for section in self.data.sections():
         if section != u"__control__":
            misc.Element(section, self.universe, self.filename)

      # load each included file unless a writeable copy is already loaded
      for include_file in includes:
         if not os.path.isabs(include_file):
            include_file = find_file(
               include_file,
               relative=self.filename,
               universe=self.universe
            )
         if include_file not in self.universe.files or not self.universe.files[
            include_file
         ].is_writeable():
            DataFile(include_file, self.universe)
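   # As an illustration of what load() looks for, a file's __control__ section
   # might resemble the following (the file names are hypothetical, not part
   # of a stock mudpy installation):
   #
   #    [__control__]
   #    include_files = [ "archetype.mpy", "command.mpy" ]
   #    include_dirs = [ "account" ]
   #    default_files = "account": "account/account.mpy"
   #    private_files = [ "account/account.mpy" ]
   #    read_only = yes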
   def save(self):
      u"""Write the data, if necessary."""
      import codecs, os, os.path, re, stat

      # when modified, writeable and has content or the file exists
      if self.modified and self.is_writeable() and (
         self.data.sections() or os.path.exists(self.filename)
      ):

         # make parent directories if necessary
         if not os.path.exists(os.path.dirname(self.filename)):
            os.makedirs(os.path.dirname(self.filename))

         # backup the file
         if self.data.has_option(u"__control__", u"backup_count"):
            max_count = self.data.getint(u"__control__", u"backup_count")
         else:
            max_count = self.universe.categories[
               u"internal"
            ][
               u"limits"
            ].getint(u"default_backup_count")
         if os.path.exists(self.filename) and max_count:
            backups = []
            for candidate in os.listdir(os.path.dirname(self.filename)):
               if re.match(
                  os.path.basename(self.filename) + u"""\.\d+$""", candidate
               ):
                  backups.append(int(candidate.split(u".")[-1]))
            backups.sort()
            backups.reverse()
            for old_backup in backups:
               if old_backup >= max_count - 1:
                  os.remove(self.filename + u"." + unicode(old_backup))
               elif not os.path.exists(
                  self.filename + u"." + unicode(old_backup + 1)
               ):
                  os.rename(
                     self.filename + u"." + unicode(old_backup),
                     self.filename + u"." + unicode(old_backup + 1)
                  )
            if not os.path.exists(self.filename + u".0"):
               os.rename(self.filename, self.filename + u".0")

         # open the data file for writing
         file_descriptor = codecs.open(self.filename, u"w", u"utf-8")

         # if it's marked private, chmod it appropriately
         if self.filename in self.universe.private_files and stat.S_IMODE(
            os.stat(self.filename)[stat.ST_MODE]
         ) != 0600:
            os.chmod(self.filename, 0600)

         # write it back sorted, instead of using ConfigParser
         sections = self.data.sections()
         sections.sort()
         for section in sections:
            file_descriptor.write(u"[" + section + u"]\n")
            options = self.data.options(section)
            options.sort()
            for option in options:
               file_descriptor.write(
                  option + u" = " + self.data.get(section, option) + u"\n"
               )
            file_descriptor.write(u"\n")

         # flush and close the file
         file_descriptor.flush()
         file_descriptor.close()

         # unset the modified flag
         self.modified = False
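   # For example, with backup_count = 3 and a file named sample.mpy (an
   # illustrative name), repeated saves would be expected to leave sample.mpy
   # plus rotated copies sample.mpy.0 (newest) through sample.mpy.2 (oldest),
   # removing anything older during rotation.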
   def is_writeable(self):
      u"""Return True unless the __control__ read_only flag is set."""
      return not self.data.has_option(
         u"__control__", u"read_only"
      ) or not self.data.getboolean(
         u"__control__", u"read_only"
      )

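# A rough sketch of typical DataFile usage (hypothetical names; "universe" is
# assumed to be the engine's universe object from the misc module):
#
#    config = DataFile(u"etc/mudpy.conf", universe)  # loads and registers itself
#    config.data.set(u"internal:storage", u"root_path", u"/srv/mud")
#    config.modified = True  # save() only writes when this flag is set
#    config.save()
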
def find_file(
   file_name=None,
   root_path=None,
   search_path=None,
   default_dir=None,
   relative=None,
   universe=None
):
   u"""Return an absolute file path based on configuration."""
   import os, os.path, sys

   # make sure to get rid of any surrounding quotes first thing
   if file_name: file_name = file_name.strip(u"\"'")

   # this is all unnecessary if it's already absolute
   if file_name and os.path.isabs(file_name):
      return os.path.realpath(file_name)

   # when no file name is specified, look for <argv[0]>.conf
   elif not file_name: file_name = os.path.basename(sys.argv[0]) + u".conf"

   # if a universe was provided, try to get some defaults from there
   if universe:

      if hasattr(
         universe,
         u"contents"
      ) and u"internal:storage" in universe.contents:
         storage = universe.categories[u"internal"][u"storage"]
         if not root_path: root_path = storage.get(u"root_path").strip("\"'")
         if not search_path: search_path = storage.getlist(u"search_path")
         if not default_dir:
            default_dir = storage.get(u"default_dir").strip("\"'")

      # if there's only one file loaded, work around the chicken-and-egg
      # problem by reading storage defaults straight from that file's data
      elif hasattr(universe, u"files") and len(
         universe.files
      ) == 1 and not universe.files[universe.files.keys()[0]].is_writeable():
         data_file = universe.files[universe.files.keys()[0]].data

         # try for a fallback default directory
         if not default_dir and data_file.has_option(
            u"internal:storage",
            u"default_dir"
         ):
            default_dir = data_file.get(
               u"internal:storage",
               u"default_dir"
            ).strip(u"\"'")

         # try for a fallback root path
         if not root_path and data_file.has_option(
            u"internal:storage",
            u"root_path"
         ):
            root_path = data_file.get(
               u"internal:storage",
               u"root_path"
            ).strip(u"\"'")

         # try for a fallback search path
         if not search_path and data_file.has_option(
            u"internal:storage",
            u"search_path"
         ):
            search_path = makelist(
               data_file.get(u"internal:storage", u"search_path").strip(u"\"'")
            )

      # another fallback root path, this time from the universe startdir
      if not root_path and hasattr(universe, "startdir"):
         root_path = universe.startdir

   # when no root path is specified, assume the current working directory
   if not root_path: root_path = os.getcwd()

   # otherwise, make sure it's absolute
   elif not os.path.isabs(root_path): root_path = os.path.realpath(root_path)

   # if there's no search path, just use the root path and etc
   if not search_path: search_path = [root_path, u"etc"]

   # otherwise, work on a copy of the search path to avoid modifying the
   # caller's list
   else: search_path = search_path[:]

   # if there's no default path, use the last element of the search path
   if not default_dir: default_dir = search_path[-1]

   # if an existing file or directory reference was supplied, prepend it
   if relative:
      relative = relative.strip(u"\"'")
      if os.path.isdir(relative): search_path = [relative] + search_path
      else: search_path = [os.path.dirname(relative)] + search_path

   # make the search path entries absolute and throw away any dupes
   clean_search_path = []
   for each_path in search_path:
      each_path = each_path.strip(u"\"'")
      if not os.path.isabs(each_path):
         each_path = os.path.realpath(os.path.join(root_path, each_path))
      if each_path not in clean_search_path:
         clean_search_path.append(each_path)

   # start hunting for the file now
   for each_path in clean_search_path:

      # if the file exists, we're done
      if os.path.isfile(os.path.join(each_path, file_name)):
         file_name = os.path.realpath(os.path.join(each_path, file_name))
         break

   # it didn't exist after all, so use the default path instead
   if not os.path.isabs(file_name):
      file_name = os.path.join(default_dir, file_name)
   if not os.path.isabs(file_name):
      file_name = os.path.join(root_path, file_name)

   # normalize the resulting file path and hand it back
   return os.path.realpath(file_name)

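# A hypothetical call with made-up paths, for illustration: when no universe
# is supplied, relative entries in search_path are resolved against root_path
# and the last entry doubles as the default directory:
#
#    find_file(
#       u"sample.mpy",
#       root_path=u"/usr/local/mudpy",
#       search_path=[u"data", u"etc"]
#    )
#    # returns u"/usr/local/mudpy/data/sample.mpy" if that file exists,
#    # otherwise u"/usr/local/mudpy/etc/sample.mpy" (the default guess)
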
def makelist(value):
   u"""Turn a string into a list."""
   if value[0] + value[-1] == u"[]": return eval(value)
   elif value[0] + value[-1] == u"\"\"": return [value[1:-1]]
   else: return [value]

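# Illustrative examples of the value formats makelist() accepts, derived from
# the branches above:
#
#    makelist(u'[ "wizard", "bard" ]')   # -> ["wizard", "bard"]
#    makelist(u'"wizard"')               # -> [u"wizard"]
#    makelist(u"wizard")                 # -> [u"wizard"]
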
def makedict(value):
   u"""Turn a string into a dict."""
   if value[0] + value[-1] == u"{}": return eval(value)
   elif value.find(u":") > 0: return eval(u"{" + value + u"}")
   else: return {value: None}
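# And similarly for makedict():
#
#    makedict(u'{ "account": "account.mpy" }')   # -> {"account": "account.mpy"}
#    makedict(u'"account": "account.mpy"')       # -> {"account": "account.mpy"}
#    makedict(u"account")                        # -> {u"account": None}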