comparison roundup/test/memorydb.py @ 6360:a77a7d04ed23

Move memorydb from test to roundup/test .. to allow regression-testing in tracker instances without copying code.
author Ralf Schlatterbeck <rsc@runtux.com>
date Tue, 30 Mar 2021 09:10:46 +0200
parents test/memorydb.py@1e53e3ad9e5c
children 58817c3bf471
comparison
equal deleted inserted replaced
6359:2d42a308927b 6360:a77a7d04ed23
1 '''Implement an in-memory hyperdb for testing purposes.
2 '''
3
4 import shutil
5 import os
6
7 from roundup import date
8 from roundup import hyperdb
9 from roundup import roundupdb
10 from roundup import security
11 from roundup import password
12 from roundup import configuration
13 from roundup.backends import back_anydbm
14 from roundup.backends import indexer_dbm
15 from roundup.backends import sessions_dbm
16 from roundup.backends import indexer_common
17 from roundup.support import ensureParentsExist
18 from roundup.anypy.strings import s2b
19
def new_config(debug=False):
    """Build a CoreConfig pointed at the classic template, for test use.

    debug -- when true, raise the logging level to DEBUG.
    """
    cfg = configuration.CoreConfig()
    cfg.detectors = configuration.UserConfig("share/roundup/templates/classic/detectors/config.ini")
    cfg.ext = configuration.UserConfig("share/roundup/templates/classic/extensions/config.ini")
    cfg.DATABASE = "db"
    if debug:
        cfg.LOGGING_LEVEL = "DEBUG"
    # these TRACKER_WEB and MAIL_DOMAIN values are used in mailgw tests
    cfg.MAIL_DOMAIN = "your.tracker.email.domain.example"
    cfg.TRACKER_WEB = "http://tracker.example/cgi-bin/roundup.cgi/bugs/"
    return cfg
32
def _exec_file(path, env):
    """Compile and exec the Python file at *path* in namespace *env*.

    Uses a context manager so the file handle is closed promptly; the
    original code left ``open(path).read()`` handles to the garbage
    collector.
    """
    with open(path) as f:
        src = f.read()
    exec(compile(src, path, 'exec'), env)

def create(journaltag, create=True, debug=False):
    """Create and populate an in-memory test database.

    Loads the classic template schema, initial data, and detectors into
    a fresh memorydb Database.

    journaltag -- user name recorded as the actor in journal entries
    create     -- when true, also create a test user "fred"
    debug      -- enable DEBUG logging in the generated config
    """
    db = Database(new_config(debug), journaltag)

    thisdir = os.path.dirname(__file__)

    # load standard schema; inject our memory-backed class types so the
    # template's Class/FileClass/IssueClass calls build memorydb classes
    schema = os.path.join(thisdir,
        '../share/roundup/templates/classic/schema.py')
    env = hyperdb.__dict__
    env['Class'] = Class
    env['FileClass'] = FileClass
    env['IssueClass'] = IssueClass
    env['db'] = db
    _exec_file(schema, env)

    # load initial data (admin/anonymous users etc.)
    initial_data = os.path.join(thisdir,
        '../share/roundup/templates/classic/initial_data.py')
    env = dict(db=db, admin_email='admin@test.com',
        adminpw=password.Password('sekrit'))
    _exec_file(initial_data, env)

    # load standard detectors
    dirname = os.path.join(thisdir,
        '../share/roundup/templates/classic/detectors')
    for fn in os.listdir(dirname):
        if not fn.endswith('.py'):
            continue
        env = {}
        _exec_file(os.path.join(dirname, fn), env)
        env['init'](db)

    # the tx_Source detector lives next to this module
    env = {}
    _exec_file(os.path.join(thisdir, "tx_Source_detector.py"), env)
    env['init'](db)

    if create:
        db.user.create(username="fred", roles='User',
            password=password.Password('sekrit'), address='fred@example.com')

    db.security.addPermissionToRole('User', 'Email Access')
    return db
114
class cldb(dict):
    """A dict whose keys are normalised to bytes via s2b.

    Stands in for a per-class anydbm store in the in-memory backend.
    """
    def __init__(self, **values):
        dict.__init__(self)
        for k in values:
            dict.__setitem__(self, s2b(k), values[k])

    def __getitem__(self, key):
        return dict.__getitem__(self, s2b(key))

    def __setitem__(self, key, value):
        return dict.__setitem__(self, s2b(key), value)

    def __delitem__(self, key):
        return dict.__delitem__(self, s2b(key))

    def __contains__(self, key):
        return dict.__contains__(self, s2b(key))

    def close(self):
        # real backends close a dbm file here; nothing to do in memory
        pass
130
class BasicDatabase(dict):
    """Provide a nice encapsulation of an anydbm store.

    Keys are id strings (normalised to bytes via s2b), values are
    automatically marshalled data.
    """
    def __init__(self, **values):
        dict.__init__(self)
        for k in values:
            dict.__setitem__(self, s2b(k), values[k])

    def __getitem__(self, key):
        # auto-vivify: reading a missing id creates an empty record
        if key not in self:
            fresh = {}
            self[key] = fresh
            return fresh
        return dict.__getitem__(self, s2b(key))

    def __setitem__(self, key, value):
        return dict.__setitem__(self, s2b(key), value)

    def __delitem__(self, key):
        return dict.__delitem__(self, s2b(key))

    def __contains__(self, key):
        return dict.__contains__(self, s2b(key))

    def exists(self, infoid):
        return infoid in self

    def get(self, infoid, value, default=None):
        # note: deliberately shadows dict.get with a two-level lookup
        return self[infoid].get(value, default)

    def getall(self, infoid):
        if infoid not in self:
            raise KeyError(infoid)
        return self[infoid]

    def set(self, infoid, **newvalues):
        self[infoid].update(newvalues)

    def list(self):
        return list(self.keys())

    def destroy(self, infoid):
        del self[infoid]

    def commit(self):
        pass

    def close(self):
        pass

    def updateTimestamp(self, sessid):
        pass

    def clean(self):
        pass
173
class Sessions(BasicDatabase, sessions_dbm.Sessions):
    # In-memory session store: BasicDatabase supplies the storage,
    # sessions_dbm.Sessions supplies the session API.
    name = 'sessions'
176
class OneTimeKeys(BasicDatabase, sessions_dbm.Sessions):
    # In-memory one-time-key store.
    # NOTE(review): the base is sessions_dbm.Sessions, not a OneTimeKeys
    # class -- BasicDatabase provides the storage either way; confirm
    # this is intentional.
    name = 'otks'
179
class Indexer(indexer_dbm.Indexer):
    # Full-text indexer keeping its word lists purely in memory.
    def __init__(self, db):
        # skip indexer_dbm's on-disk setup; initialise the common base only
        indexer_common.Indexer.__init__(self, db)
        self.reindex = 0
        self.quiet = 9
        self.changed = 0

    def load_index(self, reload=0, wordlist=None):
        # Unless reload is indicated, do not load twice
        if self.index_loaded() and not reload:
            return 0
        # fresh, empty in-memory index structures
        self.words = {}
        self.files = {'_TOP': (0, None)}
        self.fileids = {}
        self.changed = 0

    def save_index(self):
        # nothing to persist for an in-memory index
        pass

    def force_reindex(self):
        # TODO I'm concerned that force_reindex may not be tested by
        # testForcedReindexing if the functionality can just be removed
        pass
202
class Database(back_anydbm.Database):
    """A database for storing records containing flexible data types.

    Transaction stuff TODO:

    - check the timestamp of the class file and nuke the cache if it's
      modified. Do some sort of conflict checking on the dirty stuff.
    - perhaps detect write collisions (related to above)?
    """

    # identifies this backend to the rest of roundup
    dbtype = "memorydb"

    def __init__(self, config, journaltag=None):
        """Set up the purely in-memory stores.

        config     -- a roundup CoreConfig instance
        journaltag -- user name recorded as the actor in journal entries
        """
        self.config, self.journaltag = config, journaltag
        self.classes = {}       # classname -> Class instance
        self.items = {}         # classname -> cldb of nodes
        self.ids = {}           # classname -> highest allocated id (int)
        self.journals = {}      # classname -> nodeid -> list of entries
        self.files = {}         # (classname, nodeid, prop) -> content
        self.tx_files = {}      # uncommitted file content
        self.security = security.Security(self)
        self.stats = {'cache_hits': 0, 'cache_misses': 0, 'get_items': 0,
            'filtering': 0}
        self.sessions = Sessions()
        self.otks = OneTimeKeys()
        self.indexer = Indexer(self)

        # anydbm bits
        self.cache = {}         # cache of nodes loaded or created
        self.dirtynodes = {}    # keep track of the dirty nodes by class
        self.newnodes = {}      # keep track of the new nodes by class
        self.destroyednodes = {}# keep track of the destroyed nodes by class
        self.transactions = []
        self.tx_Source = None

    def filename(self, classname, nodeid, property=None, create=0):
        """Return an on-disk filename for node content.

        The memory backend has no real files, so hand back a throwaway
        copy of this module as a stand-in.
        """
        shutil.copyfile(__file__, __file__+'.dummy')
        return __file__+'.dummy'

    def filesize(self, classname, nodeid, property=None, create=0):
        """Return the size in bytes of a stored file property."""
        return len(self.getfile(classname, nodeid, property))

    def post_init(self):
        super(Database, self).post_init()

    def refresh_database(self):
        # nothing persistent to refresh
        pass

    def getSessionManager(self):
        return self.sessions

    def getOTKManager(self):
        return self.otks

    def reindex(self, classname=None, show_progress=False):
        # the in-memory indexer is always up to date
        pass

    def __repr__(self):
        return '<memorydb instance at %x>'%id(self)

    def storefile(self, classname, nodeid, property, content):
        """Queue *content* for storage; committed by doStoreFile."""
        if isinstance(content, str):
            content = s2b(content)
        self.tx_files[classname, nodeid, property] = content
        self.transactions.append((self.doStoreFile, (classname, nodeid,
            property)))

    def getfile(self, classname, nodeid, property):
        """Get file content, preferring uncommitted transaction data."""
        if (classname, nodeid, property) in self.tx_files:
            return self.tx_files[classname, nodeid, property]
        return self.files[classname, nodeid, property]

    def doStoreFile(self, classname, nodeid, property, **databases):
        """Commit a queued file store."""
        self.files[classname, nodeid, property] = self.tx_files[classname, nodeid, property]
        return (classname, nodeid)

    def rollbackStoreFile(self, classname, nodeid, property, **databases):
        """Discard a queued file store."""
        del self.tx_files[classname, nodeid, property]

    def numfiles(self):
        """Number of stored files, committed plus pending."""
        return len(self.files) + len(self.tx_files)

    def close(self):
        self.clearCache()
        self.tx_files = {}
        # kill the schema too
        self.classes = {}
        # just keep the .items

    #
    # Classes
    #
    def __getattr__(self, classname):
        """A convenient way of calling self.getclass(classname)."""
        if classname in self.classes:
            return self.classes[classname]
        raise AttributeError(classname)

    def addclass(self, cl):
        """Register class *cl*, creating its item store and permissions."""
        cn = cl.classname
        if cn in self.classes:
            raise ValueError('Class "%s" already defined.'%cn)
        self.classes[cn] = cl
        if cn not in self.items:
            self.items[cn] = cldb()
            self.ids[cn] = 0

        # add default Edit and View permissions
        self.security.addPermission(name="Create", klass=cn,
            description="User is allowed to create "+cn)
        self.security.addPermission(name="Edit", klass=cn,
            description="User is allowed to edit "+cn)
        self.security.addPermission(name="View", klass=cn,
            description="User is allowed to access "+cn)

    def getclasses(self):
        """Return a list of the names of all existing classes."""
        return sorted(self.classes.keys())

    def getclass(self, classname):
        """Get the Class object representing a particular class.

        If 'classname' is not a valid class name, a KeyError is raised.
        """
        try:
            return self.classes[classname]
        except KeyError:
            raise KeyError('There is no class called "%s"'%classname)

    #
    # Class DBs
    #
    def clear(self):
        self.items = {}

    def getclassdb(self, classname, mode='r'):
        """ grab a connection to the class db that will be used for
        multiple actions
        """
        return self.items[classname]

    def getCachedJournalDB(self, classname):
        return self.journals.setdefault(classname, {})

    #
    # Node IDs
    #
    def newid(self, classname):
        """Allocate and return the next id (as a string) for a class."""
        self.ids[classname] += 1
        return str(self.ids[classname])

    def setid(self, classname, id):
        """Force the id counter for a class (used when importing)."""
        self.ids[classname] = int(id)

    #
    # Journal
    #
    def doSaveJournal(self, classname, nodeid, action, params, creator,
            creation):
        """Append one journal entry, defaulting creator/creation."""
        if creator is None:
            creator = self.getuid()
        if creation is None:
            creation = date.Date()
        self.journals.setdefault(classname, {}).setdefault(nodeid,
            []).append((nodeid, creation, creator, action, params))

    def doSetJournal(self, classname, nodeid, journal):
        """Replace the whole journal for a node."""
        self.journals.setdefault(classname, {})[nodeid] = journal

    def getjournal(self, classname, nodeid):
        """Return the journal for a node, including uncommitted entries.

        Raises IndexError if the node has no journal at all.
        """
        # our journal result
        res = []

        # add any journal entries for transactions not committed to the
        # database
        for method, args in self.transactions:
            if method != self.doSaveJournal:
                continue
            (cache_classname, cache_nodeid, cache_action, cache_params,
                cache_creator, cache_creation) = args
            if cache_classname == classname and cache_nodeid == nodeid:
                if not cache_creator:
                    cache_creator = self.getuid()
                if not cache_creation:
                    cache_creation = date.Date()
                res.append((cache_nodeid, cache_creation, cache_creator,
                    cache_action, cache_params))
        try:
            res += self.journals.get(classname, {})[nodeid]
        except KeyError:
            if res: return res
            raise IndexError(nodeid)
        return res

    def pack(self, pack_before):
        """ Delete all journal entries except "create" before 'pack_before'.
        """
        pack_before = pack_before.serialise()
        for classname in self.journals:
            db = self.journals[classname]
            for key in db:
                # get the journal for this db entry
                l = []
                for entry in db[key]:
                    # unpack the entry into locals; the original code
                    # unpacked the tag into self.journaltag, clobbering
                    # the database's journal tag as a side effect of
                    # packing -- fixed to a local variable here
                    (nodeid, date_stamp, journaltag, action,
                        params) = entry
                    date_stamp = date_stamp.serialise()
                    # if the entry is after the pack date, _or_ the initial
                    # create entry, then it stays
                    if date_stamp > pack_before or action == 'create':
                        l.append(entry)
                db[key] = l
416
class Class(back_anydbm.Class):
    # plain hyperdb class backed by the in-memory store; all behaviour
    # is inherited from the anydbm implementation
    pass
419
class FileClass(back_anydbm.FileClass):
    """FileClass whose content lives in the db.files mapping."""

    def __init__(self, db, classname, **properties):
        """Ensure 'content' and 'type' properties exist, then init."""
        if 'content' not in properties:
            properties['content'] = hyperdb.String(indexme='yes')
        if 'type' not in properties:
            properties['type'] = hyperdb.String()
        back_anydbm.Class.__init__(self, db, classname, **properties)

    def export_files(self, dirname, nodeid):
        """Write this node's content out to its export filename."""
        dest = self.exportFilename(dirname, nodeid)
        ensureParentsExist(dest)
        # 'with' guarantees the handle is closed even if the write
        # raises (the original relied on an explicit close())
        with open(dest, 'wb') as f:
            f.write(self.db.files[self.classname, nodeid, None])

    def import_files(self, dirname, nodeid):
        """Read content back from the export file and re-index it."""
        source = self.exportFilename(dirname, nodeid)
        with open(source, 'rb') as f:
            self.db.files[self.classname, nodeid, None] = f.read()
        mime_type = None
        props = self.getprops()
        if 'type' in props:
            mime_type = self.get(nodeid, 'type')
        if not mime_type:
            mime_type = self.default_mime_type
        if props['content'].indexme:
            self.db.indexer.add_text((self.classname, nodeid, 'content'),
                self.get(nodeid, 'content'), mime_type)
449
450 # deviation from spec - was called ItemClass
class IssueClass(Class, roundupdb.IssueClass):
    # Overridden methods:
    def __init__(self, db, classname, **properties):
        """The newly-created class automatically includes the "messages",
        "files", "nosy", and "superseder" properties. If the 'properties'
        dictionary attempts to specify any of these properties or a
        "creation" or "activity" property, a ValueError is raised.
        """
        # fill in each missing property in the same order as before;
        # factories keep construction lazy (only built when absent)
        for prop, make in (
                ('title', lambda: hyperdb.String(indexme='yes')),
                ('messages', lambda: hyperdb.Multilink("msg")),
                ('files', lambda: hyperdb.Multilink("file")),
                # note: journalling of nosy is turned off as it really
                # just wastes space. this behaviour may be overridden
                # in an instance
                ('nosy', lambda: hyperdb.Multilink("user", do_journal="no")),
                ('superseder', lambda: hyperdb.Multilink(classname))):
            if prop not in properties:
                properties[prop] = make()
        Class.__init__(self, db, classname, **properties)
472
473 # vim: set et sts=4 sw=4 :

Roundup Issue Tracker: http://roundup-tracker.org/