Mercurial > p > roundup > code
comparison roundup/admin.py @ 7250:ee972b3073cb
alphabetize all do_X methods.
Makes it easier to figure out where the method is defined and provides
a structure for adding new methods.
No functional changes; just moving existing functions around.
| author | John Rouillard <rouilj@ieee.org> |
|---|---|
| date | Wed, 12 Apr 2023 09:52:51 -0400 |
| parents | bd2dc1484b39 |
| children | 9c067ed4568b |
comparison
equal
deleted
inserted
replaced
| 7249:bd2dc1484b39 | 7250:ee972b3073cb |
|---|---|
| 264 print(_('%s:') % name) | 264 print(_('%s:') % name) |
| 265 print(' ', _(command.__doc__)) | 265 print(' ', _(command.__doc__)) |
| 266 | 266 |
| 267 nl_re = re.compile('[\r\n]') | 267 nl_re = re.compile('[\r\n]') |
| 268 # indent_re defined above | 268 # indent_re defined above |
| 269 | |
| 270 def do_help(self, args, nl_re=nl_re, indent_re=indent_re): | |
| 271 ''"""Usage: help topic | |
| 272 Give help about topic. | |
| 273 | |
| 274 commands -- list commands | |
| 275 <command> -- help specific to a command | |
| 276 initopts -- init command options | |
| 277 all -- all available help | |
| 278 """ | |
| 279 if len(args) > 0: | |
| 280 topic = args[0] | |
| 281 else: | |
| 282 topic = 'help' | |
| 283 | |
| 284 # try help_ methods | |
| 285 if topic in self.help: | |
| 286 self.help[topic]() | |
| 287 return 0 | |
| 288 | |
| 289 # try command docstrings | |
| 290 try: | |
| 291 cmd_docs = self.commands.get(topic) | |
| 292 except KeyError: | |
| 293 print(_('Sorry, no help for "%(topic)s"') % locals()) | |
| 294 return 1 | |
| 295 | |
| 296 # display the help for each match, removing the docstring indent | |
| 297 for _name, help in cmd_docs: | |
| 298 lines = nl_re.split(_(help.__doc__)) | |
| 299 print(lines[0]) | |
| 300 indent = indent_re.match(lines[1]) | |
| 301 if indent: indent = len(indent.group(1)) # noqa: E701 | |
| 302 for line in lines[1:]: | |
| 303 if indent: | |
| 304 print(line[indent:]) | |
| 305 else: | |
| 306 print(line) | |
| 307 return 0 | |
| 308 | 269 |
| 309 def listTemplates(self, trace_search=False): | 270 def listTemplates(self, trace_search=False): |
| 310 """ List all the available templates. | 271 """ List all the available templates. |
| 311 | 272 |
| 312 Look in the following places, where the later rules take precedence: | 273 Look in the following places, where the later rules take precedence: |
| 408 print(_('Templates:'), ', '.join(templates)) | 369 print(_('Templates:'), ', '.join(templates)) |
| 409 import roundup.backends | 370 import roundup.backends |
| 410 backends = roundup.backends.list_backends() | 371 backends = roundup.backends.list_backends() |
| 411 print(_('Back ends:'), ', '.join(backends)) | 372 print(_('Back ends:'), ', '.join(backends)) |
| 412 | 373 |
| 413 def do_install(self, tracker_home, args): | |
| 414 ''"""Usage: install [template [backend [key=val[,key=val]]]] | |
| 415 Install a new Roundup tracker. | |
| 416 | |
| 417 The command will prompt for the tracker home directory | |
| 418 (if not supplied through TRACKER_HOME or the -i option). | |
| 419 The template and backend may be specified on the command-line | |
| 420 as arguments, in that order. | |
| 421 | |
| 422 Command line arguments following the backend allows you to | |
| 423 pass initial values for config options. For example, passing | |
| 424 "web_http_auth=no,rdbms_user=dinsdale" will override defaults | |
| 425 for options http_auth in section [web] and user in section [rdbms]. | |
| 426 Please be careful to not use spaces in this argument! (Enclose | |
| 427 whole argument in quotes if you need spaces in option value). | |
| 428 | |
| 429 The initialise command must be called after this command in order | |
| 430 to initialise the tracker's database. You may edit the tracker's | |
| 431 initial database contents before running that command by editing | |
| 432 the tracker's dbinit.py module init() function. | |
| 433 | |
| 434 See also initopts help. | |
| 435 """ | |
| 436 if len(args) < 1: | |
| 437 raise UsageError(_('Not enough arguments supplied')) | |
| 438 | |
| 439 # make sure the tracker home can be created | |
| 440 tracker_home = os.path.abspath(tracker_home) | |
| 441 parent = os.path.split(tracker_home)[0] | |
| 442 if not os.path.exists(parent): | |
| 443 raise UsageError(_('Instance home parent directory "%(parent)s"' | |
| 444 ' does not exist') % locals()) | |
| 445 | |
| 446 config_ini_file = os.path.join(tracker_home, CoreConfig.INI_FILE) | |
| 447 # check for both old- and new-style configs | |
| 448 if list(filter(os.path.exists, [config_ini_file, | |
| 449 os.path.join(tracker_home, 'config.py')])): | |
| 450 if not self.force: | |
| 451 ok = self.my_input(_( | |
| 452 """WARNING: There appears to be a tracker in "%(tracker_home)s"! | |
| 453 If you re-install it, you will lose all the data! | |
| 454 Erase it? Y/N: """) % locals()) # noqa: E122 | |
| 455 if ok.strip().lower() != 'y': | |
| 456 return 0 | |
| 457 | |
| 458 # clear it out so the install isn't confused | |
| 459 shutil.rmtree(tracker_home) | |
| 460 | |
| 461 # select template | |
| 462 templates = self.listTemplates() | |
| 463 template = self._get_choice( | |
| 464 list_name=_('Templates:'), | |
| 465 prompt=_('Select template'), | |
| 466 options=templates, | |
| 467 argument=len(args) > 1 and args[1] or '', | |
| 468 default='classic') | |
| 469 | |
| 470 # select hyperdb backend | |
| 471 import roundup.backends | |
| 472 backends = roundup.backends.list_backends() | |
| 473 backend = self._get_choice( | |
| 474 list_name=_('Back ends:'), | |
| 475 prompt=_('Select backend'), | |
| 476 options=backends, | |
| 477 argument=len(args) > 2 and args[2] or '', | |
| 478 default='anydbm') | |
| 479 # XXX perform a unit test based on the user's selections | |
| 480 | |
| 481 # Process configuration file definitions | |
| 482 if len(args) > 3: | |
| 483 try: | |
| 484 defns = dict([item.split("=") for item in args[3].split(",")]) | |
| 485 except Exception: | |
| 486 print(_('Error in configuration settings: "%s"') % args[3]) | |
| 487 raise | |
| 488 else: | |
| 489 defns = {} | |
| 490 | |
| 491 defns['rdbms_backend'] = backend | |
| 492 | |
| 493 # load config_ini.ini from template if it exists. | |
| 494 # it sets parameters like template_engine that are | |
| 495 # template specific. | |
| 496 template_config = UserConfig(templates[template]['path'] + | |
| 497 "/config_ini.ini") | |
| 498 for k in template_config.keys(): | |
| 499 if k == 'HOME': # ignore home. It is a default param. | |
| 500 continue | |
| 501 defns[k] = template_config[k] | |
| 502 | |
| 503 # install! | |
| 504 init.install(tracker_home, templates[template]['path'], settings=defns) | |
| 505 | |
| 506 # Remove config_ini.ini file from tracker_home (not template dir). | |
| 507 # Ignore file not found - not all templates have | |
| 508 # config_ini.ini files. | |
| 509 try: | |
| 510 os.remove(tracker_home + "/config_ini.ini") | |
| 511 except OSError as e: # FileNotFound exception under py3 | |
| 512 if e.errno == 2: | |
| 513 pass | |
| 514 else: | |
| 515 raise | |
| 516 | |
| 517 print(_(""" | |
| 518 --------------------------------------------------------------------------- | |
| 519 You should now edit the tracker configuration file: | |
| 520 %(config_file)s""") % {"config_file": config_ini_file}) | |
| 521 | |
| 522 # find list of options that need manual adjustments | |
| 523 # XXX config._get_unset_options() is marked as private | |
| 524 # (leading underscore). make it public or don't care? | |
| 525 need_set = CoreConfig(tracker_home)._get_unset_options() | |
| 526 if need_set: | |
| 527 print(_(" ... at a minimum, you must set following options:")) | |
| 528 for section in need_set: | |
| 529 print(" [%s]: %s" % (section, ", ".join(need_set[section]))) | |
| 530 | |
| 531 # note about schema modifications | |
| 532 print(_(""" | |
| 533 If you wish to modify the database schema, | |
| 534 you should also edit the schema file: | |
| 535 %(database_config_file)s | |
| 536 You may also change the database initialisation file: | |
| 537 %(database_init_file)s | |
| 538 ... see the documentation on customizing for more information. | |
| 539 | |
| 540 You MUST run the "roundup-admin initialise" command once you've performed | |
| 541 the above steps. | |
| 542 --------------------------------------------------------------------------- | |
| 543 """) % {'database_config_file': os.path.join(tracker_home, 'schema.py'), | |
| 544 'database_init_file': os.path.join(tracker_home, 'initial_data.py')}) \ | |
| 545 # noqa: E122 | |
| 546 return 0 | |
| 547 | |
| 548 def _get_choice(self, list_name, prompt, options, argument, default=None): | 374 def _get_choice(self, list_name, prompt, options, argument, default=None): |
| 549 if default is None: | 375 if default is None: |
| 550 default = options[0] # just pick the first one | 376 default = options[0] # just pick the first one |
| 551 if argument in options: | 377 if argument in options: |
| 552 return argument | 378 return argument |
| 557 argument = self.my_input('%s [%s]: ' % (prompt, default)) | 383 argument = self.my_input('%s [%s]: ' % (prompt, default)) |
| 558 if not argument: | 384 if not argument: |
| 559 return default | 385 return default |
| 560 return argument | 386 return argument |
| 561 | 387 |
| 388 def do_commit(self, args): | |
| 389 ''"""Usage: commit | |
| 390 Commit changes made to the database during an interactive session. | |
| 391 | |
| 392 The changes made during an interactive session are not | |
| 393 automatically written to the database - they must be committed | |
| 394 using this command. | |
| 395 | |
| 396 One-off commands on the command-line are automatically committed if | |
| 397 they are successful. | |
| 398 """ | |
| 399 self.db.commit() | |
| 400 self.db_uncommitted = False | |
| 401 return 0 | |
| 402 | |
| 403 def do_create(self, args): | |
| 404 ''"""Usage: create classname property=value ... | |
| 405 Create a new entry of a given class. | |
| 406 | |
| 407 This creates a new entry of the given class using the property | |
| 408 name=value arguments provided on the command line after the "create" | |
| 409 command. | |
| 410 """ | |
| 411 if len(args) < 1: | |
| 412 raise UsageError(_('Not enough arguments supplied')) | |
| 413 from roundup import hyperdb | |
| 414 | |
| 415 classname = args[0] | |
| 416 | |
| 417 # get the class | |
| 418 cl = self.get_class(classname) | |
| 419 | |
| 420 # now do a create | |
| 421 props = {} | |
| 422 properties = cl.getprops(protected=0) | |
| 423 if len(args) == 1: | |
| 424 # ask for the properties | |
| 425 for key in properties: | |
| 426 if key == 'id': continue # noqa: E701 | |
| 427 value = properties[key] | |
| 428 name = value.__class__.__name__ | |
| 429 if isinstance(value, hyperdb.Password): | |
| 430 again = None | |
| 431 while value != again: | |
| 432 value = getpass.getpass(_('%(propname)s (Password): ') | |
| 433 % | |
| 434 {'propname': key.capitalize()}) | |
| 435 again = getpass.getpass(_(' %(propname)s (Again): ') | |
| 436 % | |
| 437 {'propname': key.capitalize()}) | |
| 438 if value != again: | |
| 439 print(_('Sorry, try again...')) | |
| 440 if value: | |
| 441 props[key] = value | |
| 442 else: | |
| 443 value = self.my_input(_('%(propname)s (%(proptype)s): ') % { | |
| 444 'propname': key.capitalize(), 'proptype': name}) | |
| 445 if value: | |
| 446 props[key] = value | |
| 447 else: | |
| 448 props = self.props_from_args(args[1:]) | |
| 449 | |
| 450 # convert types | |
| 451 for propname in props: | |
| 452 try: | |
| 453 props[propname] = hyperdb.rawToHyperdb(self.db, cl, None, | |
| 454 propname, | |
| 455 props[propname]) | |
| 456 except hyperdb.HyperdbValueError as message: | |
| 457 raise UsageError(message) | |
| 458 | |
| 459 # check for the key property | |
| 460 propname = cl.getkey() | |
| 461 if propname and propname not in props: | |
| 462 raise UsageError(_('you must provide the "%(propname)s" ' | |
| 463 'property.') % locals()) | |
| 464 | |
| 465 # do the actual create | |
| 466 try: | |
| 467 sys.stdout.write(cl.create(**props) + '\n') | |
| 468 except (TypeError, IndexError, ValueError) as message: | |
| 469 raise UsageError(message) | |
| 470 self.db_uncommitted = True | |
| 471 return 0 | |
| 472 | |
| 473 def do_display(self, args): | |
| 474 ''"""Usage: display designator[,designator]* | |
| 475 | |
| 476 Show the property values for the given node(s). | |
| 477 | |
| 478 A designator is a classname and a nodeid concatenated, | |
| 479 eg. bug1, user10, ... | |
| 480 | |
| 481 This lists the properties and their associated values | |
| 482 for the given node. | |
| 483 """ | |
| 484 if len(args) < 1: | |
| 485 raise UsageError(_('Not enough arguments supplied')) | |
| 486 | |
| 487 # decode the node designator | |
| 488 for designator in args[0].split(','): | |
| 489 try: | |
| 490 classname, nodeid = hyperdb.splitDesignator(designator) | |
| 491 except hyperdb.DesignatorError as message: | |
| 492 raise UsageError(message) | |
| 493 | |
| 494 # get the class | |
| 495 cl = self.get_class(classname) | |
| 496 | |
| 497 # display the values | |
| 498 keys = sorted(cl.properties) | |
| 499 for key in keys: | |
| 500 value = cl.get(nodeid, key) | |
| 501 print(_('%(key)s: %(value)s') % locals()) | |
| 502 | |
| 503 def do_export(self, args, export_files=True): | |
| 504 ''"""Usage: export [[-]class[,class]] export_dir | |
| 505 Export the database to colon-separated-value files. | |
| 506 To exclude the files (e.g. for the msg or file class), | |
| 507 use the exporttables command. | |
| 508 | |
| 509 Optionally limit the export to just the named classes | |
| 510 or exclude the named classes, if the 1st argument starts with '-'. | |
| 511 | |
| 512 This action exports the current data from the database into | |
| 513 colon-separated-value files that are placed in the nominated | |
| 514 destination directory. | |
| 515 """ | |
| 516 # grab the directory to export to | |
| 517 if len(args) < 1: | |
| 518 raise UsageError(_('Not enough arguments supplied')) | |
| 519 | |
| 520 dir = args[-1] | |
| 521 | |
| 522 # get the list of classes to export | |
| 523 if len(args) == 2: | |
| 524 if args[0].startswith('-'): | |
| 525 classes = [c for c in self.db.classes | |
| 526 if c not in args[0][1:].split(',')] | |
| 527 else: | |
| 528 classes = args[0].split(',') | |
| 529 else: | |
| 530 classes = self.db.classes | |
| 531 | |
| 532 class colon_separated(csv.excel): | |
| 533 delimiter = ':' | |
| 534 | |
| 535 # make sure target dir exists | |
| 536 if not os.path.exists(dir): | |
| 537 os.makedirs(dir) | |
| 538 | |
| 539 # maximum csv field length exceeding configured size? | |
| 540 max_len = self.db.config.CSV_FIELD_SIZE | |
| 541 | |
| 542 # do all the classes specified | |
| 543 for classname in classes: | |
| 544 cl = self.get_class(classname) | |
| 545 | |
| 546 if not export_files and hasattr(cl, 'export_files'): | |
| 547 sys.stdout.write('Exporting %s WITHOUT the files\r\n' % | |
| 548 classname) | |
| 549 | |
| 550 with open(os.path.join(dir, classname+'.csv'), 'w') as f: | |
| 551 writer = csv.writer(f, colon_separated) | |
| 552 | |
| 553 propnames = cl.export_propnames() | |
| 554 fields = propnames[:] | |
| 555 fields.append('is retired') | |
| 556 writer.writerow(fields) | |
| 557 | |
| 558 # If a node has a key, sort all nodes by key | |
| 559 # with retired nodes first. Retired nodes | |
| 560 # must occur before a non-retired node with | |
| 561 # the same key. Otherwise you get an | |
| 562 # IntegrityError: UNIQUE constraint failed: | |
| 563 # _class.__retired__, _<class>._<keyname> | |
| 564 # on imports to rdbms. | |
| 565 all_nodes = cl.getnodeids() | |
| 566 | |
| 567 classkey = cl.getkey() | |
| 568 if classkey: # False sorts before True, so negate is_retired | |
| 569 keysort = lambda i: (cl.get(i, classkey), # noqa: E731 | |
| 570 not cl.is_retired(i)) | |
| 571 all_nodes.sort(key=keysort) | |
| 572 # if there is no classkey no need to sort | |
| 573 | |
| 574 for nodeid in all_nodes: | |
| 575 if self.verbose: | |
| 576 sys.stdout.write('\rExporting %s - %s' % | |
| 577 (classname, nodeid)) | |
| 578 sys.stdout.flush() | |
| 579 node = cl.getnode(nodeid) | |
| 580 exp = cl.export_list(propnames, nodeid) | |
| 581 lensum = sum([len(repr_export(node[p])) for | |
| 582 p in propnames]) | |
| 583 # for a safe upper bound of field length we add | |
| 584 # difference between CSV len and sum of all field lengths | |
| 585 d = sum([len(x) for x in exp]) - lensum | |
| 586 if not d > 0: | |
| 587 raise AssertionError("Bad assertion d > 0") | |
| 588 for p in propnames: | |
| 589 ll = len(repr_export(node[p])) + d | |
| 590 if ll > max_len: | |
| 591 max_len = ll | |
| 592 writer.writerow(exp) | |
| 593 if export_files and hasattr(cl, 'export_files'): | |
| 594 cl.export_files(dir, nodeid) | |
| 595 | |
| 596 # export the journals | |
| 597 with open(os.path.join(dir, classname+'-journals.csv'), 'w') as jf: | |
| 598 if self.verbose: | |
| 599 sys.stdout.write("\nExporting Journal for %s\n" % | |
| 600 classname) | |
| 601 sys.stdout.flush() | |
| 602 journals = csv.writer(jf, colon_separated) | |
| 603 for row in cl.export_journals(): | |
| 604 journals.writerow(row) | |
| 605 if max_len > self.db.config.CSV_FIELD_SIZE: | |
| 606 print("Warning: config csv_field_size should be at least %s" % | |
| 607 max_len, file=sys.stderr) | |
| 608 return 0 | |
| 609 | |
| 610 def do_exporttables(self, args): | |
| 611 ''"""Usage: exporttables [[-]class[,class]] export_dir | |
| 612 Export the database to colon-separated-value files, excluding the | |
| 613 files below $TRACKER_HOME/db/files/ (which can be archived separately). | |
| 614 To include the files, use the export command. | |
| 615 | |
| 616 Optionally limit the export to just the named classes | |
| 617 or exclude the named classes, if the 1st argument starts with '-'. | |
| 618 | |
| 619 This action exports the current data from the database into | |
| 620 colon-separated-value files that are placed in the nominated | |
| 621 destination directory. | |
| 622 """ | |
| 623 return self.do_export(args, export_files=False) | |
| 624 | |
| 625 def do_filter(self, args): | |
| 626 ''"""Usage: filter classname propname=value ... | |
| 627 Find the nodes of the given class with a given property value. | |
| 628 | |
| 629 Find the nodes of the given class with a given property value. | |
| 630 Multiple values can be specified by separating them with commas. | |
| 631 If property is a string, all values must match. I.E. it's an | |
| 632 'and' operation. If the property is a link/multilink any value | |
| 633 matches. I.E. an 'or' operation. | |
| 634 """ | |
| 635 if len(args) < 1: | |
| 636 raise UsageError(_('Not enough arguments supplied')) | |
| 637 classname = args[0] | |
| 638 # get the class | |
| 639 cl = self.get_class(classname) | |
| 640 | |
| 641 # handle the propname=value argument | |
| 642 props = self.props_from_args(args[1:]) | |
| 643 | |
| 644 # convert the user-input value to a value used for filter | |
| 645 # multiple , separated values become a list | |
| 646 for propname, value in props.items(): | |
| 647 if ',' in value: | |
| 648 values = value.split(',') | |
| 649 else: | |
| 650 values = [value] | |
| 651 | |
| 652 props[propname] = [] | |
| 653 # start handling transitive props | |
| 654 # given filter issue assignedto.roles=Admin | |
| 655 # start at issue | |
| 656 curclass = cl | |
| 657 lastprop = propname # handle case 'issue assignedto=admin' | |
| 658 if '.' in propname: | |
| 659 # start splitting transitive prop into components | |
| 660 # we end when we have no more links | |
| 661 for pn in propname.split('.'): | |
| 662 try: | |
| 663 lastprop = pn # get current component | |
| 664 # get classname for this link | |
| 665 try: | |
| 666 curclassname = curclass.getprops()[pn].classname | |
| 667 except KeyError: | |
| 668 raise UsageError(_( | |
| 669 "Class %(curclassname)s has " | |
| 670 "no property %(pn)s in %(propname)s." % | |
| 671 locals())) | |
| 672 # get class object | |
| 673 curclass = self.get_class(curclassname) | |
| 674 except AttributeError: | |
| 675 # curclass.getprops()[pn].classname raises this | |
| 676 # when we are at a non link/multilink property | |
| 677 pass | |
| 678 | |
| 679 for value in values: | |
| 680 val = hyperdb.rawToHyperdb(self.db, curclass, None, | |
| 681 lastprop, value) | |
| 682 props[propname].append(val) | |
| 683 | |
| 684 # now do the filter | |
| 685 try: | |
| 686 id = [] | |
| 687 designator = [] | |
| 688 props = {"filterspec": props} | |
| 689 | |
| 690 if self.separator: | |
| 691 if self.print_designator: | |
| 692 id = cl.filter(None, **props) | |
| 693 for i in id: | |
| 694 designator.append(classname + i) | |
| 695 print(self.separator.join(designator)) | |
| 696 else: | |
| 697 print(self.separator.join(cl.filter(None, **props))) | |
| 698 else: | |
| 699 if self.print_designator: | |
| 700 id = cl.filter(None, **props) | |
| 701 for i in id: | |
| 702 designator.append(classname + i) | |
| 703 print(designator) | |
| 704 else: | |
| 705 print(cl.filter(None, **props)) | |
| 706 except KeyError: | |
| 707 raise UsageError(_('%(classname)s has no property ' | |
| 708 '"%(propname)s"') % locals()) | |
| 709 except (ValueError, TypeError) as message: | |
| 710 raise UsageError(message) | |
| 711 return 0 | |
| 712 | |
| 713 def do_find(self, args): | |
| 714 ''"""Usage: find classname propname=value ... | |
| 715 Find the nodes of the given class with a given link property value. | |
| 716 | |
| 717 Find the nodes of the given class with a given link property value. | |
| 718 The value may be either the nodeid of the linked node, or its key | |
| 719 value. | |
| 720 """ | |
| 721 if len(args) < 1: | |
| 722 raise UsageError(_('Not enough arguments supplied')) | |
| 723 classname = args[0] | |
| 724 # get the class | |
| 725 cl = self.get_class(classname) | |
| 726 | |
| 727 # handle the propname=value argument | |
| 728 props = self.props_from_args(args[1:]) | |
| 729 | |
| 730 # convert the user-input value to a value used for find() | |
| 731 for propname, value in props.items(): | |
| 732 if ',' in value: | |
| 733 values = value.split(',') | |
| 734 else: | |
| 735 values = [value] | |
| 736 d = props[propname] = {} | |
| 737 for value in values: | |
| 738 value = hyperdb.rawToHyperdb(self.db, cl, None, | |
| 739 propname, value) | |
| 740 if isinstance(value, list): | |
| 741 for entry in value: | |
| 742 d[entry] = 1 | |
| 743 else: | |
| 744 d[value] = 1 | |
| 745 | |
| 746 # now do the find | |
| 747 try: | |
| 748 id = [] | |
| 749 designator = [] | |
| 750 if self.separator: | |
| 751 if self.print_designator: | |
| 752 id = cl.find(**props) | |
| 753 for i in id: | |
| 754 designator.append(classname + i) | |
| 755 print(self.separator.join(designator)) | |
| 756 else: | |
| 757 print(self.separator.join(cl.find(**props))) | |
| 758 | |
| 759 else: | |
| 760 if self.print_designator: | |
| 761 id = cl.find(**props) | |
| 762 for i in id: | |
| 763 designator.append(classname + i) | |
| 764 print(designator) | |
| 765 else: | |
| 766 print(cl.find(**props)) | |
| 767 except KeyError: | |
| 768 raise UsageError(_('%(classname)s has no property ' | |
| 769 '"%(propname)s"') % locals()) | |
| 770 except (ValueError, TypeError) as message: | |
| 771 raise UsageError(message) | |
| 772 return 0 | |
| 773 | |
| 562 def do_genconfig(self, args, update=False): | 774 def do_genconfig(self, args, update=False): |
| 563 ''"""Usage: genconfig <filename> | 775 ''"""Usage: genconfig <filename> |
| 564 Generate a new tracker config file (ini style) with default | 776 Generate a new tracker config file (ini style) with default |
| 565 values in <filename>. | 777 values in <filename>. |
| 566 """ | 778 """ |
| 597 else: | 809 else: |
| 598 # generate default config | 810 # generate default config |
| 599 config = CoreConfig() | 811 config = CoreConfig() |
| 600 | 812 |
| 601 config.save(args[0]) | 813 config.save(args[0]) |
| 602 | |
| 603 def do_updateconfig(self, args): | |
| 604 ''"""Usage: updateconfig <filename> | |
| 605 Generate an updated tracker config file (ini style) in | |
| 606 <filename>. Use current settings from existing roundup | |
| 607 tracker in tracker home. | |
| 608 """ | |
| 609 self.do_genconfig(args, update=True) | |
| 610 | |
| 611 def do_initialise(self, tracker_home, args): | |
| 612 ''"""Usage: initialise [adminpw] | |
| 613 Initialise a new Roundup tracker. | |
| 614 | |
| 615 The administrator details will be set at this step. | |
| 616 | |
| 617 Execute the tracker's initialisation function dbinit.init() | |
| 618 """ | |
| 619 # password | |
| 620 if len(args) > 1: | |
| 621 adminpw = args[1] | |
| 622 else: | |
| 623 adminpw = '' | |
| 624 confirm = 'x' | |
| 625 while adminpw != confirm: | |
| 626 adminpw = getpass.getpass(_('Admin Password: ')) | |
| 627 confirm = getpass.getpass(_(' Confirm: ')) | |
| 628 | |
| 629 # make sure the tracker home is installed | |
| 630 if not os.path.exists(tracker_home): | |
| 631 raise UsageError(_('Instance home does not exist') % locals()) | |
| 632 try: | |
| 633 tracker = roundup.instance.open(tracker_home) | |
| 634 except roundup.instance.TrackerError: | |
| 635 raise UsageError(_('Instance has not been installed') % locals()) | |
| 636 | |
| 637 # is there already a database? | |
| 638 if tracker.exists(): | |
| 639 if not self.force: | |
| 640 ok = self.my_input(_( | |
| 641 """WARNING: The database is already initialised! | |
| 642 If you re-initialise it, you will lose all the data! | |
| 643 Erase it? Y/N: """)) # noqa: E122 | |
| 644 if ok.strip().lower() != 'y': | |
| 645 return 0 | |
| 646 | |
| 647 # nuke it | |
| 648 tracker.nuke() | |
| 649 | |
| 650 # GO | |
| 651 try: | |
| 652 tracker.init(password.Password(adminpw, config=tracker.config), | |
| 653 tx_Source='cli') | |
| 654 except OptionUnsetError as e: | |
| 655 raise UsageError("In %(tracker_home)s/config.ini - %(error)s" % { | |
| 656 'error': str(e), 'tracker_home': tracker_home}) | |
| 657 | |
| 658 return 0 | |
| 659 | 814 |
| 660 def do_get(self, args): | 815 def do_get(self, args): |
| 661 ''"""Usage: get property designator[,designator]* | 816 ''"""Usage: get property designator[,designator]* |
| 662 Get the given property of one or more designator(s). | 817 Get the given property of one or more designator(s). |
| 663 | 818 |
| 738 if self.separator: | 893 if self.separator: |
| 739 print(self.separator.join(linked_props)) | 894 print(self.separator.join(linked_props)) |
| 740 | 895 |
| 741 return 0 | 896 return 0 |
| 742 | 897 |
| 743 def do_set(self, args): | 898 def do_help(self, args, nl_re=nl_re, indent_re=indent_re): |
| 744 ''"""Usage: set items property=value property=value ... | 899 ''"""Usage: help topic |
| 745 Set the given properties of one or more items(s). | 900 Give help about topic. |
| 746 | 901 |
| 747 The items are specified as a class or as a comma-separated | 902 commands -- list commands |
| 748 list of item designators (ie "designator[,designator,...]"). | 903 <command> -- help specific to a command |
| 904 initopts -- init command options | |
| 905 all -- all available help | |
| 906 """ | |
| 907 if len(args) > 0: | |
| 908 topic = args[0] | |
| 909 else: | |
| 910 topic = 'help' | |
| 911 | |
| 912 # try help_ methods | |
| 913 if topic in self.help: | |
| 914 self.help[topic]() | |
| 915 return 0 | |
| 916 | |
| 917 # try command docstrings | |
| 918 try: | |
| 919 cmd_docs = self.commands.get(topic) | |
| 920 except KeyError: | |
| 921 print(_('Sorry, no help for "%(topic)s"') % locals()) | |
| 922 return 1 | |
| 923 | |
| 924 # display the help for each match, removing the docstring indent | |
| 925 for _name, help in cmd_docs: | |
| 926 lines = nl_re.split(_(help.__doc__)) | |
| 927 print(lines[0]) | |
| 928 indent = indent_re.match(lines[1]) | |
| 929 if indent: indent = len(indent.group(1)) # noqa: E701 | |
| 930 for line in lines[1:]: | |
| 931 if indent: | |
| 932 print(line[indent:]) | |
| 933 else: | |
| 934 print(line) | |
| 935 return 0 | |
| 936 | |
| 937 def do_history(self, args): | |
| 938 ''"""Usage: history designator [skipquiet] | |
| 939 Show the history entries of a designator. | |
| 749 | 940 |
| 750 A designator is a classname and a nodeid concatenated, | 941 A designator is a classname and a nodeid concatenated, |
| 751 eg. bug1, user10, ... | 942 eg. bug1, user10, ... |
| 752 | 943 |
| 753 This command sets the properties to the values for all | 944 Lists the journal entries viewable by the user for the |
| 754 designators given. If a class is used, the property will be | 945 node identified by the designator. If skipquiet is the |
| 755 set for all items in the class. If the value is missing | 946 second argument, journal entries for quiet properties |
| 756 (ie. "property=") then the property is un-set. If the property | 947 are not shown. |
| 757 is a multilink, you specify the linked ids for the multilink | 948 """ |
| 758 as comma-separated numbers (ie "1,2,3"). | 949 |
| 759 | |
| 760 """ | |
| 761 import copy # needed for copying props list | |
| 762 | |
| 763 if len(args) < 2: | |
| 764 raise UsageError(_('Not enough arguments supplied')) | |
| 765 from roundup import hyperdb | |
| 766 | |
| 767 designators = args[0].split(',') | |
| 768 if len(designators) == 1: | |
| 769 designator = designators[0] | |
| 770 try: | |
| 771 designator = hyperdb.splitDesignator(designator) | |
| 772 designators = [designator] | |
| 773 except hyperdb.DesignatorError: | |
| 774 cl = self.get_class(designator) | |
| 775 designators = [(designator, x) for x in cl.list()] | |
| 776 else: | |
| 777 try: | |
| 778 designators = [hyperdb.splitDesignator(x) for x in designators] | |
| 779 except hyperdb.DesignatorError as message: | |
| 780 raise UsageError(message) | |
| 781 | |
| 782 # get the props from the args | |
| 783 propset = self.props_from_args(args[1:]) # parse the cli once | |
| 784 | |
| 785 # now do the set for all the nodes | |
| 786 for classname, itemid in designators: | |
| 787 props = copy.copy(propset) # make a new copy for every designator | |
| 788 cl = self.get_class(classname) | |
| 789 | |
| 790 for key, value in list(props.items()): | |
| 791 try: | |
| 792 # You must reinitialize the props every time though. | |
| 793 # if props['nosy'] = '+admin' initally, it gets | |
| 794 # set to 'demo,admin' (assuming it was set to demo | |
| 795 # in the db) after rawToHyperdb returns. | |
| 796 # This new value is used for all the rest of the | |
| 797 # designators if not reinitalized. | |
| 798 props[key] = hyperdb.rawToHyperdb(self.db, cl, itemid, | |
| 799 key, value) | |
| 800 except hyperdb.HyperdbValueError as message: | |
| 801 raise UsageError(message) | |
| 802 | |
| 803 # try the set | |
| 804 try: | |
| 805 cl.set(itemid, **props) | |
| 806 except (TypeError, IndexError, ValueError) as message: | |
| 807 raise UsageError(message) | |
| 808 self.db_uncommitted = True | |
| 809 return 0 | |
| 810 | |
| 811 def do_filter(self, args): | |
| 812 ''"""Usage: filter classname propname=value ... | |
| 813 Find the nodes of the given class with a given property value. | |
| 814 | |
| 815 Find the nodes of the given class with a given property value. | |
| 816 Multiple values can be specified by separating them with commas. | |
| 817 If property is a string, all values must match. I.E. it's an | |
| 818 'and' operation. If the property is a link/multilink any value | |
| 819 matches. I.E. an 'or' operation. | |
| 820 """ | |
| 821 if len(args) < 1: | 950 if len(args) < 1: |
| 822 raise UsageError(_('Not enough arguments supplied')) | 951 raise UsageError(_('Not enough arguments supplied')) |
| 823 classname = args[0] | |
| 824 # get the class | |
| 825 cl = self.get_class(classname) | |
| 826 | |
| 827 # handle the propname=value argument | |
| 828 props = self.props_from_args(args[1:]) | |
| 829 | |
| 830 # convert the user-input value to a value used for filter | |
| 831 # multiple , separated values become a list | |
| 832 for propname, value in props.items(): | |
| 833 if ',' in value: | |
| 834 values = value.split(',') | |
| 835 else: | |
| 836 values = [value] | |
| 837 | |
| 838 props[propname] = [] | |
| 839 # start handling transitive props | |
| 840 # given filter issue assignedto.roles=Admin | |
| 841 # start at issue | |
| 842 curclass = cl | |
| 843 lastprop = propname # handle case 'issue assignedto=admin' | |
| 844 if '.' in propname: | |
| 845 # start splitting transitive prop into components | |
| 846 # we end when we have no more links | |
| 847 for pn in propname.split('.'): | |
| 848 try: | |
| 849 lastprop = pn # get current component | |
| 850 # get classname for this link | |
| 851 try: | |
| 852 curclassname = curclass.getprops()[pn].classname | |
| 853 except KeyError: | |
| 854 raise UsageError(_( | |
| 855 "Class %(curclassname)s has " | |
| 856 "no property %(pn)s in %(propname)s." % | |
| 857 locals())) | |
| 858 # get class object | |
| 859 curclass = self.get_class(curclassname) | |
| 860 except AttributeError: | |
| 861 # curclass.getprops()[pn].classname raises this | |
| 862 # when we are at a non link/multilink property | |
| 863 pass | |
| 864 | |
| 865 for value in values: | |
| 866 val = hyperdb.rawToHyperdb(self.db, curclass, None, | |
| 867 lastprop, value) | |
| 868 props[propname].append(val) | |
| 869 | |
| 870 # now do the filter | |
| 871 try: | 952 try: |
| 872 id = [] | 953 classname, nodeid = hyperdb.splitDesignator(args[0]) |
| 873 designator = [] | 954 except hyperdb.DesignatorError as message: |
| 874 props = {"filterspec": props} | 955 raise UsageError(message) |
| 875 | 956 |
| 876 if self.separator: | 957 skipquiet = False |
| 877 if self.print_designator: | 958 if len(args) == 2: |
| 878 id = cl.filter(None, **props) | 959 if args[1] != 'skipquiet': |
| 879 for i in id: | 960 raise UsageError("Second argument is not skipquiet") |
| 880 designator.append(classname + i) | 961 skipquiet = True |
| 881 print(self.separator.join(designator)) | 962 |
| 882 else: | 963 try: |
| 883 print(self.separator.join(cl.filter(None, **props))) | 964 print(self.db.getclass(classname).history(nodeid, |
| 884 else: | 965 skipquiet=skipquiet)) |
| 885 if self.print_designator: | |
| 886 id = cl.filter(None, **props) | |
| 887 for i in id: | |
| 888 designator.append(classname + i) | |
| 889 print(designator) | |
| 890 else: | |
| 891 print(cl.filter(None, **props)) | |
| 892 except KeyError: | 966 except KeyError: |
| 893 raise UsageError(_('%(classname)s has no property ' | 967 raise UsageError(_('no such class "%(classname)s"') % locals()) |
| 894 '"%(propname)s"') % locals()) | 968 except IndexError: |
| 895 except (ValueError, TypeError) as message: | 969 raise UsageError(_('no such %(classname)s node ' |
| 896 raise UsageError(message) | 970 '"%(nodeid)s"') % locals()) |
| 897 return 0 | 971 return 0 |
| 898 | 972 |
| 899 def do_find(self, args): | 973 def do_import(self, args, import_files=True): |
| 900 ''"""Usage: find classname propname=value ... | 974 ''"""Usage: import import_dir |
| 901 Find the nodes of the given class with a given link property value. | 975 Import a database from the directory containing CSV files, |
| 902 | 976 two per class to import. |
| 903 Find the nodes of the given class with a given link property value. | 977 |
| 904 The value may be either the nodeid of the linked node, or its key | 978 The files used in the import are: |
| 905 value. | 979 |
| 980 <class>.csv | |
| 981 This must define the same properties as the class (including | |
| 982 having a "header" line with those property names.) | |
| 983 <class>-journals.csv | |
| 984 This defines the journals for the items being imported. | |
| 985 | |
| 986 The imported nodes will have the same nodeid as defined in the | |
| 987 import file, thus replacing any existing content. | |
| 988 | |
| 989 The new nodes are added to the existing database - if you want to | |
| 990 create a new database using the imported data, then create a new | |
| 991 database (or, tediously, retire all the old data.) | |
| 906 """ | 992 """ |
| 907 if len(args) < 1: | 993 if len(args) < 1: |
| 908 raise UsageError(_('Not enough arguments supplied')) | 994 raise UsageError(_('Not enough arguments supplied')) |
| 909 classname = args[0] | 995 |
| 910 # get the class | 996 if hasattr(csv, 'field_size_limit'): |
| 911 cl = self.get_class(classname) | 997 csv.field_size_limit(self.db.config.CSV_FIELD_SIZE) |
| 912 | 998 |
| 913 # handle the propname=value argument | 999 # directory to import from |
| 914 props = self.props_from_args(args[1:]) | 1000 dir = args[0] |
| 915 | 1001 |
| 916 # convert the user-input value to a value used for find() | 1002 class colon_separated(csv.excel): |
| 917 for propname, value in props.items(): | 1003 delimiter = ':' |
| 918 if ',' in value: | 1004 |
| 919 values = value.split(',') | 1005 # import all the files |
| 920 else: | 1006 for file in os.listdir(dir): |
| 921 values = [value] | 1007 classname, ext = os.path.splitext(file) |
| 922 d = props[propname] = {} | 1008 # we only care about CSV files |
| 923 for value in values: | 1009 if ext != '.csv' or classname.endswith('-journals'): |
| 924 value = hyperdb.rawToHyperdb(self.db, cl, None, | 1010 continue |
| 925 propname, value) | 1011 |
| 926 if isinstance(value, list): | 1012 cl = self.get_class(classname) |
| 927 for entry in value: | 1013 |
| 928 d[entry] = 1 | 1014 # ensure that the properties and the CSV file headings match |
| 929 else: | 1015 with open(os.path.join(dir, file), 'r') as f: |
| 930 d[value] = 1 | 1016 reader = csv.reader(f, colon_separated) |
| 931 | 1017 file_props = None |
| 932 # now do the find | 1018 maxid = 1 |
| 1019 # loop through the file and create a node for each entry | |
| 1020 for n, r in enumerate(reader): | |
| 1021 if file_props is None: | |
| 1022 file_props = r | |
| 1023 continue | |
| 1024 | |
| 1025 if self.verbose: | |
| 1026 sys.stdout.write('\rImporting %s - %s' % (classname, n)) | |
| 1027 sys.stdout.flush() | |
| 1028 | |
| 1029 # do the import and figure the current highest nodeid | |
| 1030 nodeid = cl.import_list(file_props, r) | |
| 1031 if hasattr(cl, 'import_files') and import_files: | |
| 1032 cl.import_files(dir, nodeid) | |
| 1033 maxid = max(maxid, int(nodeid)) | |
| 1034 | |
| 1035 # (print to sys.stdout here to allow tests to squash it .. ugh) | |
| 1036 print(file=sys.stdout) | |
| 1037 | |
| 1038 # import the journals | |
| 1039 with open(os.path.join(args[0], classname + '-journals.csv'), 'r') as f: | |
| 1040 reader = csv.reader(f, colon_separated) | |
| 1041 cl.import_journals(reader) | |
| 1042 | |
| 1043 # (print to sys.stdout here to allow tests to squash it .. ugh) | |
| 1044 print('setting', classname, maxid+1, file=sys.stdout) | |
| 1045 | |
| 1046 # set the id counter | |
| 1047 self.db.setid(classname, str(maxid+1)) | |
| 1048 | |
| 1049 self.db_uncommitted = True | |
| 1050 return 0 | |
| 1051 | |
| 1052 def do_importtables(self, args): | |
| 1053 ''"""Usage: importtables export_dir | |
| 1054 | |
| 1055 This imports the database tables exported using exporttables. | |
| 1056 """ | |
| 1057 return self.do_import(args, import_files=False) | |
| 1058 | |
| 1059 def do_initialise(self, tracker_home, args): | |
| 1060 ''"""Usage: initialise [adminpw] | |
| 1061 Initialise a new Roundup tracker. | |
| 1062 | |
| 1063 The administrator details will be set at this step. | |
| 1064 | |
| 1065 Execute the tracker's initialisation function dbinit.init() | |
| 1066 """ | |
| 1067 # password | |
| 1068 if len(args) > 1: | |
| 1069 adminpw = args[1] | |
| 1070 else: | |
| 1071 adminpw = '' | |
| 1072 confirm = 'x' | |
| 1073 while adminpw != confirm: | |
| 1074 adminpw = getpass.getpass(_('Admin Password: ')) | |
| 1075 confirm = getpass.getpass(_(' Confirm: ')) | |
| 1076 | |
| 1077 # make sure the tracker home is installed | |
| 1078 if not os.path.exists(tracker_home): | |
| 1079 raise UsageError(_('Instance home does not exist') % locals()) | |
| 933 try: | 1080 try: |
| 934 id = [] | 1081 tracker = roundup.instance.open(tracker_home) |
| 935 designator = [] | 1082 except roundup.instance.TrackerError: |
| 936 if self.separator: | 1083 raise UsageError(_('Instance has not been installed') % locals()) |
| 937 if self.print_designator: | 1084 |
| 938 id = cl.find(**props) | 1085 # is there already a database? |
| 939 for i in id: | 1086 if tracker.exists(): |
| 940 designator.append(classname + i) | 1087 if not self.force: |
| 941 print(self.separator.join(designator)) | 1088 ok = self.my_input(_( |
| 942 else: | 1089 """WARNING: The database is already initialised! |
| 943 print(self.separator.join(cl.find(**props))) | 1090 If you re-initialise it, you will lose all the data! |
| 944 | 1091 Erase it? Y/N: """)) # noqa: E122 |
| 945 else: | 1092 if ok.strip().lower() != 'y': |
| 946 if self.print_designator: | 1093 return 0 |
| 947 id = cl.find(**props) | 1094 |
| 948 for i in id: | 1095 # nuke it |
| 949 designator.append(classname + i) | 1096 tracker.nuke() |
| 950 print(designator) | 1097 |
| 951 else: | 1098 # GO |
| 952 print(cl.find(**props)) | 1099 try: |
| 953 except KeyError: | 1100 tracker.init(password.Password(adminpw, config=tracker.config), |
| 954 raise UsageError(_('%(classname)s has no property ' | 1101 tx_Source='cli') |
| 955 '"%(propname)s"') % locals()) | 1102 except OptionUnsetError as e: |
| 956 except (ValueError, TypeError) as message: | 1103 raise UsageError("In %(tracker_home)s/config.ini - %(error)s" % { |
| 957 raise UsageError(message) | 1104 'error': str(e), 'tracker_home': tracker_home}) |
| 958 return 0 | 1105 |
| 959 | 1106 return 0 |
| 960 def do_specification(self, args): | 1107 |
| 961 ''"""Usage: specification classname | 1108 def do_install(self, tracker_home, args): |
| 962 Show the properties for a classname. | 1109 ''"""Usage: install [template [backend [key=val[,key=val]]]] |
| 963 | 1110 Install a new Roundup tracker. |
| 964 This lists the properties for a given class. | 1111 |
| 1112 The command will prompt for the tracker home directory | |
| 1113 (if not supplied through TRACKER_HOME or the -i option). | |
| 1114 The template and backend may be specified on the command-line | |
| 1115 as arguments, in that order. | |
| 1116 | |
| 1117 Command line arguments following the backend allows you to | |
| 1118 pass initial values for config options. For example, passing | |
| 1119 "web_http_auth=no,rdbms_user=dinsdale" will override defaults | |
| 1120 for options http_auth in section [web] and user in section [rdbms]. | |
| 1121 Please be careful to not use spaces in this argument! (Enclose | |
| 1122 whole argument in quotes if you need spaces in option value). | |
| 1123 | |
| 1124 The initialise command must be called after this command in order | |
| 1125 to initialise the tracker's database. You may edit the tracker's | |
| 1126 initial database contents before running that command by editing | |
| 1127 the tracker's dbinit.py module init() function. | |
| 1128 | |
| 1129 See also initopts help. | |
| 965 """ | 1130 """ |
| 966 if len(args) < 1: | 1131 if len(args) < 1: |
| 967 raise UsageError(_('Not enough arguments supplied')) | 1132 raise UsageError(_('Not enough arguments supplied')) |
| 968 classname = args[0] | 1133 |
| 969 # get the class | 1134 # make sure the tracker home can be created |
| 970 cl = self.get_class(classname) | 1135 tracker_home = os.path.abspath(tracker_home) |
| 971 | 1136 parent = os.path.split(tracker_home)[0] |
| 972 # get the key property | 1137 if not os.path.exists(parent): |
| 973 keyprop = cl.getkey() | 1138 raise UsageError(_('Instance home parent directory "%(parent)s"' |
| 974 for key in cl.properties: | 1139 ' does not exist') % locals()) |
| 975 value = cl.properties[key] | 1140 |
| 976 if keyprop == key: | 1141 config_ini_file = os.path.join(tracker_home, CoreConfig.INI_FILE) |
| 977 sys.stdout.write(_('%(key)s: %(value)s (key property)\n') % | 1142 # check for both old- and new-style configs |
| 978 locals()) | 1143 if list(filter(os.path.exists, [config_ini_file, |
| 1144 os.path.join(tracker_home, 'config.py')])): | |
| 1145 if not self.force: | |
| 1146 ok = self.my_input(_( | |
| 1147 """WARNING: There appears to be a tracker in "%(tracker_home)s"! | |
| 1148 If you re-install it, you will lose all the data! | |
| 1149 Erase it? Y/N: """) % locals()) # noqa: E122 | |
| 1150 if ok.strip().lower() != 'y': | |
| 1151 return 0 | |
| 1152 | |
| 1153 # clear it out so the install isn't confused | |
| 1154 shutil.rmtree(tracker_home) | |
| 1155 | |
| 1156 # select template | |
| 1157 templates = self.listTemplates() | |
| 1158 template = self._get_choice( | |
| 1159 list_name=_('Templates:'), | |
| 1160 prompt=_('Select template'), | |
| 1161 options=templates, | |
| 1162 argument=len(args) > 1 and args[1] or '', | |
| 1163 default='classic') | |
| 1164 | |
| 1165 # select hyperdb backend | |
| 1166 import roundup.backends | |
| 1167 backends = roundup.backends.list_backends() | |
| 1168 backend = self._get_choice( | |
| 1169 list_name=_('Back ends:'), | |
| 1170 prompt=_('Select backend'), | |
| 1171 options=backends, | |
| 1172 argument=len(args) > 2 and args[2] or '', | |
| 1173 default='anydbm') | |
| 1174 # XXX perform a unit test based on the user's selections | |
| 1175 | |
| 1176 # Process configuration file definitions | |
| 1177 if len(args) > 3: | |
| 1178 try: | |
| 1179 defns = dict([item.split("=") for item in args[3].split(",")]) | |
| 1180 except Exception: | |
| 1181 print(_('Error in configuration settings: "%s"') % args[3]) | |
| 1182 raise | |
| 1183 else: | |
| 1184 defns = {} | |
| 1185 | |
| 1186 defns['rdbms_backend'] = backend | |
| 1187 | |
| 1188 # load config_ini.ini from template if it exists. | |
| 1189 # it sets parameters like template_engine that are | |
| 1190 # template specific. | |
| 1191 template_config = UserConfig(templates[template]['path'] + | |
| 1192 "/config_ini.ini") | |
| 1193 for k in template_config.keys(): | |
| 1194 if k == 'HOME': # ignore home. It is a default param. | |
| 1195 continue | |
| 1196 defns[k] = template_config[k] | |
| 1197 | |
| 1198 # install! | |
| 1199 init.install(tracker_home, templates[template]['path'], settings=defns) | |
| 1200 | |
| 1201 # Remove config_ini.ini file from tracker_home (not template dir). | |
| 1202 # Ignore file not found - not all templates have | |
| 1203 # config_ini.ini files. | |
| 1204 try: | |
| 1205 os.remove(tracker_home + "/config_ini.ini") | |
| 1206 except OSError as e: # FileNotFound exception under py3 | |
| 1207 if e.errno == 2: | |
| 1208 pass | |
| 979 else: | 1209 else: |
| 980 sys.stdout.write(_('%(key)s: %(value)s\n') % locals()) | 1210 raise |
| 981 | 1211 |
| 982 def do_display(self, args): | 1212 print(_(""" |
| 983 ''"""Usage: display designator[,designator]* | 1213 --------------------------------------------------------------------------- |
| 984 | 1214 You should now edit the tracker configuration file: |
| 985 Show the property values for the given node(s). | 1215 %(config_file)s""") % {"config_file": config_ini_file}) |
| 986 | 1216 |
| 987 A designator is a classname and a nodeid concatenated, | 1217 # find list of options that need manual adjustments |
| 988 eg. bug1, user10, ... | 1218 # XXX config._get_unset_options() is marked as private |
| 989 | 1219 # (leading underscore). make it public or don't care? |
| 990 This lists the properties and their associated values | 1220 need_set = CoreConfig(tracker_home)._get_unset_options() |
| 991 for the given node. | 1221 if need_set: |
| 992 """ | 1222 print(_(" ... at a minimum, you must set following options:")) |
| 993 if len(args) < 1: | 1223 for section in need_set: |
| 994 raise UsageError(_('Not enough arguments supplied')) | 1224 print(" [%s]: %s" % (section, ", ".join(need_set[section]))) |
| 995 | 1225 |
| 996 # decode the node designator | 1226 # note about schema modifications |
| 997 for designator in args[0].split(','): | 1227 print(_(""" |
| 998 try: | 1228 If you wish to modify the database schema, |
| 999 classname, nodeid = hyperdb.splitDesignator(designator) | 1229 you should also edit the schema file: |
| 1000 except hyperdb.DesignatorError as message: | 1230 %(database_config_file)s |
| 1001 raise UsageError(message) | 1231 You may also change the database initialisation file: |
| 1002 | 1232 %(database_init_file)s |
| 1003 # get the class | 1233 ... see the documentation on customizing for more information. |
| 1004 cl = self.get_class(classname) | 1234 |
| 1005 | 1235 You MUST run the "roundup-admin initialise" command once you've performed |
| 1006 # display the values | 1236 the above steps. |
| 1007 keys = sorted(cl.properties) | 1237 --------------------------------------------------------------------------- |
| 1008 for key in keys: | 1238 """) % {'database_config_file': os.path.join(tracker_home, 'schema.py'), |
| 1009 value = cl.get(nodeid, key) | 1239 'database_init_file': os.path.join(tracker_home, 'initial_data.py')}) \ |
| 1010 print(_('%(key)s: %(value)s') % locals()) | 1240 # noqa: E122 |
| 1011 | |
| 1012 def do_create(self, args): | |
| 1013 ''"""Usage: create classname property=value ... | |
| 1014 Create a new entry of a given class. | |
| 1015 | |
| 1016 This creates a new entry of the given class using the property | |
| 1017 name=value arguments provided on the command line after the "create" | |
| 1018 command. | |
| 1019 """ | |
| 1020 if len(args) < 1: | |
| 1021 raise UsageError(_('Not enough arguments supplied')) | |
| 1022 from roundup import hyperdb | |
| 1023 | |
| 1024 classname = args[0] | |
| 1025 | |
| 1026 # get the class | |
| 1027 cl = self.get_class(classname) | |
| 1028 | |
| 1029 # now do a create | |
| 1030 props = {} | |
| 1031 properties = cl.getprops(protected=0) | |
| 1032 if len(args) == 1: | |
| 1033 # ask for the properties | |
| 1034 for key in properties: | |
| 1035 if key == 'id': continue # noqa: E701 | |
| 1036 value = properties[key] | |
| 1037 name = value.__class__.__name__ | |
| 1038 if isinstance(value, hyperdb.Password): | |
| 1039 again = None | |
| 1040 while value != again: | |
| 1041 value = getpass.getpass(_('%(propname)s (Password): ') | |
| 1042 % | |
| 1043 {'propname': key.capitalize()}) | |
| 1044 again = getpass.getpass(_(' %(propname)s (Again): ') | |
| 1045 % | |
| 1046 {'propname': key.capitalize()}) | |
| 1047 if value != again: | |
| 1048 print(_('Sorry, try again...')) | |
| 1049 if value: | |
| 1050 props[key] = value | |
| 1051 else: | |
| 1052 value = self.my_input(_('%(propname)s (%(proptype)s): ') % { | |
| 1053 'propname': key.capitalize(), 'proptype': name}) | |
| 1054 if value: | |
| 1055 props[key] = value | |
| 1056 else: | |
| 1057 props = self.props_from_args(args[1:]) | |
| 1058 | |
| 1059 # convert types | |
| 1060 for propname in props: | |
| 1061 try: | |
| 1062 props[propname] = hyperdb.rawToHyperdb(self.db, cl, None, | |
| 1063 propname, | |
| 1064 props[propname]) | |
| 1065 except hyperdb.HyperdbValueError as message: | |
| 1066 raise UsageError(message) | |
| 1067 | |
| 1068 # check for the key property | |
| 1069 propname = cl.getkey() | |
| 1070 if propname and propname not in props: | |
| 1071 raise UsageError(_('you must provide the "%(propname)s" ' | |
| 1072 'property.') % locals()) | |
| 1073 | |
| 1074 # do the actual create | |
| 1075 try: | |
| 1076 sys.stdout.write(cl.create(**props) + '\n') | |
| 1077 except (TypeError, IndexError, ValueError) as message: | |
| 1078 raise UsageError(message) | |
| 1079 self.db_uncommitted = True | |
| 1080 return 0 | 1241 return 0 |
| 1081 | 1242 |
| 1082 def do_list(self, args): | 1243 def do_list(self, args): |
| 1083 ''"""Usage: list classname [property] | 1244 ''"""Usage: list classname [property] |
| 1084 List the instances of a class. | 1245 List the instances of a class. |
| 1130 raise UsageError(_('%(classname)s has no property ' | 1291 raise UsageError(_('%(classname)s has no property ' |
| 1131 '"%(propname)s"') % locals()) | 1292 '"%(propname)s"') % locals()) |
| 1132 print(_('%(nodeid)4s: %(value)s') % locals()) | 1293 print(_('%(nodeid)4s: %(value)s') % locals()) |
| 1133 return 0 | 1294 return 0 |
| 1134 | 1295 |
| 1135 def do_templates(self, args): | 1296 def do_migrate(self, args): |
| 1136 ''"""Usage: templates [trace_search] | 1297 ''"""Usage: migrate |
| 1137 List templates and their installed directories. | 1298 |
| 1138 | 1299 Update a tracker's database to be compatible with the Roundup |
| 1139 With trace_search also list all directories that are | 1300 codebase. |
| 1140 searched for templates. | 1301 |
| 1141 """ | 1302 You should run the "migrate" command for your tracker once |
| 1142 import textwrap | 1303 you've installed the latest codebase. |
| 1143 | 1304 |
| 1144 trace_search = False | 1305 Do this before you use the web, command-line or mail interface |
| 1145 if args and args[0] == "trace_search": | 1306 and before any users access the tracker. |
| 1146 trace_search = True | 1307 |
| 1147 | 1308 This command will respond with either "Tracker updated" (if |
| 1148 templates = self.listTemplates(trace_search=trace_search) | 1309 you've not previously run it on an RDBMS backend) or "No |
| 1149 | 1310 migration action required" (if you have run it, or have used |
| 1150 for name in sorted(list(templates.keys())): | 1311 another interface to the tracker, or possibly because you are |
| 1151 templates[name]['description'] = textwrap.fill( | 1312 using anydbm). |
| 1152 "\n".join([line.lstrip() for line in | 1313 |
| 1153 templates[name]['description'].split("\n")]), | 1314 It's safe to run this even if it's not required, so just get |
| 1154 70, | 1315 into the habit. |
| 1155 subsequent_indent=" " | 1316 """ |
| 1156 ) | 1317 if self.db.db_version_updated: |
| 1157 print(""" | 1318 print(_('Tracker updated to schema version %s.') % |
| 1158 Name: %(name)s | 1319 self.db.database_schema['version']) |
| 1159 Path: %(path)s | 1320 self.db_uncommitted = True |
| 1160 Desc: %(description)s | 1321 else: |
| 1161 """ % templates[name]) | 1322 print(_('No migration action required. At schema version %s.') % |
| 1162 | 1323 self.db.database_schema['version']) |
| 1163 def do_table(self, args): | 1324 return 0 |
| 1164 ''"""Usage: table classname [property[,property]*] | 1325 |
| 1165 List the instances of a class in tabular form. | 1326 def do_pack(self, args): |
| 1166 | 1327 ''"""Usage: pack period | date |
| 1167 Lists all instances of the given class. If the properties are not | 1328 |
| 1168 specified, all properties are displayed. By default, the column | 1329 Remove journal entries older than a period of time specified or |
| 1169 widths are the width of the largest value. The width may be | 1330 before a certain date. |
| 1170 explicitly defined by defining the property as "name:width". | 1331 |
| 1171 For example:: | 1332 A period is specified using the suffixes "y", "m", and "d". The |
| 1172 | 1333 suffix "w" (for "week") means 7 days. |
| 1173 roundup> table priority id,name:10 | 1334 |
| 1174 Id Name | 1335 "3y" means three years |
| 1175 1 fatal-bug | 1336 "2y 1m" means two years and one month |
| 1176 2 bug | 1337 "1m 25d" means one month and 25 days |
| 1177 3 usability | 1338 "2w 3d" means two weeks and three days |
| 1178 4 feature | 1339 |
| 1179 | 1340 Date format is "YYYY-MM-DD" eg: |
| 1180 Also to make the width of the column the width of the label, | 1341 2001-01-01 |
| 1181 leave a trailing : without a width on the property. For example:: | 1342 |
| 1182 | 1343 """ |
| 1183 roundup> table priority id,name: | 1344 if len(args) != 1: |
| 1184 Id Name | |
| 1185 1 fata | |
| 1186 2 bug | |
| 1187 3 usab | |
| 1188 4 feat | |
| 1189 | |
| 1190 will result in the 4 character wide "Name" column. | |
| 1191 """ | |
| 1192 if len(args) < 1: | |
| 1193 raise UsageError(_('Not enough arguments supplied')) | 1345 raise UsageError(_('Not enough arguments supplied')) |
| 1194 classname = args[0] | 1346 |
| 1195 | 1347 # are we dealing with a period or a date |
| 1196 # get the class | 1348 value = args[0] |
| 1197 cl = self.get_class(classname) | 1349 date_re = re.compile(r""" |
| 1198 | 1350 (?P<date>\d\d\d\d-\d\d?-\d\d?)? # yyyy-mm-dd |
| 1199 # figure the property names to display | 1351 (?P<period>(\d+y\s*)?(\d+m\s*)?(\d+d\s*)?)? |
| 1200 if len(args) > 1: | 1352 """, re.VERBOSE) |
| 1201 prop_names = args[1].split(',') | 1353 m = date_re.match(value) |
| 1202 all_props = cl.getprops() | 1354 if not m: |
| 1203 for spec in prop_names: | 1355 raise ValueError(_('Invalid format')) |
| 1204 if ':' in spec: | 1356 m = m.groupdict() |
| 1357 if m['period']: | |
| 1358 pack_before = date.Date(". - %s" % value) | |
| 1359 elif m['date']: | |
| 1360 pack_before = date.Date(value) | |
| 1361 self.db.pack(pack_before) | |
| 1362 self.db_uncommitted = True | |
| 1363 return 0 | |
| 1364 | |
| 1365 def do_perftest(self, args): | |
| 1366 ''"""Usage: perftest [mode] [arguments]* | |
| 1367 | |
| 1368 Time operations in Roundup. Supported arguments: | |
| 1369 | |
| 1370 [password] [rounds=<integer>] [scheme=<scheme>] | |
| 1371 | |
| 1372 'password' is the default mode. The tracker's config.ini | |
| 1373 setting for 'password_pbkdf2_default_rounds' is the default | |
| 1374 value for 'rounds'. On the command line, 'rounds' can include | |
| 1375 thousands separator of ',' or '.'. 'scheme' is the default | |
| 1376 coded into Roundup. List supported schemes by using 'scheme='. | |
| 1377 | |
| 1378 """ | |
| 1379 from roundup.anypy.time_ import perf_counter | |
| 1380 | |
| 1381 props = {"rounds": self.db.config.PASSWORD_PBKDF2_DEFAULT_ROUNDS, | |
| 1382 "scheme": password.Password.known_schemes[0]} | |
| 1383 | |
| 1384 print_supported_schemes = lambda: print( | |
| 1385 "Supported schemes (default is first, case " | |
| 1386 "sensitive):\n %s." % | |
| 1387 ", ".join(password.Password.known_schemes)) | |
| 1388 | |
| 1389 if (args[0].find("=") != -1): | |
| 1390 args.insert(0, 'password') | |
| 1391 | |
| 1392 props.update(self.props_from_args(args[1:])) | |
| 1393 | |
| 1394 if args[0] == "password": | |
| 1395 try: | |
| 1396 # convert 10,000,000 or 10.000.000 to 10000000 | |
| 1397 r = int(re.sub('[,.]', '', props['rounds'])) | |
| 1398 if r < 1000: | |
| 1399 print(_("Invalid 'rounds'. Must be larger than 999.")) | |
| 1400 return | |
| 1401 props['rounds'] = r | |
| 1402 except (TypeError, ValueError): | |
| 1403 print(_("Invalid 'rounds'. It must be an integer not: %s") % | |
| 1404 props['rounds']) | |
| 1405 return | |
| 1406 if props['scheme'] is None: | |
| 1407 print_supported_schemes() | |
| 1408 return | |
| 1409 | |
| 1410 self.db.config.PASSWORD_PBKDF2_DEFAULT_ROUNDS = props['rounds'] | |
| 1411 | |
| 1412 try: | |
| 1413 tic = perf_counter() | |
| 1414 pw_hash = password.encodePassword( | |
| 1415 "this is a long password to hash", | |
| 1416 props['scheme'], | |
| 1417 None, | |
| 1418 config=self.db.config | |
| 1419 ) | |
| 1420 toc = perf_counter() | |
| 1421 except password.PasswordValueError as e: | |
| 1422 print(e) | |
| 1423 print_supported_schemes() | |
| 1424 return | |
| 1425 | |
| 1426 if props['scheme'].startswith('PBKDF2'): | |
| 1427 (rounds, salt, _raw_salt, digest) = password.pbkdf2_unpack( | |
| 1428 pw_hash) | |
| 1429 else: | |
| 1430 rounds = _("scheme does not support rounds.") | |
| 1431 | |
| 1432 print(_( | |
| 1433 "Hash time: %(time)0.9f seconds, scheme: %(scheme)s, " | |
| 1434 "rounds: %(rounds)s") % | |
| 1435 {"time": toc-tic, "scheme": props['scheme'], | |
| 1436 "rounds": rounds}) | |
| 1437 | |
| 1438 designator_re = re.compile('([A-Za-z]+)([0-9]+)') | |
| 1439 | |
| 1440 def do_reindex(self, args, desre=designator_re): | |
| 1441 ''"""Usage: reindex [classname|designator]* | |
| 1442 Re-generate a tracker's search indexes. | |
| 1443 | |
| 1444 This will re-generate the search indexes for a tracker. | |
| 1445 This will typically happen automatically. | |
| 1446 """ | |
| 1447 if args: | |
| 1448 for arg in args: | |
| 1449 m = desre.match(arg) | |
| 1450 if m: | |
| 1451 cl = self.get_class(m.group(1)) | |
| 1205 try: | 1452 try: |
| 1206 propname, width = spec.split(':') | 1453 cl.index(m.group(2)) |
| 1207 except (ValueError, TypeError): | 1454 except IndexError: |
| 1208 raise UsageError(_('"%(spec)s" not ' | 1455 raise UsageError(_('no such item "%(designator)s"') % { |
| 1209 'name:width') % locals()) | 1456 'designator': arg}) |
| 1210 else: | 1457 else: |
| 1211 propname = spec | 1458 cl = self.get_class(arg) |
| 1212 if propname not in all_props: | 1459 self.db.reindex(arg) |
| 1213 raise UsageError(_('%(classname)s has no property ' | |
| 1214 '"%(propname)s"') % locals()) | |
| 1215 else: | 1460 else: |
| 1216 prop_names = cl.getprops() | 1461 self.db.reindex(show_progress=True) |
| 1217 | |
| 1218 # now figure column widths | |
| 1219 props = [] | |
| 1220 for spec in prop_names: | |
| 1221 if ':' in spec: | |
| 1222 name, width = spec.split(':') | |
| 1223 if width == '': | |
| 1224 # spec includes trailing :, use label/name width | |
| 1225 props.append((name, len(name))) | |
| 1226 else: | |
| 1227 try: | |
| 1228 props.append((name, int(width))) | |
| 1229 except ValueError: | |
| 1230 raise UsageError(_('"%(spec)s" does not have an ' | |
| 1231 'integer width: "%(width)s"') % | |
| 1232 locals()) | |
| 1233 else: | |
| 1234 # this is going to be slow | |
| 1235 maxlen = len(spec) | |
| 1236 for nodeid in cl.list(): | |
| 1237 curlen = len(str(cl.get(nodeid, spec))) | |
| 1238 if curlen > maxlen: | |
| 1239 maxlen = curlen | |
| 1240 props.append((spec, maxlen)) | |
| 1241 | |
| 1242 # now display the heading | |
| 1243 print(' '.join([name.capitalize().ljust(width) | |
| 1244 for name, width in props])) | |
| 1245 | |
| 1246 # and the table data | |
| 1247 for nodeid in cl.list(): | |
| 1248 table_columns = [] | |
| 1249 for name, width in props: | |
| 1250 if name != 'id': | |
| 1251 try: | |
| 1252 value = str(cl.get(nodeid, name)) | |
| 1253 except KeyError: | |
| 1254 # we already checked if the property is valid - a | |
| 1255 # KeyError here means the node just doesn't have a | |
| 1256 # value for it | |
| 1257 value = '' | |
| 1258 else: | |
| 1259 value = str(nodeid) | |
| 1260 f = '%%-%ds' % width | |
| 1261 table_columns.append(f % value[:width]) | |
| 1262 print(' '.join(table_columns)) | |
| 1263 return 0 | |
| 1264 | |
| 1265 def do_history(self, args): | |
| 1266 ''"""Usage: history designator [skipquiet] | |
| 1267 Show the history entries of a designator. | |
| 1268 | |
| 1269 A designator is a classname and a nodeid concatenated, | |
| 1270 eg. bug1, user10, ... | |
| 1271 | |
| 1272 Lists the journal entries viewable by the user for the | |
| 1273 node identified by the designator. If skipquiet is the | |
| 1274 second argument, journal entries for quiet properties | |
| 1275 are not shown. | |
| 1276 """ | |
| 1277 | |
| 1278 if len(args) < 1: | |
| 1279 raise UsageError(_('Not enough arguments supplied')) | |
| 1280 try: | |
| 1281 classname, nodeid = hyperdb.splitDesignator(args[0]) | |
| 1282 except hyperdb.DesignatorError as message: | |
| 1283 raise UsageError(message) | |
| 1284 | |
| 1285 skipquiet = False | |
| 1286 if len(args) == 2: | |
| 1287 if args[1] != 'skipquiet': | |
| 1288 raise UsageError("Second argument is not skipquiet") | |
| 1289 skipquiet = True | |
| 1290 | |
| 1291 try: | |
| 1292 print(self.db.getclass(classname).history(nodeid, | |
| 1293 skipquiet=skipquiet)) | |
| 1294 except KeyError: | |
| 1295 raise UsageError(_('no such class "%(classname)s"') % locals()) | |
| 1296 except IndexError: | |
| 1297 raise UsageError(_('no such %(classname)s node ' | |
| 1298 '"%(nodeid)s"') % locals()) | |
| 1299 return 0 | |
| 1300 | |
| 1301 def do_commit(self, args): | |
| 1302 ''"""Usage: commit | |
| 1303 Commit changes made to the database during an interactive session. | |
| 1304 | |
| 1305 The changes made during an interactive session are not | |
| 1306 automatically written to the database - they must be committed | |
| 1307 using this command. | |
| 1308 | |
| 1309 One-off commands on the command-line are automatically committed if | |
| 1310 they are successful. | |
| 1311 """ | |
| 1312 self.db.commit() | |
| 1313 self.db_uncommitted = False | |
| 1314 return 0 | |
| 1315 | |
| 1316 def do_rollback(self, args): | |
| 1317 ''"""Usage: rollback | |
| 1318 Undo all changes that are pending commit to the database. | |
| 1319 | |
| 1320 The changes made during an interactive session are not | |
| 1321 automatically written to the database - they must be committed | |
| 1322 manually. This command undoes all those changes, so a commit | |
| 1323 immediately after would make no changes to the database. | |
| 1324 """ | |
| 1325 self.db.rollback() | |
| 1326 self.db_uncommitted = False | |
| 1327 return 0 | |
| 1328 | |
| 1329 def do_retire(self, args): | |
| 1330 ''"""Usage: retire designator[,designator]* | |
| 1331 Retire the node specified by designator. | |
| 1332 | |
| 1333 A designator is a classname and a nodeid concatenated, | |
| 1334 eg. bug1, user10, ... | |
| 1335 | |
| 1336 This action indicates that a particular node is not to be retrieved | |
| 1337 by the list or find commands, and its key value may be re-used. | |
| 1338 """ | |
| 1339 if len(args) < 1: | |
| 1340 raise UsageError(_('Not enough arguments supplied')) | |
| 1341 designators = args[0].split(',') | |
| 1342 for designator in designators: | |
| 1343 try: | |
| 1344 classname, nodeid = hyperdb.splitDesignator(designator) | |
| 1345 except hyperdb.DesignatorError as message: | |
| 1346 raise UsageError(message) | |
| 1347 try: | |
| 1348 self.db.getclass(classname).retire(nodeid) | |
| 1349 except KeyError: | |
| 1350 raise UsageError(_('no such class "%(classname)s"') % locals()) | |
| 1351 except IndexError: | |
| 1352 raise UsageError(_('no such %(classname)s node ' | |
| 1353 '"%(nodeid)s"') % locals()) | |
| 1354 self.db_uncommitted = True | |
| 1355 return 0 | 1462 return 0 |
| 1356 | 1463 |
| 1357 def do_restore(self, args): | 1464 def do_restore(self, args): |
| 1358 ''"""Usage: restore designator[,designator]* | 1465 ''"""Usage: restore designator[,designator]* |
| 1359 Restore the retired node specified by designator. | 1466 Restore the retired node specified by designator. |
| 1383 raise UsageError(_('no such %(classname)s node ' | 1490 raise UsageError(_('no such %(classname)s node ' |
| 1384 '" % (nodeid)s"') % locals()) | 1491 '" % (nodeid)s"') % locals()) |
| 1385 self.db_uncommitted = True | 1492 self.db_uncommitted = True |
| 1386 return 0 | 1493 return 0 |
| 1387 | 1494 |
| 1388 def do_export(self, args, export_files=True): | 1495 def do_retire(self, args): |
| 1389 ''"""Usage: export [[-]class[,class]] export_dir | 1496 ''"""Usage: retire designator[,designator]* |
| 1390 Export the database to colon-separated-value files. | 1497 Retire the node specified by designator. |
| 1391 To exclude the files (e.g. for the msg or file class), | 1498 |
| 1392 use the exporttables command. | 1499 A designator is a classname and a nodeid concatenated, |
| 1393 | 1500 eg. bug1, user10, ... |
| 1394 Optionally limit the export to just the named classes | 1501 |
| 1395 or exclude the named classes, if the 1st argument starts with '-'. | 1502 This action indicates that a particular node is not to be retrieved |
| 1396 | 1503 by the list or find commands, and its key value may be re-used. |
| 1397 This action exports the current data from the database into | 1504 """ |
| 1398 colon-separated-value files that are placed in the nominated | |
| 1399 destination directory. | |
| 1400 """ | |
| 1401 # grab the directory to export to | |
| 1402 if len(args) < 1: | 1505 if len(args) < 1: |
| 1403 raise UsageError(_('Not enough arguments supplied')) | 1506 raise UsageError(_('Not enough arguments supplied')) |
| 1404 | 1507 designators = args[0].split(',') |
| 1405 dir = args[-1] | 1508 for designator in designators: |
| 1406 | 1509 try: |
| 1407 # get the list of classes to export | 1510 classname, nodeid = hyperdb.splitDesignator(designator) |
| 1408 if len(args) == 2: | 1511 except hyperdb.DesignatorError as message: |
| 1409 if args[0].startswith('-'): | 1512 raise UsageError(message) |
| 1410 classes = [c for c in self.db.classes | 1513 try: |
| 1411 if c not in args[0][1:].split(',')] | 1514 self.db.getclass(classname).retire(nodeid) |
| 1412 else: | 1515 except KeyError: |
| 1413 classes = args[0].split(',') | 1516 raise UsageError(_('no such class "%(classname)s"') % locals()) |
| 1414 else: | 1517 except IndexError: |
| 1415 classes = self.db.classes | 1518 raise UsageError(_('no such %(classname)s node ' |
| 1416 | 1519 '"%(nodeid)s"') % locals()) |
| 1417 class colon_separated(csv.excel): | |
| 1418 delimiter = ':' | |
| 1419 | |
| 1420 # make sure target dir exists | |
| 1421 if not os.path.exists(dir): | |
| 1422 os.makedirs(dir) | |
| 1423 | |
| 1424 # maximum csv field length exceeding configured size? | |
| 1425 max_len = self.db.config.CSV_FIELD_SIZE | |
| 1426 | |
| 1427 # do all the classes specified | |
| 1428 for classname in classes: | |
| 1429 cl = self.get_class(classname) | |
| 1430 | |
| 1431 if not export_files and hasattr(cl, 'export_files'): | |
| 1432 sys.stdout.write('Exporting %s WITHOUT the files\r\n' % | |
| 1433 classname) | |
| 1434 | |
| 1435 with open(os.path.join(dir, classname+'.csv'), 'w') as f: | |
| 1436 writer = csv.writer(f, colon_separated) | |
| 1437 | |
| 1438 propnames = cl.export_propnames() | |
| 1439 fields = propnames[:] | |
| 1440 fields.append('is retired') | |
| 1441 writer.writerow(fields) | |
| 1442 | |
| 1443 # If a node has a key, sort all nodes by key | |
| 1444 # with retired nodes first. Retired nodes | |
| 1445 # must occur before a non-retired node with | |
| 1446 # the same key. Otherwise you get an | |
| 1447 # IntegrityError: UNIQUE constraint failed: | |
| 1448 # _class.__retired__, _<class>._<keyname> | |
| 1449 # on imports to rdbms. | |
| 1450 all_nodes = cl.getnodeids() | |
| 1451 | |
| 1452 classkey = cl.getkey() | |
| 1453 if classkey: # False sorts before True, so negate is_retired | |
| 1454 keysort = lambda i: (cl.get(i, classkey), # noqa: E731 | |
| 1455 not cl.is_retired(i)) | |
| 1456 all_nodes.sort(key=keysort) | |
| 1457 # if there is no classkey no need to sort | |
| 1458 | |
| 1459 for nodeid in all_nodes: | |
| 1460 if self.verbose: | |
| 1461 sys.stdout.write('\rExporting %s - %s' % | |
| 1462 (classname, nodeid)) | |
| 1463 sys.stdout.flush() | |
| 1464 node = cl.getnode(nodeid) | |
| 1465 exp = cl.export_list(propnames, nodeid) | |
| 1466 lensum = sum([len(repr_export(node[p])) for | |
| 1467 p in propnames]) | |
| 1468 # for a safe upper bound of field length we add | |
| 1469 # difference between CSV len and sum of all field lengths | |
| 1470 d = sum([len(x) for x in exp]) - lensum | |
| 1471 if not d > 0: | |
| 1472 raise AssertionError("Bad assertion d > 0") | |
| 1473 for p in propnames: | |
| 1474 ll = len(repr_export(node[p])) + d | |
| 1475 if ll > max_len: | |
| 1476 max_len = ll | |
| 1477 writer.writerow(exp) | |
| 1478 if export_files and hasattr(cl, 'export_files'): | |
| 1479 cl.export_files(dir, nodeid) | |
| 1480 | |
| 1481 # export the journals | |
| 1482 with open(os.path.join(dir, classname+'-journals.csv'), 'w') as jf: | |
| 1483 if self.verbose: | |
| 1484 sys.stdout.write("\nExporting Journal for %s\n" % | |
| 1485 classname) | |
| 1486 sys.stdout.flush() | |
| 1487 journals = csv.writer(jf, colon_separated) | |
| 1488 for row in cl.export_journals(): | |
| 1489 journals.writerow(row) | |
| 1490 if max_len > self.db.config.CSV_FIELD_SIZE: | |
| 1491 print("Warning: config csv_field_size should be at least %s" % | |
| 1492 max_len, file=sys.stderr) | |
| 1493 return 0 | |
| 1494 | |
| 1495 def do_exporttables(self, args): | |
| 1496 ''"""Usage: exporttables [[-]class[,class]] export_dir | |
| 1497 Export the database to colon-separated-value files, excluding the | |
| 1498 files below $TRACKER_HOME/db/files/ (which can be archived separately). | |
| 1499 To include the files, use the export command. | |
| 1500 | |
| 1501 Optionally limit the export to just the named classes | |
| 1502 or exclude the named classes, if the 1st argument starts with '-'. | |
| 1503 | |
| 1504 This action exports the current data from the database into | |
| 1505 colon-separated-value files that are placed in the nominated | |
| 1506 destination directory. | |
| 1507 """ | |
| 1508 return self.do_export(args, export_files=False) | |
| 1509 | |
| 1510 def do_import(self, args, import_files=True): | |
| 1511 ''"""Usage: import import_dir | |
| 1512 Import a database from the directory containing CSV files, | |
| 1513 two per class to import. | |
| 1514 | |
| 1515 The files used in the import are: | |
| 1516 | |
| 1517 <class>.csv | |
| 1518 This must define the same properties as the class (including | |
| 1519 having a "header" line with those property names.) | |
| 1520 <class>-journals.csv | |
| 1521 This defines the journals for the items being imported. | |
| 1522 | |
| 1523 The imported nodes will have the same nodeid as defined in the | |
| 1524 import file, thus replacing any existing content. | |
| 1525 | |
| 1526 The new nodes are added to the existing database - if you want to | |
| 1527 create a new database using the imported data, then create a new | |
| 1528 database (or, tediously, retire all the old data.) | |
| 1529 """ | |
| 1530 if len(args) < 1: | |
| 1531 raise UsageError(_('Not enough arguments supplied')) | |
| 1532 | |
| 1533 if hasattr(csv, 'field_size_limit'): | |
| 1534 csv.field_size_limit(self.db.config.CSV_FIELD_SIZE) | |
| 1535 | |
| 1536 # directory to import from | |
| 1537 dir = args[0] | |
| 1538 | |
| 1539 class colon_separated(csv.excel): | |
| 1540 delimiter = ':' | |
| 1541 | |
| 1542 # import all the files | |
| 1543 for file in os.listdir(dir): | |
| 1544 classname, ext = os.path.splitext(file) | |
| 1545 # we only care about CSV files | |
| 1546 if ext != '.csv' or classname.endswith('-journals'): | |
| 1547 continue | |
| 1548 | |
| 1549 cl = self.get_class(classname) | |
| 1550 | |
| 1551 # ensure that the properties and the CSV file headings match | |
| 1552 with open(os.path.join(dir, file), 'r') as f: | |
| 1553 reader = csv.reader(f, colon_separated) | |
| 1554 file_props = None | |
| 1555 maxid = 1 | |
| 1556 # loop through the file and create a node for each entry | |
| 1557 for n, r in enumerate(reader): | |
| 1558 if file_props is None: | |
| 1559 file_props = r | |
| 1560 continue | |
| 1561 | |
| 1562 if self.verbose: | |
| 1563 sys.stdout.write('\rImporting %s - %s' % (classname, n)) | |
| 1564 sys.stdout.flush() | |
| 1565 | |
| 1566 # do the import and figure the current highest nodeid | |
| 1567 nodeid = cl.import_list(file_props, r) | |
| 1568 if hasattr(cl, 'import_files') and import_files: | |
| 1569 cl.import_files(dir, nodeid) | |
| 1570 maxid = max(maxid, int(nodeid)) | |
| 1571 | |
| 1572 # (print to sys.stdout here to allow tests to squash it .. ugh) | |
| 1573 print(file=sys.stdout) | |
| 1574 | |
| 1575 # import the journals | |
| 1576 with open(os.path.join(args[0], classname + '-journals.csv'), 'r') as f: | |
| 1577 reader = csv.reader(f, colon_separated) | |
| 1578 cl.import_journals(reader) | |
| 1579 | |
| 1580 # (print to sys.stdout here to allow tests to squash it .. ugh) | |
| 1581 print('setting', classname, maxid+1, file=sys.stdout) | |
| 1582 | |
| 1583 # set the id counter | |
| 1584 self.db.setid(classname, str(maxid+1)) | |
| 1585 | |
| 1586 self.db_uncommitted = True | 1520 self.db_uncommitted = True |
| 1587 return 0 | 1521 return 0 |
| 1588 | 1522 |
| 1589 def do_importtables(self, args): | 1523 def do_rollback(self, args): |
| 1590 ''"""Usage: importtables export_dir | 1524 ''"""Usage: rollback |
| 1591 | 1525 Undo all changes that are pending commit to the database. |
| 1592 This imports the database tables exported using exporttables. | 1526 |
| 1593 """ | 1527 The changes made during an interactive session are not |
| 1594 return self.do_import(args, import_files=False) | 1528 automatically written to the database - they must be committed |
| 1595 | 1529 manually. This command undoes all those changes, so a commit |
| 1596 def do_pack(self, args): | 1530 immediately after would make no changes to the database. |
| 1597 ''"""Usage: pack period | date | 1531 """ |
| 1598 | 1532 self.db.rollback() |
| 1599 Remove journal entries older than a period of time specified or | 1533 self.db_uncommitted = False |
| 1600 before a certain date. | |
| 1601 | |
| 1602 A period is specified using the suffixes "y", "m", and "d". The | |
| 1603 suffix "w" (for "week") means 7 days. | |
| 1604 | |
| 1605 "3y" means three years | |
| 1606 "2y 1m" means two years and one month | |
| 1607 "1m 25d" means one month and 25 days | |
| 1608 "2w 3d" means two weeks and three days | |
| 1609 | |
| 1610 Date format is "YYYY-MM-DD" eg: | |
| 1611 2001-01-01 | |
| 1612 | |
| 1613 """ | |
| 1614 if len(args) != 1: | |
| 1615 raise UsageError(_('Not enough arguments supplied')) | |
| 1616 | |
| 1617 # are we dealing with a period or a date | |
| 1618 value = args[0] | |
| 1619 date_re = re.compile(r""" | |
| 1620 (?P<date>\d\d\d\d-\d\d?-\d\d?)? # yyyy-mm-dd | |
| 1621 (?P<period>(\d+y\s*)?(\d+m\s*)?(\d+d\s*)?)? | |
| 1622 """, re.VERBOSE) | |
| 1623 m = date_re.match(value) | |
| 1624 if not m: | |
| 1625 raise ValueError(_('Invalid format')) | |
| 1626 m = m.groupdict() | |
| 1627 if m['period']: | |
| 1628 pack_before = date.Date(". - %s" % value) | |
| 1629 elif m['date']: | |
| 1630 pack_before = date.Date(value) | |
| 1631 self.db.pack(pack_before) | |
| 1632 self.db_uncommitted = True | |
| 1633 return 0 | |
| 1634 | |
| 1635 designator_re = re.compile('([A-Za-z]+)([0-9]+)') | |
| 1636 | |
| 1637 def do_reindex(self, args, desre=designator_re): | |
| 1638 ''"""Usage: reindex [classname|designator]* | |
| 1639 Re-generate a tracker's search indexes. | |
| 1640 | |
| 1641 This will re-generate the search indexes for a tracker. | |
| 1642 This will typically happen automatically. | |
| 1643 """ | |
| 1644 if args: | |
| 1645 for arg in args: | |
| 1646 m = desre.match(arg) | |
| 1647 if m: | |
| 1648 cl = self.get_class(m.group(1)) | |
| 1649 try: | |
| 1650 cl.index(m.group(2)) | |
| 1651 except IndexError: | |
| 1652 raise UsageError(_('no such item "%(designator)s"') % { | |
| 1653 'designator': arg}) | |
| 1654 else: | |
| 1655 cl = self.get_class(arg) | |
| 1656 self.db.reindex(arg) | |
| 1657 else: | |
| 1658 self.db.reindex(show_progress=True) | |
| 1659 return 0 | 1534 return 0 |
| 1660 | 1535 |
| 1661 def do_security(self, args): | 1536 def do_security(self, args): |
| 1662 ''"""Usage: security [Role name] | 1537 ''"""Usage: security [Role name] |
| 1663 | 1538 |
| 1715 '"%(klass)s" only)\n') % d) | 1590 '"%(klass)s" only)\n') % d) |
| 1716 else: | 1591 else: |
| 1717 sys.stdout.write(_(' %(description)s (%(name)s)\n') % d) | 1592 sys.stdout.write(_(' %(description)s (%(name)s)\n') % d) |
| 1718 return 0 | 1593 return 0 |
| 1719 | 1594 |
| 1720 def do_migrate(self, args): | 1595 def do_set(self, args): |
| 1721 ''"""Usage: migrate | 1596 ''"""Usage: set items property=value property=value ... |
| 1722 | 1597 Set the given properties of one or more items(s). |
| 1723 Update a tracker's database to be compatible with the Roundup | 1598 |
| 1724 codebase. | 1599 The items are specified as a class or as a comma-separated |
| 1725 | 1600 list of item designators (ie "designator[,designator,...]"). |
| 1726 You should run the "migrate" command for your tracker once | 1601 |
| 1727 you've installed the latest codebase. | 1602 A designator is a classname and a nodeid concatenated, |
| 1728 | 1603 eg. bug1, user10, ... |
| 1729 Do this before you use the web, command-line or mail interface | 1604 |
| 1730 and before any users access the tracker. | 1605 This command sets the properties to the values for all |
| 1731 | 1606 designators given. If a class is used, the property will be |
| 1732 This command will respond with either "Tracker updated" (if | 1607 set for all items in the class. If the value is missing |
| 1733 you've not previously run it on an RDBMS backend) or "No | 1608 (ie. "property=") then the property is un-set. If the property |
| 1734 migration action required" (if you have run it, or have used | 1609 is a multilink, you specify the linked ids for the multilink |
| 1735 another interface to the tracker, or possibly because you are | 1610 as comma-separated numbers (ie "1,2,3"). |
| 1736 using anydbm). | 1611 |
| 1737 | 1612 """ |
| 1738 It's safe to run this even if it's not required, so just get | 1613 import copy # needed for copying props list |
| 1739 into the habit. | 1614 |
| 1740 """ | 1615 if len(args) < 2: |
| 1741 if self.db.db_version_updated: | 1616 raise UsageError(_('Not enough arguments supplied')) |
| 1742 print(_('Tracker updated to schema version %s.') % | 1617 from roundup import hyperdb |
| 1743 self.db.database_schema['version']) | 1618 |
| 1744 self.db_uncommitted = True | 1619 designators = args[0].split(',') |
| 1620 if len(designators) == 1: | |
| 1621 designator = designators[0] | |
| 1622 try: | |
| 1623 designator = hyperdb.splitDesignator(designator) | |
| 1624 designators = [designator] | |
| 1625 except hyperdb.DesignatorError: | |
| 1626 cl = self.get_class(designator) | |
| 1627 designators = [(designator, x) for x in cl.list()] | |
| 1745 else: | 1628 else: |
| 1746 print(_('No migration action required. At schema version %s.') % | |
| 1747 self.db.database_schema['version']) | |
| 1748 return 0 | |
| 1749 | |
| 1750 def do_perftest(self, args): | |
| 1751 ''"""Usage: perftest [mode] [arguments]* | |
| 1752 | |
| 1753 Time operations in Roundup. Supported arguments: | |
| 1754 | |
| 1755 [password] [rounds=<integer>] [scheme=<scheme>] | |
| 1756 | |
| 1757 'password' is the default mode. The tracker's config.ini | |
| 1758 setting for 'password_pbkdf2_default_rounds' is the default | |
| 1759 value for 'rounds'. On the command line, 'rounds' can include | |
| 1760 thousands separator of ',' or '.'. 'scheme' is the default | |
| 1761 coded into Roundup. List supported schemes by using 'scheme='. | |
| 1762 | |
| 1763 """ | |
| 1764 from roundup.anypy.time_ import perf_counter | |
| 1765 | |
| 1766 props = {"rounds": self.db.config.PASSWORD_PBKDF2_DEFAULT_ROUNDS, | |
| 1767 "scheme": password.Password.known_schemes[0]} | |
| 1768 | |
| 1769 print_supported_schemes = lambda: print( | |
| 1770 "Supported schemes (default is first, case " | |
| 1771 "sensitive):\n %s." % | |
| 1772 ", ".join(password.Password.known_schemes)) | |
| 1773 | |
| 1774 if (args[0].find("=") != -1): | |
| 1775 args.insert(0, 'password') | |
| 1776 | |
| 1777 props.update(self.props_from_args(args[1:])) | |
| 1778 | |
| 1779 if args[0] == "password": | |
| 1780 try: | 1629 try: |
| 1781 # convert 10,000,000 or 10.000.000 to 10000000 | 1630 designators = [hyperdb.splitDesignator(x) for x in designators] |
| 1782 r = int(re.sub('[,.]', '', props['rounds'])) | 1631 except hyperdb.DesignatorError as message: |
| 1783 if r < 1000: | 1632 raise UsageError(message) |
| 1784 print(_("Invalid 'rounds'. Must be larger than 999.")) | 1633 |
| 1785 return | 1634 # get the props from the args |
| 1786 props['rounds'] = r | 1635 propset = self.props_from_args(args[1:]) # parse the cli once |
| 1787 except (TypeError, ValueError): | 1636 |
| 1788 print(_("Invalid 'rounds'. It must be an integer not: %s") % | 1637 # now do the set for all the nodes |
| 1789 props['rounds']) | 1638 for classname, itemid in designators: |
| 1790 return | 1639 props = copy.copy(propset) # make a new copy for every designator |
| 1791 if props['scheme'] is None: | 1640 cl = self.get_class(classname) |
| 1792 print_supported_schemes() | 1641 |
| 1793 return | 1642 for key, value in list(props.items()): |
| 1794 | 1643 try: |
| 1795 self.db.config.PASSWORD_PBKDF2_DEFAULT_ROUNDS = props['rounds'] | 1644 # You must reinitialize the props every time though. |
| 1796 | 1645 # if props['nosy'] = '+admin' initally, it gets |
| 1646 # set to 'demo,admin' (assuming it was set to demo | |
| 1647 # in the db) after rawToHyperdb returns. | |
| 1648 # This new value is used for all the rest of the | |
| 1649 # designators if not reinitalized. | |
| 1650 props[key] = hyperdb.rawToHyperdb(self.db, cl, itemid, | |
| 1651 key, value) | |
| 1652 except hyperdb.HyperdbValueError as message: | |
| 1653 raise UsageError(message) | |
| 1654 | |
| 1655 # try the set | |
| 1797 try: | 1656 try: |
| 1798 tic = perf_counter() | 1657 cl.set(itemid, **props) |
| 1799 pw_hash = password.encodePassword( | 1658 except (TypeError, IndexError, ValueError) as message: |
| 1800 "this is a long password to hash", | 1659 raise UsageError(message) |
| 1801 props['scheme'], | 1660 self.db_uncommitted = True |
| 1802 None, | 1661 return 0 |
| 1803 config=self.db.config | 1662 |
| 1804 ) | 1663 def do_specification(self, args): |
| 1805 toc = perf_counter() | 1664 ''"""Usage: specification classname |
| 1806 except password.PasswordValueError as e: | 1665 Show the properties for a classname. |
| 1807 print(e) | 1666 |
| 1808 print_supported_schemes() | 1667 This lists the properties for a given class. |
| 1809 return | 1668 """ |
| 1810 | 1669 if len(args) < 1: |
| 1811 if props['scheme'].startswith('PBKDF2'): | 1670 raise UsageError(_('Not enough arguments supplied')) |
| 1812 (rounds, salt, _raw_salt, digest) = password.pbkdf2_unpack( | 1671 classname = args[0] |
| 1813 pw_hash) | 1672 # get the class |
| 1673 cl = self.get_class(classname) | |
| 1674 | |
| 1675 # get the key property | |
| 1676 keyprop = cl.getkey() | |
| 1677 for key in cl.properties: | |
| 1678 value = cl.properties[key] | |
| 1679 if keyprop == key: | |
| 1680 sys.stdout.write(_('%(key)s: %(value)s (key property)\n') % | |
| 1681 locals()) | |
| 1814 else: | 1682 else: |
| 1815 rounds = _("scheme does not support rounds.") | 1683 sys.stdout.write(_('%(key)s: %(value)s\n') % locals()) |
| 1816 | 1684 |
| 1817 print(_( | 1685 def do_table(self, args): |
| 1818 "Hash time: %(time)0.9f seconds, scheme: %(scheme)s, " | 1686 ''"""Usage: table classname [property[,property]*] |
| 1819 "rounds: %(rounds)s") % | 1687 List the instances of a class in tabular form. |
| 1820 {"time": toc-tic, "scheme": props['scheme'], | 1688 |
| 1821 "rounds": rounds}) | 1689 Lists all instances of the given class. If the properties are not |
| 1690 specified, all properties are displayed. By default, the column | |
| 1691 widths are the width of the largest value. The width may be | |
| 1692 explicitly defined by defining the property as "name:width". | |
| 1693 For example:: | |
| 1694 | |
| 1695 roundup> table priority id,name:10 | |
| 1696 Id Name | |
| 1697 1 fatal-bug | |
| 1698 2 bug | |
| 1699 3 usability | |
| 1700 4 feature | |
| 1701 | |
| 1702 Also to make the width of the column the width of the label, | |
| 1703 leave a trailing : without a width on the property. For example:: | |
| 1704 | |
| 1705 roundup> table priority id,name: | |
| 1706 Id Name | |
| 1707 1 fata | |
| 1708 2 bug | |
| 1709 3 usab | |
| 1710 4 feat | |
| 1711 | |
| 1712 will result in a the 4 character wide "Name" column. | |
| 1713 """ | |
| 1714 if len(args) < 1: | |
| 1715 raise UsageError(_('Not enough arguments supplied')) | |
| 1716 classname = args[0] | |
| 1717 | |
| 1718 # get the class | |
| 1719 cl = self.get_class(classname) | |
| 1720 | |
| 1721 # figure the property names to display | |
| 1722 if len(args) > 1: | |
| 1723 prop_names = args[1].split(',') | |
| 1724 all_props = cl.getprops() | |
| 1725 for spec in prop_names: | |
| 1726 if ':' in spec: | |
| 1727 try: | |
| 1728 propname, width = spec.split(':') | |
| 1729 except (ValueError, TypeError): | |
| 1730 raise UsageError(_('"%(spec)s" not ' | |
| 1731 'name:width') % locals()) | |
| 1732 else: | |
| 1733 propname = spec | |
| 1734 if propname not in all_props: | |
| 1735 raise UsageError(_('%(classname)s has no property ' | |
| 1736 '"%(propname)s"') % locals()) | |
| 1737 else: | |
| 1738 prop_names = cl.getprops() | |
| 1739 | |
| 1740 # now figure column widths | |
| 1741 props = [] | |
| 1742 for spec in prop_names: | |
| 1743 if ':' in spec: | |
| 1744 name, width = spec.split(':') | |
| 1745 if width == '': | |
| 1746 # spec includes trailing :, use label/name width | |
| 1747 props.append((name, len(name))) | |
| 1748 else: | |
| 1749 try: | |
| 1750 props.append((name, int(width))) | |
| 1751 except ValueError: | |
| 1752 raise UsageError(_('"%(spec)s" does not have an ' | |
| 1753 'integer width: "%(width)s"') % | |
| 1754 locals()) | |
| 1755 else: | |
| 1756 # this is going to be slow | |
| 1757 maxlen = len(spec) | |
| 1758 for nodeid in cl.list(): | |
| 1759 curlen = len(str(cl.get(nodeid, spec))) | |
| 1760 if curlen > maxlen: | |
| 1761 maxlen = curlen | |
| 1762 props.append((spec, maxlen)) | |
| 1763 | |
| 1764 # now display the heading | |
| 1765 print(' '.join([name.capitalize().ljust(width) | |
| 1766 for name, width in props])) | |
| 1767 | |
| 1768 # and the table data | |
| 1769 for nodeid in cl.list(): | |
| 1770 table_columns = [] | |
| 1771 for name, width in props: | |
| 1772 if name != 'id': | |
| 1773 try: | |
| 1774 value = str(cl.get(nodeid, name)) | |
| 1775 except KeyError: | |
| 1776 # we already checked if the property is valid - a | |
| 1777 # KeyError here means the node just doesn't have a | |
| 1778 # value for it | |
| 1779 value = '' | |
| 1780 else: | |
| 1781 value = str(nodeid) | |
| 1782 f = '%%-%ds' % width | |
| 1783 table_columns.append(f % value[:width]) | |
| 1784 print(' '.join(table_columns)) | |
| 1785 return 0 | |
| 1786 | |
| 1787 def do_templates(self, args): | |
| 1788 ''"""Usage: templates [trace_search] | |
| 1789 List templates and their installed directories. | |
| 1790 | |
| 1791 With trace_search also list all directories that are | |
| 1792 searched for templates. | |
| 1793 """ | |
| 1794 import textwrap | |
| 1795 | |
| 1796 trace_search = False | |
| 1797 if args and args[0] == "trace_search": | |
| 1798 trace_search = True | |
| 1799 | |
| 1800 templates = self.listTemplates(trace_search=trace_search) | |
| 1801 | |
| 1802 for name in sorted(list(templates.keys())): | |
| 1803 templates[name]['description'] = textwrap.fill( | |
| 1804 "\n".join([line.lstrip() for line in | |
| 1805 templates[name]['description'].split("\n")]), | |
| 1806 70, | |
| 1807 subsequent_indent=" " | |
| 1808 ) | |
| 1809 print(""" | |
| 1810 Name: %(name)s | |
| 1811 Path: %(path)s | |
| 1812 Desc: %(description)s | |
| 1813 """ % templates[name]) | |
| 1814 | |
| 1815 def do_updateconfig(self, args): | |
| 1816 ''"""Usage: updateconfig <filename> | |
| 1817 Generate an updated tracker config file (ini style) in | |
| 1818 <filename>. Use current settings from existing roundup | |
| 1819 tracker in tracker home. | |
| 1820 """ | |
| 1821 self.do_genconfig(args, update=True) | |
| 1822 | 1822 |
| 1823 def run_command(self, args): | 1823 def run_command(self, args): |
| 1824 """Run a single command | 1824 """Run a single command |
| 1825 """ | 1825 """ |
| 1826 command = args[0] | 1826 command = args[0] |
