diff roundup/admin.py @ 7250:ee972b3073cb

alphabetize all do_X methods. Makes it easier to figure out where the method is defined and provides a structure for adding new methods. No functional changes; just moving existing functions around.
author John Rouillard <rouilj@ieee.org>
date Wed, 12 Apr 2023 09:52:51 -0400
parents bd2dc1484b39
children 9c067ed4568b
line wrap: on
line diff
--- a/roundup/admin.py	Tue Apr 11 22:06:07 2023 -0400
+++ b/roundup/admin.py	Wed Apr 12 09:52:51 2023 -0400
@@ -267,45 +267,6 @@
     nl_re = re.compile('[\r\n]')
     # indent_re defined above
 
-    def do_help(self, args, nl_re=nl_re, indent_re=indent_re):
-        ''"""Usage: help topic
-        Give help about topic.
-
-        commands  -- list commands
-        <command> -- help specific to a command
-        initopts  -- init command options
-        all       -- all available help
-        """
-        if len(args) > 0:
-            topic = args[0]
-        else:
-            topic = 'help'
-
-        # try help_ methods
-        if topic in self.help:
-            self.help[topic]()
-            return 0
-
-        # try command docstrings
-        try:
-            cmd_docs = self.commands.get(topic)
-        except KeyError:
-            print(_('Sorry, no help for "%(topic)s"') % locals())
-            return 1
-
-        # display the help for each match, removing the docstring indent
-        for _name, help in cmd_docs:
-            lines = nl_re.split(_(help.__doc__))
-            print(lines[0])
-            indent = indent_re.match(lines[1])
-            if indent: indent = len(indent.group(1))  # noqa: E701
-            for line in lines[1:]:
-                if indent:
-                    print(line[indent:])
-                else:
-                    print(line)
-        return 0
-
     def listTemplates(self, trace_search=False):
         """ List all the available templates.
 
@@ -410,141 +371,6 @@
         backends = roundup.backends.list_backends()
         print(_('Back ends:'), ', '.join(backends))
 
-    def do_install(self, tracker_home, args):
-        ''"""Usage: install [template [backend [key=val[,key=val]]]]
-        Install a new Roundup tracker.
-
-        The command will prompt for the tracker home directory
-        (if not supplied through TRACKER_HOME or the -i option).
-        The template and backend may be specified on the command-line
-        as arguments, in that order.
-
-        Command line arguments following the backend allows you to
-        pass initial values for config options.  For example, passing
-        "web_http_auth=no,rdbms_user=dinsdale" will override defaults
-        for options http_auth in section [web] and user in section [rdbms].
-        Please be careful to not use spaces in this argument! (Enclose
-        whole argument in quotes if you need spaces in option value).
-
-        The initialise command must be called after this command in order
-        to initialise the tracker's database. You may edit the tracker's
-        initial database contents before running that command by editing
-        the tracker's dbinit.py module init() function.
-
-        See also initopts help.
-        """
-        if len(args) < 1:
-            raise UsageError(_('Not enough arguments supplied'))
-
-        # make sure the tracker home can be created
-        tracker_home = os.path.abspath(tracker_home)
-        parent = os.path.split(tracker_home)[0]
-        if not os.path.exists(parent):
-            raise UsageError(_('Instance home parent directory "%(parent)s"'
-                               ' does not exist') % locals())
-
-        config_ini_file = os.path.join(tracker_home, CoreConfig.INI_FILE)
-        # check for both old- and new-style configs
-        if list(filter(os.path.exists, [config_ini_file,
-                os.path.join(tracker_home, 'config.py')])):
-            if not self.force:
-                ok = self.my_input(_(
-"""WARNING: There appears to be a tracker in "%(tracker_home)s"!
-If you re-install it, you will lose all the data!
-Erase it? Y/N: """) % locals())  # noqa: E122
-                if ok.strip().lower() != 'y':
-                    return 0
-
-            # clear it out so the install isn't confused
-            shutil.rmtree(tracker_home)
-
-        # select template
-        templates = self.listTemplates()
-        template = self._get_choice(
-            list_name=_('Templates:'),
-            prompt=_('Select template'),
-            options=templates,
-            argument=len(args) > 1 and args[1] or '',
-            default='classic')
-
-        # select hyperdb backend
-        import roundup.backends
-        backends = roundup.backends.list_backends()
-        backend = self._get_choice(
-            list_name=_('Back ends:'),
-            prompt=_('Select backend'),
-            options=backends,
-            argument=len(args) > 2 and args[2] or '',
-            default='anydbm')
-        # XXX perform a unit test based on the user's selections
-
-        # Process configuration file definitions
-        if len(args) > 3:
-            try:
-                defns = dict([item.split("=") for item in args[3].split(",")])
-            except Exception:
-                print(_('Error in configuration settings: "%s"') % args[3])
-                raise
-        else:
-            defns = {}
-
-        defns['rdbms_backend'] = backend
-
-        # load config_ini.ini from template if it exists.
-        # it sets parameters like template_engine that are
-        # template specific.
-        template_config = UserConfig(templates[template]['path'] +
-                                     "/config_ini.ini")
-        for k in template_config.keys():
-            if k == 'HOME':  # ignore home. It is a default param.
-                continue
-            defns[k] = template_config[k]
-
-        # install!
-        init.install(tracker_home, templates[template]['path'], settings=defns)
-
-        # Remove config_ini.ini file from tracker_home (not template dir).
-        # Ignore file not found - not all templates have
-        #   config_ini.ini files.
-        try:
-            os.remove(tracker_home + "/config_ini.ini")
-        except OSError as e:  # FileNotFound exception under py3
-            if e.errno == 2:
-                pass
-            else:
-                raise
-
-        print(_("""
----------------------------------------------------------------------------
- You should now edit the tracker configuration file:
-   %(config_file)s""") % {"config_file": config_ini_file})
-
-        # find list of options that need manual adjustments
-        # XXX config._get_unset_options() is marked as private
-        #   (leading underscore).  make it public or don't care?
-        need_set = CoreConfig(tracker_home)._get_unset_options()
-        if need_set:
-            print(_(" ... at a minimum, you must set following options:"))
-            for section in need_set:
-                print("   [%s]: %s" % (section, ", ".join(need_set[section])))
-
-        # note about schema modifications
-        print(_("""
- If you wish to modify the database schema,
- you should also edit the schema file:
-   %(database_config_file)s
- You may also change the database initialisation file:
-   %(database_init_file)s
- ... see the documentation on customizing for more information.
-
- You MUST run the "roundup-admin initialise" command once you've performed
- the above steps.
----------------------------------------------------------------------------
-""") % {'database_config_file': os.path.join(tracker_home, 'schema.py'),
-        'database_init_file': os.path.join(tracker_home, 'initial_data.py')}) \
-        # noqa: E122
-        return 0
-
     def _get_choice(self, list_name, prompt, options, argument, default=None):
         if default is None:
             default = options[0]  # just pick the first one
@@ -559,121 +385,107 @@
                 return default
         return argument
 
-    def do_genconfig(self, args, update=False):
-        ''"""Usage: genconfig <filename>
-        Generate a new tracker config file (ini style) with default
-        values in <filename>.
+    def do_commit(self, args):
+        ''"""Usage: commit
+        Commit changes made to the database during an interactive session.
+
+        The changes made during an interactive session are not
+        automatically written to the database - they must be committed
+        using this command.
+
+        One-off commands on the command-line are automatically committed if
+        they are successful.
+        """
+        self.db.commit()
+        self.db_uncommitted = False
+        return 0
+
+    def do_create(self, args):
+        ''"""Usage: create classname property=value ...
+        Create a new entry of a given class.
+
+        This creates a new entry of the given class using the property
+        name=value arguments provided on the command line after the "create"
+        command.
+        """
+        if len(args) < 1:
+            raise UsageError(_('Not enough arguments supplied'))
+        from roundup import hyperdb
+
+        classname = args[0]
+
+        # get the class
+        cl = self.get_class(classname)
+
+        # now do a create
+        props = {}
+        properties = cl.getprops(protected=0)
+        if len(args) == 1:
+            # ask for the properties
+            for key in properties:
+                if key == 'id': continue  # noqa: E701
+                value = properties[key]
+                name = value.__class__.__name__
+                if isinstance(value, hyperdb.Password):
+                    again = None
+                    while value != again:
+                        value = getpass.getpass(_('%(propname)s (Password): ')
+                                                %
+                                                {'propname': key.capitalize()})
+                        again = getpass.getpass(_('   %(propname)s (Again): ')
+                                                %
+                                                {'propname': key.capitalize()})
+                        if value != again:
+                            print(_('Sorry, try again...'))
+                    if value:
+                        props[key] = value
+                else:
+                    value = self.my_input(_('%(propname)s (%(proptype)s): ') % {
+                        'propname': key.capitalize(), 'proptype': name})
+                    if value:
+                        props[key] = value
+        else:
+            props = self.props_from_args(args[1:])
+
+        # convert types
+        for propname in props:
+            try:
+                props[propname] = hyperdb.rawToHyperdb(self.db, cl, None,
+                                                       propname,
+                                                       props[propname])
+            except hyperdb.HyperdbValueError as message:
+                raise UsageError(message)
+
+        # check for the key property
+        propname = cl.getkey()
+        if propname and propname not in props:
+            raise UsageError(_('you must provide the "%(propname)s" '
+                               'property.') % locals())
+
+        # do the actual create
+        try:
+            sys.stdout.write(cl.create(**props) + '\n')
+        except (TypeError, IndexError, ValueError) as message:
+            raise UsageError(message)
+        self.db_uncommitted = True
+        return 0
+
+    def do_display(self, args):
+        ''"""Usage: display designator[,designator]*
+
+        Show the property values for the given node(s).
+
+        A designator is a classname and a nodeid concatenated,
+        eg. bug1, user10, ...
+
+        This lists the properties and their associated values
+        for the given node.
         """
         if len(args) < 1:
             raise UsageError(_('Not enough arguments supplied'))
 
-        if update:
-            # load current config for writing
-            config = CoreConfig(self.tracker_home)
-
-            # change config to update settings to new defaults
-            # where prior defaults were chosen
-            default_ppdr = config._get_option(
-                'PASSWORD_PBKDF2_DEFAULT_ROUNDS')._default_value
-
-            print("")  # put a blank line before feedback
-            if config.PASSWORD_PBKDF2_DEFAULT_ROUNDS in [10000]:
-                print(_("Changing option\n"
-                        "   'password_pbkdf2_default_rounds'\n"
-                        "from old default of %(old_number)s to new "
-                        "default of %(new_number)s.") % {
-                            "old_number":
-                            config.PASSWORD_PBKDF2_DEFAULT_ROUNDS,
-                            "new_number": default_ppdr
-                        }
-                )
-                config.PASSWORD_PBKDF2_DEFAULT_ROUNDS = default_ppdr
-
-            if config.PASSWORD_PBKDF2_DEFAULT_ROUNDS < default_ppdr:
-                print(_("Update "
-                        "'password_pbkdf2_default_rounds' "
-                        "to a number equal to or larger\nthan %s.") %
-                      default_ppdr)
-        else:
-            # generate default config
-            config = CoreConfig()
-
-        config.save(args[0])
-
-    def do_updateconfig(self, args):
-        ''"""Usage: updateconfig <filename>
-        Generate an updated tracker config file (ini style) in
-        <filename>. Use current settings from existing roundup
-        tracker in tracker home.
-        """
-        self.do_genconfig(args, update=True)
-
-    def do_initialise(self, tracker_home, args):
-        ''"""Usage: initialise [adminpw]
-        Initialise a new Roundup tracker.
-
-        The administrator details will be set at this step.
-
-        Execute the tracker's initialisation function dbinit.init()
-        """
-        # password
-        if len(args) > 1:
-            adminpw = args[1]
-        else:
-            adminpw = ''
-            confirm = 'x'
-            while adminpw != confirm:
-                adminpw = getpass.getpass(_('Admin Password: '))
-                confirm = getpass.getpass(_('       Confirm: '))
-
-        # make sure the tracker home is installed
-        if not os.path.exists(tracker_home):
-            raise UsageError(_('Instance home does not exist') % locals())
-        try:
-            tracker = roundup.instance.open(tracker_home)
-        except roundup.instance.TrackerError:
-            raise UsageError(_('Instance has not been installed') % locals())
-
-        # is there already a database?
-        if tracker.exists():
-            if not self.force:
-                ok = self.my_input(_(
-"""WARNING: The database is already initialised!
-If you re-initialise it, you will lose all the data!
-Erase it? Y/N: """))  # noqa: E122
-                if ok.strip().lower() != 'y':
-                    return 0
-
-            # nuke it
-            tracker.nuke()
-
-        # GO
-        try:
-            tracker.init(password.Password(adminpw, config=tracker.config),
-                         tx_Source='cli')
-        except OptionUnsetError as e:
-            raise UsageError("In %(tracker_home)s/config.ini - %(error)s" % {
-                'error': str(e), 'tracker_home': tracker_home})
-
-        return 0
-
-    def do_get(self, args):
-        ''"""Usage: get property designator[,designator]*
-        Get the given property of one or more designator(s).
-
-        A designator is a classname and a nodeid concatenated,
-        eg. bug1, user10, ...
-
-        Retrieves the property value of the nodes specified
-        by the designators.
-        """
-        if len(args) < 2:
-            raise UsageError(_('Not enough arguments supplied'))
-        propname = args[0]
-        designators = args[1].split(',')
-        linked_props = []
-        for designator in designators:
-            # decode the node designator
+        # decode the node designator
+        for designator in args[0].split(','):
             try:
                 classname, nodeid = hyperdb.splitDesignator(designator)
             except hyperdb.DesignatorError as message:
@@ -681,132 +493,134 @@
 
             # get the class
             cl = self.get_class(classname)
-            try:
-                id = []
-                if self.separator:
-                    if self.print_designator:
-                        # see if property is a link or multilink for
-                        # which getting a desginator make sense.
-                        # Algorithm: Get the properties of the
-                        #     current designator's class. (cl.getprops)
-                        # get the property object for the property the
-                        #     user requested (properties[propname])
-                        # verify its type (isinstance...)
-                        # raise error if not link/multilink
-                        # get class name for link/multilink property
-                        # do the get on the designators
-                        # append the new designators
-                        # print
-                        properties = cl.getprops()
-                        property = properties[propname]
-                        if not (isinstance(property, hyperdb.Multilink) or
-                                isinstance(property, hyperdb.Link)):
-                            raise UsageError(_(
-                                'property %s is not of type'
-                                ' Multilink or Link so -d flag does not '
-                                'apply.') % propname)
-                        propclassname = self.db.getclass(property.classname).classname
-                        id = cl.get(nodeid, propname)
-                        for i in id:
-                            linked_props.append(propclassname + i)
-                    else:
-                        id = cl.get(nodeid, propname)
-                        for i in id:
-                            linked_props.append(i)
-                else:
-                    if self.print_designator:
-                        properties = cl.getprops()
-                        property = properties[propname]
-                        if not (isinstance(property, hyperdb.Multilink) or
-                                isinstance(property, hyperdb.Link)):
-                            raise UsageError(_(
-                                'property %s is not of type'
-                                ' Multilink or Link so -d flag does not '
-                                'apply.') % propname)
-                        propclassname = self.db.getclass(property.classname).classname
-                        id = cl.get(nodeid, propname)
-                        for i in id:
-                            print(propclassname + i)
-                    else:
-                        print(cl.get(nodeid, propname))
-            except IndexError:
-                raise UsageError(_('no such %(classname)s node '
-                                   '"%(nodeid)s"') % locals())
-            except KeyError:
-                raise UsageError(_('no such %(classname)s property '
-                                   '"%(propname)s"') % locals())
-        if self.separator:
-            print(self.separator.join(linked_props))
+
+            # display the values
+            keys = sorted(cl.properties)
+            for key in keys:
+                value = cl.get(nodeid, key)
+                print(_('%(key)s: %(value)s') % locals())
+
+    def do_export(self, args, export_files=True):
+        ''"""Usage: export [[-]class[,class]] export_dir
+        Export the database to colon-separated-value files.
+        To exclude the files (e.g. for the msg or file class),
+        use the exporttables command.
+
+        Optionally limit the export to just the named classes
+        or exclude the named classes, if the 1st argument starts with '-'.
+
+        This action exports the current data from the database into
+        colon-separated-value files that are placed in the nominated
+        destination directory.
+        """
+        # grab the directory to export to
+        if len(args) < 1:
+            raise UsageError(_('Not enough arguments supplied'))
+
+        dir = args[-1]
+
+        # get the list of classes to export
+        if len(args) == 2:
+            if args[0].startswith('-'):
+                classes = [c for c in self.db.classes
+                           if c not in args[0][1:].split(',')]
+            else:
+                classes = args[0].split(',')
+        else:
+            classes = self.db.classes
+
+        class colon_separated(csv.excel):
+            delimiter = ':'
+
+        # make sure target dir exists
+        if not os.path.exists(dir):
+            os.makedirs(dir)
+
+        # maximum csv field length exceeding configured size?
+        max_len = self.db.config.CSV_FIELD_SIZE
+
+        # do all the classes specified
+        for classname in classes:
+            cl = self.get_class(classname)
+
+            if not export_files and hasattr(cl, 'export_files'):
+                sys.stdout.write('Exporting %s WITHOUT the files\r\n' %
+                                 classname)
 
+            with open(os.path.join(dir, classname+'.csv'), 'w') as f:
+                writer = csv.writer(f, colon_separated)
+
+                propnames = cl.export_propnames()
+                fields = propnames[:]
+                fields.append('is retired')
+                writer.writerow(fields)
+
+                # If a node has a key, sort all nodes by key
+                # with retired nodes first. Retired nodes
+                # must occur before a non-retired node with
+                # the same key. Otherwise you get an
+                # IntegrityError: UNIQUE constraint failed:
+                #     _class.__retired__, _<class>._<keyname>
+                # on imports to rdbms.
+                all_nodes = cl.getnodeids()
+
+                classkey = cl.getkey()
+                if classkey:  # False sorts before True, so negate is_retired
+                    keysort = lambda i: (cl.get(i, classkey),  # noqa: E731
+                                         not cl.is_retired(i))
+                    all_nodes.sort(key=keysort)
+                # if there is no classkey no need to sort
+
+                for nodeid in all_nodes:
+                    if self.verbose:
+                        sys.stdout.write('\rExporting %s - %s' %
+                                         (classname, nodeid))
+                        sys.stdout.flush()
+                    node = cl.getnode(nodeid)
+                    exp = cl.export_list(propnames, nodeid)
+                    lensum = sum([len(repr_export(node[p])) for
+                                  p in propnames])
+                    # for a safe upper bound of field length we add
+                    # difference between CSV len and sum of all field lengths
+                    d = sum([len(x) for x in exp]) - lensum
+                    if not d > 0:
+                        raise AssertionError("Bad assertion d > 0")
+                    for p in propnames:
+                        ll = len(repr_export(node[p])) + d
+                        if ll > max_len:
+                            max_len = ll
+                    writer.writerow(exp)
+                    if export_files and hasattr(cl, 'export_files'):
+                        cl.export_files(dir, nodeid)
+
+            # export the journals
+            with open(os.path.join(dir, classname+'-journals.csv'), 'w') as jf:
+                if self.verbose:
+                    sys.stdout.write("\nExporting Journal for %s\n" %
+                                     classname)
+                    sys.stdout.flush()
+                journals = csv.writer(jf, colon_separated)
+                for row in cl.export_journals():
+                    journals.writerow(row)
+        if max_len > self.db.config.CSV_FIELD_SIZE:
+            print("Warning: config csv_field_size should be at least %s" %
+                  max_len, file=sys.stderr)
         return 0
 
-    def do_set(self, args):
-        ''"""Usage: set items property=value property=value ...
-        Set the given properties of one or more items(s).
-
-        The items are specified as a class or as a comma-separated
-        list of item designators (ie "designator[,designator,...]").
-
-        A designator is a classname and a nodeid concatenated,
-        eg. bug1, user10, ...
-
-        This command sets the properties to the values for all
-        designators given. If a class is used, the property will be
-        set for all items in the class. If the value is missing
-        (ie. "property=") then the property is un-set. If the property
-        is a multilink, you specify the linked ids for the multilink
-        as comma-separated numbers (ie "1,2,3").
-
-        """
-        import copy  # needed for copying props list
-
-        if len(args) < 2:
-            raise UsageError(_('Not enough arguments supplied'))
-        from roundup import hyperdb
+    def do_exporttables(self, args):
+        ''"""Usage: exporttables [[-]class[,class]] export_dir
+        Export the database to colon-separated-value files, excluding the
+        files below $TRACKER_HOME/db/files/ (which can be archived separately).
+        To include the files, use the export command.
 
-        designators = args[0].split(',')
-        if len(designators) == 1:
-            designator = designators[0]
-            try:
-                designator = hyperdb.splitDesignator(designator)
-                designators = [designator]
-            except hyperdb.DesignatorError:
-                cl = self.get_class(designator)
-                designators = [(designator, x) for x in cl.list()]
-        else:
-            try:
-                designators = [hyperdb.splitDesignator(x) for x in designators]
-            except hyperdb.DesignatorError as message:
-                raise UsageError(message)
-
-        # get the props from the args
-        propset = self.props_from_args(args[1:])  # parse the cli once
+        Optionally limit the export to just the named classes
+        or exclude the named classes, if the 1st argument starts with '-'.
 
-        # now do the set for all the nodes
-        for classname, itemid in designators:
-            props = copy.copy(propset)  # make a new copy for every designator
-            cl = self.get_class(classname)
-
-            for key, value in list(props.items()):
-                try:
-                    # You must reinitialize the props every time though.
-                    # if props['nosy'] = '+admin' initally, it gets
-                    # set to 'demo,admin' (assuming it was set to demo
-                    # in the db) after rawToHyperdb returns.
-                    # This  new value is used for all the rest of the
-                    # designators if not reinitalized.
-                    props[key] = hyperdb.rawToHyperdb(self.db, cl, itemid,
-                                                      key, value)
-                except hyperdb.HyperdbValueError as message:
-                    raise UsageError(message)
-
-            # try the set
-            try:
-                cl.set(itemid, **props)
-            except (TypeError, IndexError, ValueError) as message:
-                raise UsageError(message)
-        self.db_uncommitted = True
-        return 0
+        This action exports the current data from the database into
+        colon-separated-value files that are placed in the nominated
+        destination directory.
+        """
+        return self.do_export(args, export_files=False)
 
     def do_filter(self, args):
         ''"""Usage: filter classname propname=value ...
@@ -957,44 +771,64 @@
             raise UsageError(message)
         return 0
 
-    def do_specification(self, args):
-        ''"""Usage: specification classname
-        Show the properties for a classname.
-
-        This lists the properties for a given class.
+    def do_genconfig(self, args, update=False):
+        ''"""Usage: genconfig <filename>
+        Generate a new tracker config file (ini style) with default
+        values in <filename>.
         """
         if len(args) < 1:
             raise UsageError(_('Not enough arguments supplied'))
-        classname = args[0]
-        # get the class
-        cl = self.get_class(classname)
+
+        if update:
+            # load current config for writing
+            config = CoreConfig(self.tracker_home)
+
+            # change config to update settings to new defaults
+            # where prior defaults were chosen
+            default_ppdr = config._get_option(
+                'PASSWORD_PBKDF2_DEFAULT_ROUNDS')._default_value
 
-        # get the key property
-        keyprop = cl.getkey()
-        for key in cl.properties:
-            value = cl.properties[key]
-            if keyprop == key:
-                sys.stdout.write(_('%(key)s: %(value)s (key property)\n') %
-                                 locals())
-            else:
-                sys.stdout.write(_('%(key)s: %(value)s\n') % locals())
+            print("")  # put a blank line before feedback
+            if config.PASSWORD_PBKDF2_DEFAULT_ROUNDS in [10000]:
+                print(_("Changing option\n"
+                        "   'password_pbkdf2_default_rounds'\n"
+                        "from old default of %(old_number)s to new "
+                        "default of %(new_number)s.") % {
+                            "old_number":
+                            config.PASSWORD_PBKDF2_DEFAULT_ROUNDS,
+                            "new_number": default_ppdr
+                        }
+                )
+                config.PASSWORD_PBKDF2_DEFAULT_ROUNDS = default_ppdr
 
-    def do_display(self, args):
-        ''"""Usage: display designator[,designator]*
+            if config.PASSWORD_PBKDF2_DEFAULT_ROUNDS < default_ppdr:
+                print(_("Update "
+                        "'password_pbkdf2_default_rounds' "
+                        "to a number equal to or larger\nthan %s.") %
+                      default_ppdr)
+        else:
+            # generate default config
+            config = CoreConfig()
 
-        Show the property values for the given node(s).
+        config.save(args[0])
+
+    def do_get(self, args):
+        ''"""Usage: get property designator[,designator]*
+        Get the given property of one or more designator(s).
 
         A designator is a classname and a nodeid concatenated,
         eg. bug1, user10, ...
 
-        This lists the properties and their associated values
-        for the given node.
+        Retrieves the property value of the nodes specified
+        by the designators.
         """
-        if len(args) < 1:
+        if len(args) < 2:
             raise UsageError(_('Not enough arguments supplied'))
-
-        # decode the node designator
-        for designator in args[0].split(','):
+        propname = args[0]
+        designators = args[1].split(',')
+        linked_props = []
+        for designator in designators:
+            # decode the node designator
             try:
                 classname, nodeid = hyperdb.splitDesignator(designator)
             except hyperdb.DesignatorError as message:
@@ -1002,81 +836,408 @@
 
             # get the class
             cl = self.get_class(classname)
+            try:
+                id = []
+                if self.separator:
+                    if self.print_designator:
+                        # see if property is a link or multilink for
+                        # which getting a desginator make sense.
+                        # Algorithm: Get the properties of the
+                        #     current designator's class. (cl.getprops)
+                        # get the property object for the property the
+                        #     user requested (properties[propname])
+                        # verify its type (isinstance...)
+                        # raise error if not link/multilink
+                        # get class name for link/multilink property
+                        # do the get on the designators
+                        # append the new designators
+                        # print
+                        properties = cl.getprops()
+                        property = properties[propname]
+                        if not (isinstance(property, hyperdb.Multilink) or
+                                isinstance(property, hyperdb.Link)):
+                            raise UsageError(_(
+                                'property %s is not of type'
+                                ' Multilink or Link so -d flag does not '
+                                'apply.') % propname)
+                        propclassname = self.db.getclass(property.classname).classname
+                        id = cl.get(nodeid, propname)
+                        for i in id:
+                            linked_props.append(propclassname + i)
+                    else:
+                        id = cl.get(nodeid, propname)
+                        for i in id:
+                            linked_props.append(i)
+                else:
+                    if self.print_designator:
+                        properties = cl.getprops()
+                        property = properties[propname]
+                        if not (isinstance(property, hyperdb.Multilink) or
+                                isinstance(property, hyperdb.Link)):
+                            raise UsageError(_(
+                                'property %s is not of type'
+                                ' Multilink or Link so -d flag does not '
+                                'apply.') % propname)
+                        propclassname = self.db.getclass(property.classname).classname
+                        id = cl.get(nodeid, propname)
+                        for i in id:
+                            print(propclassname + i)
+                    else:
+                        print(cl.get(nodeid, propname))
+            except IndexError:
+                raise UsageError(_('no such %(classname)s node '
+                                   '"%(nodeid)s"') % locals())
+            except KeyError:
+                raise UsageError(_('no such %(classname)s property '
+                                   '"%(propname)s"') % locals())
+        if self.separator:
+            print(self.separator.join(linked_props))
 
-            # display the values
-            keys = sorted(cl.properties)
-            for key in keys:
-                value = cl.get(nodeid, key)
-                print(_('%(key)s: %(value)s') % locals())
+        return 0
+
+    def do_help(self, args, nl_re=nl_re, indent_re=indent_re):
+        ''"""Usage: help topic
+        Give help about topic.
+
+        commands  -- list commands
+        <command> -- help specific to a command
+        initopts  -- init command options
+        all       -- all available help
+        """
+        if len(args) > 0:
+            topic = args[0]
+        else:
+            topic = 'help'
 
-    def do_create(self, args):
-        ''"""Usage: create classname property=value ...
-        Create a new entry of a given class.
+        # try help_ methods
+        if topic in self.help:
+            self.help[topic]()
+            return 0
+
+        # try command docstrings
+        try:
+            cmd_docs = self.commands.get(topic)
+        except KeyError:
+            print(_('Sorry, no help for "%(topic)s"') % locals())
+            return 1
+
+        # display the help for each match, removing the docstring indent
+        for _name, help in cmd_docs:
+            lines = nl_re.split(_(help.__doc__))
+            print(lines[0])
+            indent = indent_re.match(lines[1])
+            if indent: indent = len(indent.group(1))  # noqa: E701
+            for line in lines[1:]:
+                if indent:
+                    print(line[indent:])
+                else:
+                    print(line)
+        return 0
+
+    def do_history(self, args):
+        ''"""Usage: history designator [skipquiet]
+        Show the history entries of a designator.
+
+        A designator is a classname and a nodeid concatenated,
+        eg. bug1, user10, ...
+
+        Lists the journal entries viewable by the user for the
+        node identified by the designator. If skipquiet is the
+        second argument, journal entries for quiet properties
+        are not shown.
+        """
 
-        This creates a new entry of the given class using the property
-        name=value arguments provided on the command line after the "create"
-        command.
+        if len(args) < 1:
+            raise UsageError(_('Not enough arguments supplied'))
+        try:
+            classname, nodeid = hyperdb.splitDesignator(args[0])
+        except hyperdb.DesignatorError as message:
+            raise UsageError(message)
+
+        skipquiet = False
+        if len(args) == 2:
+            if args[1] != 'skipquiet':
+                raise UsageError("Second argument is not skipquiet")
+            skipquiet = True
+
+        try:
+            print(self.db.getclass(classname).history(nodeid,
+                                                      skipquiet=skipquiet))
+        except KeyError:
+            raise UsageError(_('no such class "%(classname)s"') % locals())
+        except IndexError:
+            raise UsageError(_('no such %(classname)s node '
+                               '"%(nodeid)s"') % locals())
+        return 0
+
+    def do_import(self, args, import_files=True):
+        ''"""Usage: import import_dir
+        Import a database from the directory containing CSV files,
+        two per class to import.
+
+        The files used in the import are:
+
+        <class>.csv
+          This must define the same properties as the class (including
+          having a "header" line with those property names.)
+        <class>-journals.csv
+          This defines the journals for the items being imported.
+
+        The imported nodes will have the same nodeid as defined in the
+        import file, thus replacing any existing content.
+
+        The new nodes are added to the existing database - if you want to
+        create a new database using the imported data, then create a new
+        database (or, tediously, retire all the old data.)
         """
         if len(args) < 1:
             raise UsageError(_('Not enough arguments supplied'))
-        from roundup import hyperdb
+
+        if hasattr(csv, 'field_size_limit'):
+            csv.field_size_limit(self.db.config.CSV_FIELD_SIZE)
+
+        # directory to import from
+        dir = args[0]
+
+        class colon_separated(csv.excel):
+            delimiter = ':'
+
+        # import all the files
+        for file in os.listdir(dir):
+            classname, ext = os.path.splitext(file)
+            # we only care about CSV files
+            if ext != '.csv' or classname.endswith('-journals'):
+                continue
+
+            cl = self.get_class(classname)
+
+            # ensure that the properties and the CSV file headings match
+            with open(os.path.join(dir, file), 'r') as f:
+                reader = csv.reader(f, colon_separated)
+                file_props = None
+                maxid = 1
+                # loop through the file and create a node for each entry
+                for n, r in enumerate(reader):
+                    if file_props is None:
+                        file_props = r
+                        continue
+
+                    if self.verbose:
+                        sys.stdout.write('\rImporting %s - %s' % (classname, n))
+                        sys.stdout.flush()
+
+                    # do the import and figure the current highest nodeid
+                    nodeid = cl.import_list(file_props, r)
+                    if hasattr(cl, 'import_files') and import_files:
+                        cl.import_files(dir, nodeid)
+                    maxid = max(maxid, int(nodeid))
+
+                # (print to sys.stdout here to allow tests to squash it .. ugh)
+                print(file=sys.stdout)
+
+            # import the journals
+            with open(os.path.join(args[0], classname + '-journals.csv'), 'r') as f:
+                reader = csv.reader(f, colon_separated)
+                cl.import_journals(reader)
+
+            # (print to sys.stdout here to allow tests to squash it .. ugh)
+            print('setting', classname, maxid+1, file=sys.stdout)
+
+            # set the id counter
+            self.db.setid(classname, str(maxid+1))
+
+        self.db_uncommitted = True
+        return 0
+
+    def do_importtables(self, args):
+        ''"""Usage: importtables export_dir
 
-        classname = args[0]
+        This imports the database tables exported using exporttables.
+        """
+        return self.do_import(args, import_files=False)
+
+    def do_initialise(self, tracker_home, args):
+        ''"""Usage: initialise [adminpw]
+        Initialise a new Roundup tracker.
+
+        The administrator details will be set at this step.
+
+        Execute the tracker's initialisation function dbinit.init()
+        """
+        # password
+        if len(args) > 1:
+            adminpw = args[1]
+        else:
+            adminpw = ''
+            confirm = 'x'
+            while adminpw != confirm:
+                adminpw = getpass.getpass(_('Admin Password: '))
+                confirm = getpass.getpass(_('       Confirm: '))
+
+        # make sure the tracker home is installed
+        if not os.path.exists(tracker_home):
+            raise UsageError(_('Instance home does not exist') % locals())
+        try:
+            tracker = roundup.instance.open(tracker_home)
+        except roundup.instance.TrackerError:
+            raise UsageError(_('Instance has not been installed') % locals())
 
-        # get the class
-        cl = self.get_class(classname)
+        # is there already a database?
+        if tracker.exists():
+            if not self.force:
+                ok = self.my_input(_(
+"""WARNING: The database is already initialised!
+If you re-initialise it, you will lose all the data!
+Erase it? Y/N: """))  # noqa: E122
+                if ok.strip().lower() != 'y':
+                    return 0
+
+            # nuke it
+            tracker.nuke()
+
+        # GO
+        try:
+            tracker.init(password.Password(adminpw, config=tracker.config),
+                         tx_Source='cli')
+        except OptionUnsetError as e:
+            raise UsageError("In %(tracker_home)s/config.ini - %(error)s" % {
+                'error': str(e), 'tracker_home': tracker_home})
+
+        return 0
+
+    def do_install(self, tracker_home, args):
+        ''"""Usage: install [template [backend [key=val[,key=val]]]]
+        Install a new Roundup tracker.
+
+        The command will prompt for the tracker home directory
+        (if not supplied through TRACKER_HOME or the -i option).
+        The template and backend may be specified on the command-line
+        as arguments, in that order.
 
-        # now do a create
-        props = {}
-        properties = cl.getprops(protected=0)
-        if len(args) == 1:
-            # ask for the properties
-            for key in properties:
-                if key == 'id': continue  # noqa: E701
-                value = properties[key]
-                name = value.__class__.__name__
-                if isinstance(value, hyperdb.Password):
-                    again = None
-                    while value != again:
-                        value = getpass.getpass(_('%(propname)s (Password): ')
-                                                %
-                                                {'propname': key.capitalize()})
-                        again = getpass.getpass(_('   %(propname)s (Again): ')
-                                                %
-                                                {'propname': key.capitalize()})
-                        if value != again:
-                            print(_('Sorry, try again...'))
-                    if value:
-                        props[key] = value
-                else:
-                    value = self.my_input(_('%(propname)s (%(proptype)s): ') % {
-                        'propname': key.capitalize(), 'proptype': name})
-                    if value:
-                        props[key] = value
+        Command line arguments following the backend allows you to
+        pass initial values for config options.  For example, passing
+        "web_http_auth=no,rdbms_user=dinsdale" will override defaults
+        for options http_auth in section [web] and user in section [rdbms].
+        Please be careful to not use spaces in this argument! (Enclose
+        whole argument in quotes if you need spaces in option value).
+
+        The initialise command must be called after this command in order
+        to initialise the tracker's database. You may edit the tracker's
+        initial database contents before running that command by editing
+        the tracker's dbinit.py module init() function.
+
+        See also initopts help.
+        """
+        if len(args) < 1:
+            raise UsageError(_('Not enough arguments supplied'))
+
+        # make sure the tracker home can be created
+        tracker_home = os.path.abspath(tracker_home)
+        parent = os.path.split(tracker_home)[0]
+        if not os.path.exists(parent):
+            raise UsageError(_('Instance home parent directory "%(parent)s"'
+                               ' does not exist') % locals())
+
+        config_ini_file = os.path.join(tracker_home, CoreConfig.INI_FILE)
+        # check for both old- and new-style configs
+        if list(filter(os.path.exists, [config_ini_file,
+                os.path.join(tracker_home, 'config.py')])):
+            if not self.force:
+                ok = self.my_input(_(
+"""WARNING: There appears to be a tracker in "%(tracker_home)s"!
+If you re-install it, you will lose all the data!
+Erase it? Y/N: """) % locals())  # noqa: E122
+                if ok.strip().lower() != 'y':
+                    return 0
+
+            # clear it out so the install isn't confused
+            shutil.rmtree(tracker_home)
+
+        # select template
+        templates = self.listTemplates()
+        template = self._get_choice(
+            list_name=_('Templates:'),
+            prompt=_('Select template'),
+            options=templates,
+            argument=len(args) > 1 and args[1] or '',
+            default='classic')
+
+        # select hyperdb backend
+        import roundup.backends
+        backends = roundup.backends.list_backends()
+        backend = self._get_choice(
+            list_name=_('Back ends:'),
+            prompt=_('Select backend'),
+            options=backends,
+            argument=len(args) > 2 and args[2] or '',
+            default='anydbm')
+        # XXX perform a unit test based on the user's selections
+
+        # Process configuration file definitions
+        if len(args) > 3:
+            try:
+                defns = dict([item.split("=") for item in args[3].split(",")])
+            except Exception:
+                print(_('Error in configuration settings: "%s"') % args[3])
+                raise
         else:
-            props = self.props_from_args(args[1:])
+            defns = {}
+
+        defns['rdbms_backend'] = backend
+
+        # load config_ini.ini from template if it exists.
+        # it sets parameters like template_engine that are
+        # template specific.
+        template_config = UserConfig(templates[template]['path'] +
+                                     "/config_ini.ini")
+        for k in template_config.keys():
+            if k == 'HOME':  # ignore home. It is a default param.
+                continue
+            defns[k] = template_config[k]
 
-        # convert types
-        for propname in props:
-            try:
-                props[propname] = hyperdb.rawToHyperdb(self.db, cl, None,
-                                                       propname,
-                                                       props[propname])
-            except hyperdb.HyperdbValueError as message:
-                raise UsageError(message)
+        # install!
+        init.install(tracker_home, templates[template]['path'], settings=defns)
+
+        # Remove config_ini.ini file from tracker_home (not template dir).
+        # Ignore file not found - not all templates have
+        #   config_ini.ini files.
+        try:
+            os.remove(tracker_home + "/config_ini.ini")
+        except OSError as e:  # FileNotFound exception under py3
+            if e.errno == 2:
+                pass
+            else:
+                raise
 
-        # check for the key property
-        propname = cl.getkey()
-        if propname and propname not in props:
-            raise UsageError(_('you must provide the "%(propname)s" '
-                               'property.') % locals())
+        print(_("""
+---------------------------------------------------------------------------
+ You should now edit the tracker configuration file:
+   %(config_file)s""") % {"config_file": config_ini_file})
+
+        # find list of options that need manual adjustments
+        # XXX config._get_unset_options() is marked as private
+        #   (leading underscore).  make it public or don't care?
+        need_set = CoreConfig(tracker_home)._get_unset_options()
+        if need_set:
+            print(_(" ... at a minimum, you must set following options:"))
+            for section in need_set:
+                print("   [%s]: %s" % (section, ", ".join(need_set[section])))
 
-        # do the actual create
-        try:
-            sys.stdout.write(cl.create(**props) + '\n')
-        except (TypeError, IndexError, ValueError) as message:
-            raise UsageError(message)
-        self.db_uncommitted = True
+        # note about schema modifications
+        print(_("""
+ If you wish to modify the database schema,
+ you should also edit the schema file:
+   %(database_config_file)s
+ You may also change the database initialisation file:
+   %(database_init_file)s
+ ... see the documentation on customizing for more information.
+
+ You MUST run the "roundup-admin initialise" command once you've performed
+ the above steps.
+---------------------------------------------------------------------------
+""") % {'database_config_file': os.path.join(tracker_home, 'schema.py'),
+        'database_init_file': os.path.join(tracker_home, 'initial_data.py')}) \
+        # noqa: E122
         return 0
 
     def do_list(self, args):
@@ -1132,33 +1293,394 @@
                 print(_('%(nodeid)4s: %(value)s') % locals())
         return 0
 
-    def do_templates(self, args):
-        ''"""Usage: templates [trace_search]
-        List templates and their installed directories.
+    def do_migrate(self, args):
+        ''"""Usage: migrate
+
+        Update a tracker's database to be compatible with the Roundup
+        codebase.
+
+        You should run the "migrate" command for your tracker once
+        you've installed the latest codebase.
+
+        Do this before you use the web, command-line or mail interface
+        and before any users access the tracker.
+
+        This command will respond with either "Tracker updated" (if
+        you've not previously run it on an RDBMS backend) or "No
+        migration action required" (if you have run it, or have used
+        another interface to the tracker, or possibly because you are
+        using anydbm).
+
+        It's safe to run this even if it's not required, so just get
+        into the habit.
+        """
+        if self.db.db_version_updated:
+            print(_('Tracker updated to schema version %s.') %
+                  self.db.database_schema['version'])
+            self.db_uncommitted = True
+        else:
+            print(_('No migration action required. At schema version %s.') %
+                  self.db.database_schema['version'])
+        return 0
+
+    def do_pack(self, args):
+        ''"""Usage: pack period | date
+
+        Remove journal entries older than a period of time specified or
+        before a certain date.
+
+        A period is specified using the suffixes "y", "m", and "d". The
+        suffix "w" (for "week") means 7 days.
+
+              "3y" means three years
+              "2y 1m" means two years and one month
+              "1m 25d" means one month and 25 days
+              "2w 3d" means two weeks and three days
+
+        Date format is "YYYY-MM-DD" eg:
+            2001-01-01
+
+        """
+        if len(args) != 1:
+            raise UsageError(_('Not enough arguments supplied'))
+
+        # are we dealing with a period or a date
+        value = args[0]
+        date_re = re.compile(r"""
+              (?P<date>\d\d\d\d-\d\d?-\d\d?)? # yyyy-mm-dd
+              (?P<period>(\d+y\s*)?(\d+m\s*)?(\d+d\s*)?)?
+              """, re.VERBOSE)
+        m = date_re.match(value)
+        if not m:
+            raise ValueError(_('Invalid format'))
+        m = m.groupdict()
+        if m['period']:
+            pack_before = date.Date(". - %s" % value)
+        elif m['date']:
+            pack_before = date.Date(value)
+        self.db.pack(pack_before)
+        self.db_uncommitted = True
+        return 0
+
+    def do_perftest(self, args):
+        ''"""Usage: perftest [mode] [arguments]*
+
+        Time operations in Roundup. Supported arguments:
+
+            [password] [rounds=<integer>] [scheme=<scheme>]
+
+        'password' is the default mode.  The tracker's config.ini
+        setting for 'password_pbkdf2_default_rounds' is the default
+        value for 'rounds'. On the command line, 'rounds' can include
+        thousands separator of ',' or '.'.  'scheme' is the default
+        coded into Roundup. List supported schemes by using 'scheme='.
+
+        """
+        from roundup.anypy.time_ import perf_counter
 
-        With trace_search also list all directories that are
-        searched for templates.
+        props = {"rounds": self.db.config.PASSWORD_PBKDF2_DEFAULT_ROUNDS,
+                 "scheme": password.Password.known_schemes[0]}
+
+        print_supported_schemes = lambda: print(
+            "Supported schemes (default is first, case "
+            "sensitive):\n   %s." %
+            ", ".join(password.Password.known_schemes))
+
+        if (args[0].find("=") != -1):
+            args.insert(0, 'password')
+
+        props.update(self.props_from_args(args[1:]))
+
+        if args[0] == "password":
+            try:
+                # convert 10,000,000 or 10.000.000 to 10000000
+                r = int(re.sub('[,.]', '', props['rounds']))
+                if r < 1000:
+                    print(_("Invalid 'rounds'. Must be larger than 999."))
+                    return
+                props['rounds'] = r
+            except (TypeError, ValueError):
+                print(_("Invalid 'rounds'. It must be an integer not: %s") %
+                      props['rounds'])
+                return
+            if props['scheme'] is None:
+                print_supported_schemes()
+                return
+
+            self.db.config.PASSWORD_PBKDF2_DEFAULT_ROUNDS = props['rounds']
+
+            try:
+                tic = perf_counter()
+                pw_hash = password.encodePassword(
+                    "this is a long password to hash",
+                    props['scheme'],
+                    None,
+                    config=self.db.config
+                )
+                toc = perf_counter()
+            except password.PasswordValueError as e:
+                print(e)
+                print_supported_schemes()
+                return
+
+            if props['scheme'].startswith('PBKDF2'):
+                (rounds, salt, _raw_salt, digest) = password.pbkdf2_unpack(
+                    pw_hash)
+            else:
+                rounds = _("scheme does not support rounds.")
+
+            print(_(
+                "Hash time: %(time)0.9f seconds, scheme: %(scheme)s, "
+                "rounds: %(rounds)s") %
+                  {"time": toc-tic, "scheme": props['scheme'],
+                   "rounds": rounds})
+
+    designator_re = re.compile('([A-Za-z]+)([0-9]+)')
+
+    def do_reindex(self, args, desre=designator_re):
+        ''"""Usage: reindex [classname|designator]*
+        Re-generate a tracker's search indexes.
+
+        This will re-generate the search indexes for a tracker.
+        This will typically happen automatically.
         """
-        import textwrap
+        if args:
+            for arg in args:
+                m = desre.match(arg)
+                if m:
+                    cl = self.get_class(m.group(1))
+                    try:
+                        cl.index(m.group(2))
+                    except IndexError:
+                        raise UsageError(_('no such item "%(designator)s"') % {
+                            'designator': arg})
+                else:
+                    cl = self.get_class(arg)
+                    self.db.reindex(arg)
+        else:
+            self.db.reindex(show_progress=True)
+        return 0
 
-        trace_search = False
-        if args and args[0] == "trace_search":
-            trace_search = True
+    def do_restore(self, args):
+        ''"""Usage: restore designator[,designator]*
+        Restore the retired node specified by designator.
+
+        A designator is a classname and a nodeid concatenated,
+        eg. bug1, user10, ...
 
-        templates = self.listTemplates(trace_search=trace_search)
+        The given nodes will become available for users again.
+        """
+        if len(args) < 1:
+            raise UsageError(_('Not enough arguments supplied'))
+        designators = args[0].split(',')
+        for designator in designators:
+            try:
+                classname, nodeid = hyperdb.splitDesignator(designator)
+            except hyperdb.DesignatorError as message:
+                raise UsageError(message)
+            try:
+                dbclass = self.db.getclass(classname)
+            except KeyError:
+                raise UsageError(_('no such class "%(classname)s"') % locals())
+            try:
+                dbclass.restore(nodeid)
+            except KeyError as e:
+                raise UsageError(e.args[0])
+            except IndexError:
+                raise UsageError(_('no such %(classname)s node '
+                                   '" % (nodeid)s"') % locals())
+        self.db_uncommitted = True
+        return 0
+
+    def do_retire(self, args):
+        ''"""Usage: retire designator[,designator]*
+        Retire the node specified by designator.
+
+        A designator is a classname and a nodeid concatenated,
+        eg. bug1, user10, ...
+
+        This action indicates that a particular node is not to be retrieved
+        by the list or find commands, and its key value may be re-used.
+        """
+        if len(args) < 1:
+            raise UsageError(_('Not enough arguments supplied'))
+        designators = args[0].split(',')
+        for designator in designators:
+            try:
+                classname, nodeid = hyperdb.splitDesignator(designator)
+            except hyperdb.DesignatorError as message:
+                raise UsageError(message)
+            try:
+                self.db.getclass(classname).retire(nodeid)
+            except KeyError:
+                raise UsageError(_('no such class "%(classname)s"') % locals())
+            except IndexError:
+                raise UsageError(_('no such %(classname)s node '
+                                   '"%(nodeid)s"') % locals())
+        self.db_uncommitted = True
+        return 0
+
+    def do_rollback(self, args):
+        ''"""Usage: rollback
+        Undo all changes that are pending commit to the database.
+
+        The changes made during an interactive session are not
+        automatically written to the database - they must be committed
+        manually. This command undoes all those changes, so a commit
+        immediately after would make no changes to the database.
+        """
+        self.db.rollback()
+        self.db_uncommitted = False
+        return 0
+
+    def do_security(self, args):
+        ''"""Usage: security [Role name]
 
-        for name in sorted(list(templates.keys())):
-            templates[name]['description'] = textwrap.fill(
-                "\n".join([line.lstrip() for line in
-                           templates[name]['description'].split("\n")]),
-                70,
-                subsequent_indent="      "
-            )
-            print("""
-Name: %(name)s
-Path: %(path)s
-Desc: %(description)s
-""" % templates[name])
+             Display the Permissions available to one or all Roles.
+        """
+        if len(args) == 1:
+            role = args[0]
+            try:
+                roles = [(args[0], self.db.security.role[args[0]])]
+            except KeyError:
+                sys.stdout.write(_('No such Role "%(role)s"\n') % locals())
+                return 1
+        else:
+            roles = list(self.db.security.role.items())
+            role = self.db.config.NEW_WEB_USER_ROLES
+            if ',' in role:
+                sys.stdout.write(_('New Web users get the Roles "%(role)s"\n')
+                                 % locals())
+            else:
+                sys.stdout.write(_('New Web users get the Role "%(role)s"\n')
+                                 % locals())
+            role = self.db.config.NEW_EMAIL_USER_ROLES
+            if ',' in role:
+                sys.stdout.write(_('New Email users get the Roles "%(role)s"\n') % locals())
+            else:
+                sys.stdout.write(_('New Email users get the Role "%(role)s"\n') % locals())
+        roles.sort()
+        for _rolename, role in roles:
+            sys.stdout.write(_('Role "%(name)s":\n') % role.__dict__)
+            for permission in role.permissions:
+                d = permission.__dict__
+                if permission.klass:
+                    if permission.properties:
+                        sys.stdout.write(_(
+                            ' %(description)s (%(name)s for "%(klass)s"' +
+                            ': %(properties)s only)\n') % d)
+                        # verify that properties exist; report bad props
+                        bad_props = []
+                        cl = self.db.getclass(permission.klass)
+                        class_props = cl.getprops(protected=True)
+                        for p in permission.properties:
+                            if p in class_props:
+                                continue
+                            else:
+                                bad_props.append(p)
+                        if bad_props:
+                            sys.stdout.write(_(
+                                '\n  **Invalid properties for %(class)s: '
+                                '%(props)s\n\n') % {
+                                    "class": permission.klass,
+                                    "props": bad_props})
+                            return 1
+                    else:
+                        sys.stdout.write(_(' %(description)s (%(name)s for '
+                                           '"%(klass)s" only)\n') % d)
+                else:
+                    sys.stdout.write(_(' %(description)s (%(name)s)\n') % d)
+        return 0
+
+    def do_set(self, args):
+        ''"""Usage: set items property=value property=value ...
+        Set the given properties of one or more item(s).
+
+        The items are specified as a class or as a comma-separated
+        list of item designators (ie "designator[,designator,...]").
+
+        A designator is a classname and a nodeid concatenated,
+        eg. bug1, user10, ...
+
+        This command sets the properties to the values for all
+        designators given. If a class is used, the property will be
+        set for all items in the class. If the value is missing
+        (ie. "property=") then the property is un-set. If the property
+        is a multilink, you specify the linked ids for the multilink
+        as comma-separated numbers (ie "1,2,3").
+
+        """
+        import copy  # needed for copying props list
+
+        if len(args) < 2:
+            raise UsageError(_('Not enough arguments supplied'))
+        from roundup import hyperdb
+
+        designators = args[0].split(',')
+        if len(designators) == 1:
+            designator = designators[0]
+            try:
+                designator = hyperdb.splitDesignator(designator)
+                designators = [designator]
+            except hyperdb.DesignatorError:
+                cl = self.get_class(designator)
+                designators = [(designator, x) for x in cl.list()]
+        else:
+            try:
+                designators = [hyperdb.splitDesignator(x) for x in designators]
+            except hyperdb.DesignatorError as message:
+                raise UsageError(message)
+
+        # get the props from the args
+        propset = self.props_from_args(args[1:])  # parse the cli once
+
+        # now do the set for all the nodes
+        for classname, itemid in designators:
+            props = copy.copy(propset)  # make a new copy for every designator
+            cl = self.get_class(classname)
+
+            for key, value in list(props.items()):
+                try:
+                    # You must reinitialize the props every time though.
+                    # if props['nosy'] = '+admin' initially, it gets
+                    # set to 'demo,admin' (assuming it was set to demo
+                    # in the db) after rawToHyperdb returns.
+                    # This new value is used for all the rest of the
+                    # designators if not reinitialized.
+                    props[key] = hyperdb.rawToHyperdb(self.db, cl, itemid,
+                                                      key, value)
+                except hyperdb.HyperdbValueError as message:
+                    raise UsageError(message)
+
+            # try the set
+            try:
+                cl.set(itemid, **props)
+            except (TypeError, IndexError, ValueError) as message:
+                raise UsageError(message)
+        self.db_uncommitted = True
+        return 0
+
+    def do_specification(self, args):
+        ''"""Usage: specification classname
+        Show the properties for a classname.
+
+        This lists the properties for a given class.
+        """
+        if len(args) < 1:
+            raise UsageError(_('Not enough arguments supplied'))
+        classname = args[0]
+        # get the class
+        cl = self.get_class(classname)
+
+        # get the key property
+        keyprop = cl.getkey()
+        for key in cl.properties:
+            value = cl.properties[key]
+            if keyprop == key:
+                sys.stdout.write(_('%(key)s: %(value)s (key property)\n') %
+                                 locals())
+            else:
+                sys.stdout.write(_('%(key)s: %(value)s\n') % locals())
 
     def do_table(self, args):
         ''"""Usage: table classname [property[,property]*]
@@ -1262,563 +1784,41 @@
             print(' '.join(table_columns))
         return 0
 
-    def do_history(self, args):
-        ''"""Usage: history designator [skipquiet]
-        Show the history entries of a designator.
-
-        A designator is a classname and a nodeid concatenated,
-        eg. bug1, user10, ...
-
-        Lists the journal entries viewable by the user for the
-        node identified by the designator. If skipquiet is the
-        second argument, journal entries for quiet properties
-        are not shown.
-        """
-
-        if len(args) < 1:
-            raise UsageError(_('Not enough arguments supplied'))
-        try:
-            classname, nodeid = hyperdb.splitDesignator(args[0])
-        except hyperdb.DesignatorError as message:
-            raise UsageError(message)
-
-        skipquiet = False
-        if len(args) == 2:
-            if args[1] != 'skipquiet':
-                raise UsageError("Second argument is not skipquiet")
-            skipquiet = True
-
-        try:
-            print(self.db.getclass(classname).history(nodeid,
-                                                      skipquiet=skipquiet))
-        except KeyError:
-            raise UsageError(_('no such class "%(classname)s"') % locals())
-        except IndexError:
-            raise UsageError(_('no such %(classname)s node '
-                               '"%(nodeid)s"') % locals())
-        return 0
-
-    def do_commit(self, args):
-        ''"""Usage: commit
-        Commit changes made to the database during an interactive session.
-
-        The changes made during an interactive session are not
-        automatically written to the database - they must be committed
-        using this command.
-
-        One-off commands on the command-line are automatically committed if
-        they are successful.
-        """
-        self.db.commit()
-        self.db_uncommitted = False
-        return 0
-
-    def do_rollback(self, args):
-        ''"""Usage: rollback
-        Undo all changes that are pending commit to the database.
-
-        The changes made during an interactive session are not
-        automatically written to the database - they must be committed
-        manually. This command undoes all those changes, so a commit
-        immediately after would make no changes to the database.
-        """
-        self.db.rollback()
-        self.db_uncommitted = False
-        return 0
-
-    def do_retire(self, args):
-        ''"""Usage: retire designator[,designator]*
-        Retire the node specified by designator.
+    def do_templates(self, args):
+        ''"""Usage: templates [trace_search]
+        List templates and their installed directories.
 
-        A designator is a classname and a nodeid concatenated,
-        eg. bug1, user10, ...
-
-        This action indicates that a particular node is not to be retrieved
-        by the list or find commands, and its key value may be re-used.
-        """
-        if len(args) < 1:
-            raise UsageError(_('Not enough arguments supplied'))
-        designators = args[0].split(',')
-        for designator in designators:
-            try:
-                classname, nodeid = hyperdb.splitDesignator(designator)
-            except hyperdb.DesignatorError as message:
-                raise UsageError(message)
-            try:
-                self.db.getclass(classname).retire(nodeid)
-            except KeyError:
-                raise UsageError(_('no such class "%(classname)s"') % locals())
-            except IndexError:
-                raise UsageError(_('no such %(classname)s node '
-                                   '"%(nodeid)s"') % locals())
-        self.db_uncommitted = True
-        return 0
-
-    def do_restore(self, args):
-        ''"""Usage: restore designator[,designator]*
-        Restore the retired node specified by designator.
-
-        A designator is a classname and a nodeid concatenated,
-        eg. bug1, user10, ...
-
-        The given nodes will become available for users again.
-        """
-        if len(args) < 1:
-            raise UsageError(_('Not enough arguments supplied'))
-        designators = args[0].split(',')
-        for designator in designators:
-            try:
-                classname, nodeid = hyperdb.splitDesignator(designator)
-            except hyperdb.DesignatorError as message:
-                raise UsageError(message)
-            try:
-                dbclass = self.db.getclass(classname)
-            except KeyError:
-                raise UsageError(_('no such class "%(classname)s"') % locals())
-            try:
-                dbclass.restore(nodeid)
-            except KeyError as e:
-                raise UsageError(e.args[0])
-            except IndexError:
-                raise UsageError(_('no such %(classname)s node '
-                                   '" % (nodeid)s"') % locals())
-        self.db_uncommitted = True
-        return 0
-
-    def do_export(self, args, export_files=True):
-        ''"""Usage: export [[-]class[,class]] export_dir
-        Export the database to colon-separated-value files.
-        To exclude the files (e.g. for the msg or file class),
-        use the exporttables command.
-
-        Optionally limit the export to just the named classes
-        or exclude the named classes, if the 1st argument starts with '-'.
-
-        This action exports the current data from the database into
-        colon-separated-value files that are placed in the nominated
-        destination directory.
+        With trace_search also list all directories that are
+        searched for templates.
         """
-        # grab the directory to export to
-        if len(args) < 1:
-            raise UsageError(_('Not enough arguments supplied'))
-
-        dir = args[-1]
-
-        # get the list of classes to export
-        if len(args) == 2:
-            if args[0].startswith('-'):
-                classes = [c for c in self.db.classes
-                           if c not in args[0][1:].split(',')]
-            else:
-                classes = args[0].split(',')
-        else:
-            classes = self.db.classes
-
-        class colon_separated(csv.excel):
-            delimiter = ':'
-
-        # make sure target dir exists
-        if not os.path.exists(dir):
-            os.makedirs(dir)
-
-        # maximum csv field length exceeding configured size?
-        max_len = self.db.config.CSV_FIELD_SIZE
-
-        # do all the classes specified
-        for classname in classes:
-            cl = self.get_class(classname)
-
-            if not export_files and hasattr(cl, 'export_files'):
-                sys.stdout.write('Exporting %s WITHOUT the files\r\n' %
-                                 classname)
-
-            with open(os.path.join(dir, classname+'.csv'), 'w') as f:
-                writer = csv.writer(f, colon_separated)
-
-                propnames = cl.export_propnames()
-                fields = propnames[:]
-                fields.append('is retired')
-                writer.writerow(fields)
-
-                # If a node has a key, sort all nodes by key
-                # with retired nodes first. Retired nodes
-                # must occur before a non-retired node with
-                # the same key. Otherwise you get an
-                # IntegrityError: UNIQUE constraint failed:
-                #     _class.__retired__, _<class>._<keyname>
-                # on imports to rdbms.
-                all_nodes = cl.getnodeids()
-
-                classkey = cl.getkey()
-                if classkey:  # False sorts before True, so negate is_retired
-                    keysort = lambda i: (cl.get(i, classkey),  # noqa: E731
-                                         not cl.is_retired(i))
-                    all_nodes.sort(key=keysort)
-                # if there is no classkey no need to sort
+        import textwrap
 
-                for nodeid in all_nodes:
-                    if self.verbose:
-                        sys.stdout.write('\rExporting %s - %s' %
-                                         (classname, nodeid))
-                        sys.stdout.flush()
-                    node = cl.getnode(nodeid)
-                    exp = cl.export_list(propnames, nodeid)
-                    lensum = sum([len(repr_export(node[p])) for
-                                  p in propnames])
-                    # for a safe upper bound of field length we add
-                    # difference between CSV len and sum of all field lengths
-                    d = sum([len(x) for x in exp]) - lensum
-                    if not d > 0:
-                        raise AssertionError("Bad assertion d > 0")
-                    for p in propnames:
-                        ll = len(repr_export(node[p])) + d
-                        if ll > max_len:
-                            max_len = ll
-                    writer.writerow(exp)
-                    if export_files and hasattr(cl, 'export_files'):
-                        cl.export_files(dir, nodeid)
-
-            # export the journals
-            with open(os.path.join(dir, classname+'-journals.csv'), 'w') as jf:
-                if self.verbose:
-                    sys.stdout.write("\nExporting Journal for %s\n" %
-                                     classname)
-                    sys.stdout.flush()
-                journals = csv.writer(jf, colon_separated)
-                for row in cl.export_journals():
-                    journals.writerow(row)
-        if max_len > self.db.config.CSV_FIELD_SIZE:
-            print("Warning: config csv_field_size should be at least %s" %
-                  max_len, file=sys.stderr)
-        return 0
+        trace_search = False
+        if args and args[0] == "trace_search":
+            trace_search = True
 
-    def do_exporttables(self, args):
-        ''"""Usage: exporttables [[-]class[,class]] export_dir
-        Export the database to colon-separated-value files, excluding the
-        files below $TRACKER_HOME/db/files/ (which can be archived separately).
-        To include the files, use the export command.
-
-        Optionally limit the export to just the named classes
-        or exclude the named classes, if the 1st argument starts with '-'.
-
-        This action exports the current data from the database into
-        colon-separated-value files that are placed in the nominated
-        destination directory.
-        """
-        return self.do_export(args, export_files=False)
-
-    def do_import(self, args, import_files=True):
-        ''"""Usage: import import_dir
-        Import a database from the directory containing CSV files,
-        two per class to import.
-
-        The files used in the import are:
-
-        <class>.csv
-          This must define the same properties as the class (including
-          having a "header" line with those property names.)
-        <class>-journals.csv
-          This defines the journals for the items being imported.
-
-        The imported nodes will have the same nodeid as defined in the
-        import file, thus replacing any existing content.
-
-        The new nodes are added to the existing database - if you want to
-        create a new database using the imported data, then create a new
-        database (or, tediously, retire all the old data.)
-        """
-        if len(args) < 1:
-            raise UsageError(_('Not enough arguments supplied'))
-
-        if hasattr(csv, 'field_size_limit'):
-            csv.field_size_limit(self.db.config.CSV_FIELD_SIZE)
-
-        # directory to import from
-        dir = args[0]
-
-        class colon_separated(csv.excel):
-            delimiter = ':'
+        templates = self.listTemplates(trace_search=trace_search)
 
-        # import all the files
-        for file in os.listdir(dir):
-            classname, ext = os.path.splitext(file)
-            # we only care about CSV files
-            if ext != '.csv' or classname.endswith('-journals'):
-                continue
-
-            cl = self.get_class(classname)
-
-            # ensure that the properties and the CSV file headings match
-            with open(os.path.join(dir, file), 'r') as f:
-                reader = csv.reader(f, colon_separated)
-                file_props = None
-                maxid = 1
-                # loop through the file and create a node for each entry
-                for n, r in enumerate(reader):
-                    if file_props is None:
-                        file_props = r
-                        continue
-
-                    if self.verbose:
-                        sys.stdout.write('\rImporting %s - %s' % (classname, n))
-                        sys.stdout.flush()
-
-                    # do the import and figure the current highest nodeid
-                    nodeid = cl.import_list(file_props, r)
-                    if hasattr(cl, 'import_files') and import_files:
-                        cl.import_files(dir, nodeid)
-                    maxid = max(maxid, int(nodeid))
-
-                # (print to sys.stdout here to allow tests to squash it .. ugh)
-                print(file=sys.stdout)
-
-            # import the journals
-            with open(os.path.join(args[0], classname + '-journals.csv'), 'r') as f:
-                reader = csv.reader(f, colon_separated)
-                cl.import_journals(reader)
-
-            # (print to sys.stdout here to allow tests to squash it .. ugh)
-            print('setting', classname, maxid+1, file=sys.stdout)
-
-            # set the id counter
-            self.db.setid(classname, str(maxid+1))
-
-        self.db_uncommitted = True
-        return 0
-
-    def do_importtables(self, args):
-        ''"""Usage: importtables export_dir
-
-        This imports the database tables exported using exporttables.
-        """
-        return self.do_import(args, import_files=False)
-
-    def do_pack(self, args):
-        ''"""Usage: pack period | date
-
-        Remove journal entries older than a period of time specified or
-        before a certain date.
+        for name in sorted(list(templates.keys())):
+            templates[name]['description'] = textwrap.fill(
+                "\n".join([line.lstrip() for line in
+                           templates[name]['description'].split("\n")]),
+                70,
+                subsequent_indent="      "
+            )
+            print("""
+Name: %(name)s
+Path: %(path)s
+Desc: %(description)s
+""" % templates[name])
 
-        A period is specified using the suffixes "y", "m", and "d". The
-        suffix "w" (for "week") means 7 days.
-
-              "3y" means three years
-              "2y 1m" means two years and one month
-              "1m 25d" means one month and 25 days
-              "2w 3d" means two weeks and three days
-
-        Date format is "YYYY-MM-DD" eg:
-            2001-01-01
-
-        """
-        if len(args) != 1:
-            raise UsageError(_('Not enough arguments supplied'))
-
-        # are we dealing with a period or a date
-        value = args[0]
-        date_re = re.compile(r"""
-              (?P<date>\d\d\d\d-\d\d?-\d\d?)? # yyyy-mm-dd
-              (?P<period>(\d+y\s*)?(\d+m\s*)?(\d+d\s*)?)?
-              """, re.VERBOSE)
-        m = date_re.match(value)
-        if not m:
-            raise ValueError(_('Invalid format'))
-        m = m.groupdict()
-        if m['period']:
-            pack_before = date.Date(". - %s" % value)
-        elif m['date']:
-            pack_before = date.Date(value)
-        self.db.pack(pack_before)
-        self.db_uncommitted = True
-        return 0
-
-    designator_re = re.compile('([A-Za-z]+)([0-9]+)')
-
-    def do_reindex(self, args, desre=designator_re):
-        ''"""Usage: reindex [classname|designator]*
-        Re-generate a tracker's search indexes.
-
-        This will re-generate the search indexes for a tracker.
-        This will typically happen automatically.
-        """
-        if args:
-            for arg in args:
-                m = desre.match(arg)
-                if m:
-                    cl = self.get_class(m.group(1))
-                    try:
-                        cl.index(m.group(2))
-                    except IndexError:
-                        raise UsageError(_('no such item "%(designator)s"') % {
-                            'designator': arg})
-                else:
-                    cl = self.get_class(arg)
-                    self.db.reindex(arg)
-        else:
-            self.db.reindex(show_progress=True)
-        return 0
-
-    def do_security(self, args):
-        ''"""Usage: security [Role name]
-
-             Display the Permissions available to one or all Roles.
+    def do_updateconfig(self, args):
+        ''"""Usage: updateconfig <filename>
+        Generate an updated tracker config file (ini style) in
+        <filename>. Use current settings from existing roundup
+        tracker in tracker home.
         """
-        if len(args) == 1:
-            role = args[0]
-            try:
-                roles = [(args[0], self.db.security.role[args[0]])]
-            except KeyError:
-                sys.stdout.write(_('No such Role "%(role)s"\n') % locals())
-                return 1
-        else:
-            roles = list(self.db.security.role.items())
-            role = self.db.config.NEW_WEB_USER_ROLES
-            if ',' in role:
-                sys.stdout.write(_('New Web users get the Roles "%(role)s"\n')
-                                 % locals())
-            else:
-                sys.stdout.write(_('New Web users get the Role "%(role)s"\n')
-                                 % locals())
-            role = self.db.config.NEW_EMAIL_USER_ROLES
-            if ',' in role:
-                sys.stdout.write(_('New Email users get the Roles "%(role)s"\n') % locals())
-            else:
-                sys.stdout.write(_('New Email users get the Role "%(role)s"\n') % locals())
-        roles.sort()
-        for _rolename, role in roles:
-            sys.stdout.write(_('Role "%(name)s":\n') % role.__dict__)
-            for permission in role.permissions:
-                d = permission.__dict__
-                if permission.klass:
-                    if permission.properties:
-                        sys.stdout.write(_(
-                            ' %(description)s (%(name)s for "%(klass)s"' +
-                            ': %(properties)s only)\n') % d)
-                        # verify that properties exist; report bad props
-                        bad_props = []
-                        cl = self.db.getclass(permission.klass)
-                        class_props = cl.getprops(protected=True)
-                        for p in permission.properties:
-                            if p in class_props:
-                                continue
-                            else:
-                                bad_props.append(p)
-                        if bad_props:
-                            sys.stdout.write(_(
-                                '\n  **Invalid properties for %(class)s: '
-                                '%(props)s\n\n') % {
-                                    "class": permission.klass,
-                                    "props": bad_props})
-                            return 1
-                    else:
-                        sys.stdout.write(_(' %(description)s (%(name)s for '
-                                           '"%(klass)s" only)\n') % d)
-                else:
-                    sys.stdout.write(_(' %(description)s (%(name)s)\n') % d)
-        return 0
-
-    def do_migrate(self, args):
-        ''"""Usage: migrate
-
-        Update a tracker's database to be compatible with the Roundup
-        codebase.
-
-        You should run the "migrate" command for your tracker once
-        you've installed the latest codebase.
-
-        Do this before you use the web, command-line or mail interface
-        and before any users access the tracker.
-
-        This command will respond with either "Tracker updated" (if
-        you've not previously run it on an RDBMS backend) or "No
-        migration action required" (if you have run it, or have used
-        another interface to the tracker, or possibly because you are
-        using anydbm).
-
-        It's safe to run this even if it's not required, so just get
-        into the habit.
-        """
-        if self.db.db_version_updated:
-            print(_('Tracker updated to schema version %s.') %
-                  self.db.database_schema['version'])
-            self.db_uncommitted = True
-        else:
-            print(_('No migration action required. At schema version %s.') %
-                  self.db.database_schema['version'])
-        return 0
-
-    def do_perftest(self, args):
-        ''"""Usage: perftest [mode] [arguments]*
-
-        Time operations in Roundup. Supported arguments:
-
-            [password] [rounds=<integer>] [scheme=<scheme>]
-
-        'password' is the default mode.  The tracker's config.ini
-        setting for 'password_pbkdf2_default_rounds' is the default
-        value for 'rounds'. On the command line, 'rounds' can include
-        thousands separator of ',' or '.'.  'scheme' is the default
-        coded into Roundup. List supported schemes by using 'scheme='.
-
-        """
-        from roundup.anypy.time_ import perf_counter
-
-        props = {"rounds": self.db.config.PASSWORD_PBKDF2_DEFAULT_ROUNDS,
-                 "scheme": password.Password.known_schemes[0]}
-
-        print_supported_schemes = lambda: print(
-            "Supported schemes (default is first, case "
-            "sensitive):\n   %s." %
-            ", ".join(password.Password.known_schemes))
-
-        if (args[0].find("=") != -1):
-            args.insert(0, 'password')
-
-        props.update(self.props_from_args(args[1:]))
-
-        if args[0] == "password":
-            try:
-                # convert 10,000,000 or 10.000.000 to 10000000
-                r = int(re.sub('[,.]', '', props['rounds']))
-                if r < 1000:
-                    print(_("Invalid 'rounds'. Must be larger than 999."))
-                    return
-                props['rounds'] = r
-            except (TypeError, ValueError):
-                print(_("Invalid 'rounds'. It must be an integer not: %s") %
-                      props['rounds'])
-                return
-            if props['scheme'] is None:
-                print_supported_schemes()
-                return
-
-            self.db.config.PASSWORD_PBKDF2_DEFAULT_ROUNDS = props['rounds']
-
-            try:
-                tic = perf_counter()
-                pw_hash = password.encodePassword(
-                    "this is a long password to hash",
-                    props['scheme'],
-                    None,
-                    config=self.db.config
-                )
-                toc = perf_counter()
-            except password.PasswordValueError as e:
-                print(e)
-                print_supported_schemes()
-                return
-
-            if props['scheme'].startswith('PBKDF2'):
-                (rounds, salt, _raw_salt, digest) = password.pbkdf2_unpack(
-                    pw_hash)
-            else:
-                rounds = _("scheme does not support rounds.")
-
-            print(_(
-                "Hash time: %(time)0.9f seconds, scheme: %(scheme)s, "
-                "rounds: %(rounds)s") %
-                  {"time": toc-tic, "scheme": props['scheme'],
-                   "rounds": rounds})
+        self.do_genconfig(args, update=True)
 
     def run_command(self, args):
         """Run a single command

Roundup Issue Tracker: http://roundup-tracker.org/