Mercurial > p > roundup > code
changeset 6506:9906ab204610 issue2550923_computed_property
merge from trunk. travisci xenial plus more
| author | John Rouillard <rouilj@ieee.org> |
|---|---|
| date | Fri, 08 Oct 2021 00:14:44 -0400 |
| parents | 30358e334232 (current diff) e594da00f99f (diff) |
| children | 85db90cc1705 |
| files | CHANGES.txt roundup/instance.py |
| diffstat | 18 files changed, 469 insertions(+), 116 deletions(-) [+] |
line wrap: on
line diff
--- a/.travis.yml Thu Sep 16 14:30:56 2021 -0400 +++ b/.travis.yml Fri Oct 08 00:14:44 2021 -0400 @@ -1,18 +1,12 @@ +# check syntax using: +# https://config.travis-ci.com/explore + os: linux language: python cache: pip -python: - - 2.7 - - 3.4 - - 3.6 - - 3.7 - - 3.8 - - 3.9-dev - - nightly - #I would like to build and test the maint-1.6 and trunk/default #but we need different environments for these: # maint-1.6 only python 2, install only psycopg2 version with support for @@ -23,29 +17,18 @@ # - maint-1.6 dist: - - xenial + - bionic -# Commented out stanza for bionic 18.04. Currently testing on -# xenial 16.04. -# Consider move to this after 2.1.0 release. Python 3.4 -# is not supported on bionic and 3.4 is obsolete. Was retained -# because 3.4 was EPEL version for centos 7. With centos demise, -# remove it from 'python:' settings and test earliest still supported -# release, last two production releases and nightly to cut down on cost -# of testing. -# dist: -# - bionic -# -# python: -# - 2.7 -# - 3.6 -# - 3.8 -# - 3.9-dev -# - nightly -# -# services: -# - mysql -# - postgresql +python: + - 2.7 + - 3.9-dev + - 3.8 + - 3.6 + - nightly + +services: + - mysql + - postgresql jobs: allow_failures: # nightly not ready for prime time yet. @@ -102,7 +85,7 @@ - GPGME_VERSION=1.11.1 - cd /tmp - curl -s -O https://www.gnupg.org/ftp/gcrypt/gpgme/gpgme-$GPGME_VERSION.tar.bz2 - - tar -jxvf gpgme-$GPGME_VERSION.tar.bz2 + - tar -jxf gpgme-$GPGME_VERSION.tar.bz2 - cd gpgme-$GPGME_VERSION - ./configure --prefix=$VIRTUAL_ENV - make && make install @@ -137,11 +120,27 @@ # needed for test_mysql.mysqlDBTest.testFilteringSpecialChars - sed -i 's/CREATE DATABASE \%s/CREATE DATABASE \%s COLLATE utf8_general_ci/' roundup/backends/back_mysql.py + # build the .mo translation files and install them into a tree + # (locale/locale under roundup directory root) + # suitable for use by gettext. 
+ - (cd locale; make local_install; ls -lR locale/de/LC_MESSAGES) + script: - PATH=$VIRTUAL_ENV/bin:$PATH - export LD_LIBRARY_PATH=$VIRTUAL_ENV/lib:$LD_LIBRARY_PATH - - py.test -v --maxfail=20 test/ --cov=roundup - + - if [[ "$TRAVIS_PYTHON_VERSION" != "2."* ]]; then + py.test + -W default + -W "ignore:SelectableGroups:DeprecationWarning" + -W "ignore:the imp module:DeprecationWarning:gpg.gpgme:15" + -W "ignore:'U' mode::docutils.io" + -W "ignore:unclosed:ResourceWarning:roundup.roundup.demo" + -W "ignore:unclosed file:ResourceWarning:enum" + -v --maxfail=20 test/ --cov=roundup; + fi + - if [[ "$TRAVIS_PYTHON_VERSION" == "2."* ]]; then + py.test -v --maxfail=20 test/ --cov=roundup; + fi after_success: - codecov
--- a/CHANGES.txt Thu Sep 16 14:30:56 2021 -0400 +++ b/CHANGES.txt Fri Oct 08 00:14:44 2021 -0400 @@ -15,6 +15,17 @@ Fixed: +- issue2551161 - Fix ResourceWarnings when running with -W default. + Cleaned up leaking file descriptors from zopetal pre-compile, python + module compile and loading localization file. (John Rouillard) +- When using roundup-server with native SSL, only accept TLS v1.2. + Previously it used to accept only TLS v1.1. 1.1 is deprecated by + chrome. I don't expect this to be a major problem since a front + end server (apache, Nginx...) is usually customer facing and + terminates SSL. +- Fix hang when valid user without authorization for REST tries to use + the rest interface. + Features: - issue2551147 - Enable compression of http responses in roundup.
--- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/issues/extensions/templating.py Fri Oct 08 00:14:44 2021 -0400 @@ -0,0 +1,104 @@ +import logging +logger = logging.getLogger('extension') + +import sys +from roundup import __version__ as roundup_version +def AboutPage(db): + "report useful info about this tracker" + + def is_module_loaded(module): + modules = sys.modules.keys() + return module in modules + + def get_status_of_module(module, prefix=None, version=True): + modules = sys.modules.keys() + is_enabled = module in modules + if is_enabled: + if module == 'pyme': + from pyme import version + version="version %s"%version.versionstr + elif module == 'pychart': + from pychart import version + version="version %s"%version.version + elif module == 'sqlite3': + from sqlite3 import version + version="version %s"%version + else: + if version: + m = __import__(module) + try: + version="version %s"%m.__version__ + except AttributeError: + version="version unavailable - exception thrown" + else: + version="version unavailable" + + if prefix: + return "%s %s %s enabled: %s"%(prefix, module, version, is_enabled) + else: + return "Module: %s %s enabled: %s"%(module, version, is_enabled) + else: + if prefix: + return "%s %s enabled: %s"%(prefix, module, is_enabled) + else: + return "Module: %s enabled: %s"%(module, is_enabled) + + info = [] + + info.append("Tracker name: %s<br>"%db.config['TRACKER_NAME']) + + info.append("<h2>Operating environment</h2>") + info.append('<a href="http://roundup.sourceforge.net/">Roundup</a> version: %s<br>'%roundup_version) + info.append("Python Version: %s<br>"%sys.version) + + info.append("<h2>Configuration</h2>") + + backend = db.config['RDBMS_BACKEND'] + info.append("Roundup backend: %s<br>"%backend) + if backend != 'anydbm': + info.append("Roundup db cache: %s<br>"%db.config['RDBMS_CACHE_SIZE']) + info.append("Roundup isolation_level: %s<br>"%db.config['RDBMS_ISOLATION_LEVEL']) + + info.append("Roundup template: 
%s<br>"%db.config['TEMPLATE_ENGINE']) + + info.append("<h2>Database modules</h2>") + info.append(get_status_of_module('anydbm', version=False) + "<br>") + info.append(get_status_of_module('sqlite3') + "<br>") + info.append(get_status_of_module('MySQLdb') + "<br>") + info.append(get_status_of_module('psycopg2') + "<br>") + + info.append("<h2>Other modules</h2>") + + info.append(get_status_of_module('pytz') + "<br>") + if is_module_loaded('xapian'): + info.append(get_status_of_module('xapian', prefix="Test indexer:") + + "<br>") + elif is_module_loaded('whoosh'): + info.append(get_status_of_module('whoosh', prefix="Test indexer:") + + "<br>") + else: + info.append("Text indexer: Native enabled: True<br>") + + info.append(get_status_of_module('pyme') + "<br>") + info.append(get_status_of_module('OpenSSL') + "<br>") + info.append(get_status_of_module('pychart') + "<br>") + + info.append(get_status_of_module('jinja2') + "<br>") + + if db._db.getuid() == "1": + #may leak sensitive info about system, directory paths etc. + #and keys so require admin user access. Consider expanding + #to Admin rights for tracker. + info.append("") + info.append("Module Path: %r"%sys.path) + + info.append("<h2>Environment Variables</h2>") + info.append("<pre>") # include pre to prevent wrapping of values + for key in db._client.env.keys(): + info.append("%s=%s"%(key,db._client.env[key]) + "<br>") + info.append("</pre>") + return "\n".join(info) + +def init(instance): + instance.registerUtil('AboutPage', AboutPage) +
--- a/locale/GNUmakefile Thu Sep 16 14:30:56 2021 -0400 +++ b/locale/GNUmakefile Fri Oct 08 00:14:44 2021 -0400 @@ -50,6 +50,13 @@ --copyright-holder="See Roundup README.txt" \ -o $(TEMPLATE) $(SOURCES) +local_install: dist + for file in $(MO_FILES); do \ + lang=`basename $$file .mo`; \ + mkdir -p locale/$$lang/LC_MESSAGES; \ + cp $$file locale/$$lang/LC_MESSAGES/roundup.mo; \ + done + # helps to check template file before check in diff: svn diff roundup.pot|grep -v '^[-+]#'|vim -Rv -
--- a/roundup/admin.py Thu Sep 16 14:30:56 2021 -0400 +++ b/roundup/admin.py Fri Oct 08 00:14:44 2021 -0400 @@ -1310,64 +1310,60 @@ sys.stdout.write('Exporting %s WITHOUT the files\r\n' % classname) - f = open(os.path.join(dir, classname+'.csv'), 'w') - writer = csv.writer(f, colon_separated) + with open(os.path.join(dir, classname+'.csv'), 'w') as f: + writer = csv.writer(f, colon_separated) - properties = cl.getprops() - propnames = cl.export_propnames() - fields = propnames[:] - fields.append('is retired') - writer.writerow(fields) + properties = cl.getprops() + propnames = cl.export_propnames() + fields = propnames[:] + fields.append('is retired') + writer.writerow(fields) - # If a node has a key, sort all nodes by key - # with retired nodes first. Retired nodes - # must occur before a non-retired node with - # the same key. Otherwise you get an - # IntegrityError: UNIQUE constraint failed: - # _class.__retired__, _<class>._<keyname> - # on imports to rdbms. - all_nodes = cl.getnodeids() + # If a node has a key, sort all nodes by key + # with retired nodes first. Retired nodes + # must occur before a non-retired node with + # the same key. Otherwise you get an + # IntegrityError: UNIQUE constraint failed: + # _class.__retired__, _<class>._<keyname> + # on imports to rdbms. 
+ all_nodes = cl.getnodeids() - classkey = cl.getkey() - if classkey: # False sorts before True, so negate is_retired - keysort = lambda i: (cl.get(i, classkey), - not cl.is_retired(i)) - all_nodes.sort(key=keysort) - # if there is no classkey no need to sort + classkey = cl.getkey() + if classkey: # False sorts before True, so negate is_retired + keysort = lambda i: (cl.get(i, classkey), + not cl.is_retired(i)) + all_nodes.sort(key=keysort) + # if there is no classkey no need to sort - for nodeid in all_nodes: - if self.verbose: - sys.stdout.write('\rExporting %s - %s' % - (classname, nodeid)) - sys.stdout.flush() - node = cl.getnode(nodeid) - exp = cl.export_list(propnames, nodeid) - lensum = sum([len(repr_export(node[p])) for p in propnames]) - # for a safe upper bound of field length we add - # difference between CSV len and sum of all field lengths - d = sum([len(x) for x in exp]) - lensum - if not d > 0: - raise AssertionError("Bad assertion d > 0") - for p in propnames: - ll = len(repr_export(node[p])) + d - if ll > max_len: - max_len = ll - writer.writerow(exp) - if export_files and hasattr(cl, 'export_files'): - cl.export_files(dir, nodeid) - - # close this file - f.close() + for nodeid in all_nodes: + if self.verbose: + sys.stdout.write('\rExporting %s - %s' % + (classname, nodeid)) + sys.stdout.flush() + node = cl.getnode(nodeid) + exp = cl.export_list(propnames, nodeid) + lensum = sum([len(repr_export(node[p])) for p in propnames]) + # for a safe upper bound of field length we add + # difference between CSV len and sum of all field lengths + d = sum([len(x) for x in exp]) - lensum + if not d > 0: + raise AssertionError("Bad assertion d > 0") + for p in propnames: + ll = len(repr_export(node[p])) + d + if ll > max_len: + max_len = ll + writer.writerow(exp) + if export_files and hasattr(cl, 'export_files'): + cl.export_files(dir, nodeid) # export the journals - jf = open(os.path.join(dir, classname+'-journals.csv'), 'w') - if self.verbose: - 
sys.stdout.write("\nExporting Journal for %s\n" % classname) - sys.stdout.flush() - journals = csv.writer(jf, colon_separated) - for row in cl.export_journals(): - journals.writerow(row) - jf.close() + with open(os.path.join(dir, classname+'-journals.csv'), 'w') as jf: + if self.verbose: + sys.stdout.write("\nExporting Journal for %s\n" % classname) + sys.stdout.flush() + journals = csv.writer(jf, colon_separated) + for row in cl.export_journals(): + journals.writerow(row) if max_len > self.db.config.CSV_FIELD_SIZE: print("Warning: config csv_field_size should be at least %s" % max_len, file=sys.stderr)
--- a/roundup/backends/blobfiles.py Thu Sep 16 14:30:56 2021 -0400 +++ b/roundup/backends/blobfiles.py Fri Oct 08 00:14:44 2021 -0400 @@ -332,7 +332,9 @@ # in multi-tracker (i.e. multi-umask) or modpython scenarios # the umask may have changed since last we set it. os.umask(self.umask) - open(name, 'wb').write(content) + fd = open(name, 'wb') + fd.write(content) + fd.close() def getfile(self, classname, nodeid, property): """Get the content of the file in the database.
--- a/roundup/backends/indexer_dbm.py Thu Sep 16 14:30:56 2021 -0400 +++ b/roundup/backends/indexer_dbm.py Fri Oct 08 00:14:44 2021 -0400 @@ -51,7 +51,9 @@ # for now the file itself is a flag self.force_reindex() elif os.path.exists(version): - version = open(version).read() + fd = open(version) + version = fd.read() + fd.close() # check the value and reindex if it's not the latest if version.strip() != '1': self.force_reindex() @@ -63,7 +65,9 @@ shutil.rmtree(self.indexdb_path) os.makedirs(self.indexdb_path) os.chmod(self.indexdb_path, 0o775) # nosec - allow group write - open(os.path.join(self.indexdb_path, 'version'), 'w').write('1\n') + fd = open(os.path.join(self.indexdb_path, 'version'), 'w') + fd.write('1\n') + fd.close() self.reindex = 1 self.changed = 1 @@ -260,6 +264,7 @@ filename = self.indexdb + initchar pickle_fh = open(filename, 'wb') pickle_fh.write(zlib.compress(pickle_str)) + pickle_fh.close() os.chmod(filename, 0o664) # save done
--- a/roundup/cgi/client.py Thu Sep 16 14:30:56 2021 -0400 +++ b/roundup/cgi/client.py Fri Oct 08 00:14:44 2021 -0400 @@ -607,7 +607,10 @@ if not self.db.security.hasPermission('Rest Access', self.userid): self.response_code = 403 - self.write(s2b('{ "error": { "status": 403, "msg": "Forbidden." } }')) + output = s2b('{ "error": { "status": 403, "msg": "Forbidden." } }') + self.setHeader("Content-Length", str(len(output))) + self.setHeader("Content-Type", "application/json") + self.write(output) return self.check_anonymous_access()
--- a/roundup/cgi/engine_zopetal.py Thu Sep 16 14:30:56 2021 -0400 +++ b/roundup/cgi/engine_zopetal.py Fri Oct 08 00:14:44 2021 -0400 @@ -42,7 +42,8 @@ pt = RoundupPageTemplate() # use pt_edit so we can pass the content_type guess too content_type = mimetypes.guess_type(filename)[0] or 'text/html' - pt.pt_edit(open(src).read(), content_type) + with open(src) as srcd: + pt.pt_edit(srcd.read(), content_type) pt.id = filename pt.mtime = stime # Add it to the cache. We cannot do this until the template
--- a/roundup/i18n.py Thu Sep 16 14:30:56 2021 -0400 +++ b/roundup/i18n.py Fri Oct 08 00:14:44 2021 -0400 @@ -194,6 +194,7 @@ # note: current implementation of gettext_module # always adds fallback to the end of the fallback chain. translator.add_fallback(translation_class(mo)) + mo.close() except IOError: # ignore unreadable .mo files pass
--- a/roundup/instance.py Thu Sep 16 14:30:56 2021 -0400 +++ b/roundup/instance.py Fri Oct 08 00:14:44 2021 -0400 @@ -219,7 +219,8 @@ def _compile(self, fname): fname = os.path.join(self.tracker_home, fname) - return compile(builtins.open(fname).read(), fname, 'exec') + with builtins.open(fname) as fnamed: + return compile(fnamed.read(), fname, 'exec') def _exec(self, obj, env): if self.libdir:
--- a/roundup/mailer.py Thu Sep 16 14:30:56 2021 -0400 +++ b/roundup/mailer.py Fri Oct 08 00:14:44 2021 -0400 @@ -278,9 +278,11 @@ # that resulting file can be openened in a mailer fmt = '%a %b %m %H:%M:%S %Y' unixfrm = 'From %s %s' % (sender, Date('.').pretty(fmt)) - open(self.debug, 'a').write('%s\nFROM: %s\nTO: %s\n%s\n\n' % + debug_fh = open(self.debug, 'a') + debug_fh.write('%s\nFROM: %s\nTO: %s\n%s\n\n' % (unixfrm, sender, ', '.join(to), message)) + debug_fh.close() else: # now try to send the message try:
--- a/roundup/scripts/roundup_server.py Thu Sep 16 14:30:56 2021 -0400 +++ b/roundup/scripts/roundup_server.py Fri Oct 08 00:14:44 2021 -0400 @@ -120,7 +120,7 @@ cert.get_issuer().O = 'Self-Signed' cert.set_pubkey(pkey) cert.sign(pkey, 'sha512') - ctx = SSL.Context(OpenSSL.SSL.TLSv1_1_METHOD) + ctx = SSL.Context(OpenSSL.SSL.TLSv1_2_METHOD) ctx.use_privatekey(pkey) ctx.use_certificate(cert) @@ -133,7 +133,7 @@ http_.server.HTTPServer.__init__(self, server_address, HandlerClass) self.socket = socket.socket(self.address_family, self.socket_type) if ssl_pem: - ctx = SSL.Context(SSL.TLSv1_1_METHOD) + ctx = SSL.Context(SSL.TLSv1_2_METHOD) ctx.use_privatekey_file(ssl_pem) ctx.use_certificate_file(ssl_pem) else:
--- a/roundup/test/memorydb.py Thu Sep 16 14:30:56 2021 -0400 +++ b/roundup/test/memorydb.py Fri Oct 08 00:14:44 2021 -0400 @@ -46,25 +46,32 @@ # load standard schema if not prefix.startswith('/'): prefix = os.path.join (os.path.dirname(__file__), prefix) + schema = os.path.join(prefix, 'schema.py') vars = hyperdb.__dict__ vars['Class'] = Class vars['FileClass'] = FileClass vars['IssueClass'] = IssueClass vars['db'] = db - exec(compile(open(schema).read(), schema, 'exec'), vars) + fd = open(schema) + exec(compile(fd.read(), schema, 'exec'), vars) + fd.close() + initial_data = os.path.join(prefix, 'initial_data.py') vars = dict(db=db, admin_email='admin@test.com', adminpw=password.Password('sekrit')) - exec(compile(open(initial_data).read(), initial_data, 'exec'), vars) + fd = open(initial_data) + exec(compile(fd.read(), initial_data, 'exec'), vars) + fd.close() # load standard detectors dirname = os.path.join(prefix, 'detectors') for fn in os.listdir(dirname): if not fn.endswith('.py'): continue vars = {} - exec(compile(open(os.path.join(dirname, fn)).read(), - os.path.join(dirname, fn), 'exec'), vars) + with open(os.path.join(dirname, fn)) as fd: + exec(compile(fd.read(), + os.path.join(dirname, fn), 'exec'), vars) vars['init'](db) tx_Source_init(db)
--- a/test/test_dates.py Thu Sep 16 14:30:56 2021 -0400 +++ b/test/test_dates.py Fri Oct 08 00:14:44 2021 -0400 @@ -37,10 +37,18 @@ class DateTestCase(unittest.TestCase): def setUp(self): + # force use of local locale directory. System locale dir + # doesn't have the locale files installed, and without wiping + # the default the .mo file sometimes isn't found. + i18n.LOCALE_DIRS=[] + i18n.LOCALE_DIRS.insert(0,"locale/locale") + self.old_gettext_ = i18n.gettext self.old_ngettext_ = i18n.ngettext i18n.gettext = i18n.get_translation(language='C').gettext + i18n.degettext = i18n.get_translation(language='de').gettext i18n.ngettext = i18n.get_translation(language='C').ngettext + i18n.dengettext = i18n.get_translation(language='de').ngettext def tearDown(self): i18n.gettext = self.old_gettext_ @@ -421,6 +429,46 @@ ae('-1y', '1 year ago') ae('-2y', '2 years ago') + def testIntervalPrettyDe(self): + gettext = i18n.gettext + ngettext = i18n.ngettext + + i18n.gettext = i18n.degettext + i18n.ngettext = i18n.dengettext + + def ae(spec, pretty): + self.assertEqual(Interval(spec).pretty(), pretty) + ae('2y', 'in 2 Jahren') + ae('1y', 'in 1 Jahr') + ae('2m', 'in 2 Monaten') + ae('59d', 'in 1 Monat') + ae('1m', 'in 1 Monat') + '''ae('29d', 'in 1 month') + ae('28d', 'in 4 weeks') + ae('8d', 'in 1 week') + ae('7d', 'in 7 days') + ae('1w', 'in 7 days') + ae('2d', 'in 2 days') + ae('1d', 'tomorrow') + ae('02:00:00', 'in 2 hours') + ae('01:59:00', 'in 1 3/4 hours') + ae('01:45:00', 'in 1 3/4 hours') + ae('01:30:00', 'in 1 1/2 hours') + ae('01:29:00', 'in 1 1/4 hours') + ae('01:00:00', 'in an hour') + ae('00:30:00', 'in 1/2 an hour') + ae('00:15:00', 'in 1/4 hour') + ae('00:02:00', 'in 2 minutes') + ae('00:01:00', 'in 1 minute') + ae('00:00:30', 'in a moment') + ae('-00:00:30', 'just now') + ae('-1d', 'yesterday') + ae('-1y', '1 year ago') + ae('-2y', '2 years ago')''' + + i18n.gettext = gettext + i18n.ngettext = ngettext + def testPyDatetime(self): d = datetime.datetime.now() Date(d)
--- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/website/issues/extensions/templating.py Fri Oct 08 00:14:44 2021 -0400 @@ -0,0 +1,121 @@ +import logging +logger = logging.getLogger('extension') + +import sys +from roundup import __version__ as roundup_version +def AboutPage(db): + "report useful info about this tracker" + + def is_module_loaded(module): + modules = list(sys.modules.keys()) + return module in modules + + def get_status_of_module(module, prefix=None, version=True): + modules = list(sys.modules.keys()) + is_enabled = module in modules + if is_enabled: + if module == 'pyme': + from pyme import version + version="version %s"%version.versionstr + elif module == 'pychart': + from pychart import version + version="version %s"%version.version + elif module == 'sqlite3': + from sqlite3 import version + version="version %s"%version + elif module == 'xapian': + from xapian import version_string + version="version %s"%version_string() + else: + if version: + m = __import__(module) + try: + version="version %s"%m.__version__ + except AttributeError: + version="version unavailable - exception thrown" + else: + version="version unavailable" + + if prefix: + return "%s %s %s enabled: %s"%(prefix, module, version, is_enabled) + else: + return "Module: %s %s enabled: %s"%(module, version, is_enabled) + else: + if prefix: + return "%s %s enabled: %s"%(prefix, module, is_enabled) + else: + return "Module: %s enabled: %s"%(module, is_enabled) + + info = [] + + info.append("Tracker name: %s<br>"%db.config['TRACKER_NAME']) + + info.append("<h2>Operating environment</h2>") + info.append('<a href="http://roundup.sourceforge.net/">Roundup</a> version: %s<br>'%roundup_version) + info.append("Python Version: %s<br>"%sys.version) + + info.append("<h2>Configuration</h2>") + + backend = db.config['RDBMS_BACKEND'] + info.append("Roundup backend: %s<br>"%backend) + if backend != 'anydbm': + info.append("Roundup db cache: %s<br>"%db.config['RDBMS_CACHE_SIZE']) + 
info.append("Roundup isolation_level: %s<br>"%db.config['RDBMS_ISOLATION_LEVEL']) + + info.append("Roundup template: %s<br>"%db.config['TEMPLATE_ENGINE']) + + info.append("<h2>Database modules</h2>") + info.append(get_status_of_module('anydbm', version=False) + "<br>") + info.append(get_status_of_module('dbm', version=False) + "<br>") + info.append(get_status_of_module('sqlite3') + "<br>") + info.append(get_status_of_module('MySQLdb') + "<br>") + info.append(get_status_of_module('psycopg2') + "<br>") + + info.append("<h2>Other modules</h2>") + + indexer = db.config['INDEXER'] + if not indexer: + if is_module_loaded('xapian'): + indexer="unset using xapian" + elif is_module_loaded('whoosh'): + indexer="unset using woosh" + else: + indexer="unset using native" + else: + indexer="set to " + indexer + + info.append("Indexer used for full-text: %s<br>"%indexer) + + info.append("Available indexers:<br><ul>") + if is_module_loaded('xapian'): + info.append("<li>%s</li>"%get_status_of_module('xapian', prefix="Indexer loaded:")) + if is_module_loaded('whoosh'): + info.append("<li>%s</li>"%get_status_of_module('whoosh', prefix="Indexer loaded:")) + info.append("<li>Indexer loaded: native: True</li>") + info.append("</ul>") + info.append(get_status_of_module('pytz') + "<br>") + info.append(get_status_of_module('pyme') + "<br>") + info.append(get_status_of_module('OpenSSL') + "<br>") + info.append(get_status_of_module('pychart') + "<br>") + info.append(get_status_of_module('pygal') + "<br>") + + info.append(get_status_of_module('jinja2') + "<br>") + + uid = db._db.getuid() + if uid == "1" or db._db.user.has_role(uid,"Admin"): + #may leak sensitive info about system, directory paths etc. + #and keys so require admin user access. Consider expanding + #to Admin rights for tracker. 
+ info.append("") + info.append("Module Path: %r"%sys.path) + + info.append("<h2>Environment Variables</h2>") + info.append("<pre>") # include pre to prevent wrapping of values + for key in db._client.env.keys(): + info.append("%s=%s"%(key,db._client.env[key]) + "<br>") + info.append("</pre>") + return "\n".join(info) + +def init(instance): + instance.registerUtil('AboutPage', AboutPage) +
--- a/website/issues/html/issue.item.html Thu Sep 16 14:30:56 2021 -0400 +++ b/website/issues/html/issue.item.html Fri Oct 08 00:14:44 2021 -0400 @@ -162,7 +162,7 @@ <input type="file" id="file-1@content" name="file-1@content" size="40"> </td> <th><label for="file-1@description" i18n:translate="">File Description</label>:</th> - <td colspan=3><input type="edit" id="file-1@description" name="file-1@description" size="40"></td> + <td colspan=3><input type="edit" class="fileDesc" id="file-1@description" name="file-1@description" size="40"></td> </tr> <tr tal:condition="context/is_edit_ok"> <td colspan=4> @@ -222,22 +222,55 @@ let fileInput = document.getElementById('file-1@content'); let fileDesc = document.getElementById('file-1@description'); + function make_clear_fileInput_closure(input) { + return function(ev) { input.value = ""; + ev.preventDefault();} + } + + + function make_new_clear_button() { + newClearInput=document.createElement('button'); + newClearInput.textContent = "X"; + newClearInput.setAttribute("aria-label", + "Clear next file input."); + newClearInput.setAttribute("title", + "Clear next file input."); + newClearInput.classList.add("clearButton"); + return newClearInput; + } + + function make_new_file_input() { + newInput=document.createElement('input'); + newInput.type="file"; + newInput.id="file-" + NextInputNum +"@content"; + newInput.name=newInput.id; + return newInput; + } + function add_file_input () { - // Only allow one change listener on newest input. 
fileInput.removeEventListener('change', add_file_input, false); + newClearInput = fileInput.insertAdjacentElement( + 'beforebegin', + make_new_clear_button()); + + // add change handler to file clear button + newClearInput.addEventListener('click', + make_clear_fileInput_closure(fileInput), + false); + + /* Insert break so next input is on new line */ + br = fileInput.insertAdjacentElement('afterend', + document.createElement('br')); /* create new file input to get next dragged file */ /* <input type="file" name="file-2@content"> for 2, 3, 4, ... */ - newInput=document.createElement('input'); - newInput.type="file"; - newInput.id="file-" + NextInputNum +"@content"; - newInput.name=newInput.id; - fileInput = fileInput.insertAdjacentElement('afterend', - newInput); + fileInput = br.insertAdjacentElement('afterend', + make_new_file_input()); + // add change hander to newest file input fileInput.addEventListener('change', add_file_input, // create new input for more files @@ -259,15 +292,20 @@ addLink.value="file-" + NextInputNum fileInput.insertAdjacentElement('afterend', addLink); - addLink=document.createElement('input'); - addLink.type="edit"; - addLink.id="file-" + NextInputNum + "@description"; - addLink.name=addLink.id - addLink.size = 40 - fileDesc=fileDesc.insertAdjacentElement('afterend', addLink); + /* break line before description field to prevent + wrapping multiple descriptions onto one line when + zoomed out or large display.*/ + br = fileDesc.insertAdjacentElement('afterend', + document.createElement('br')); + fileDesc=document.createElement('input'); + fileDesc.type="edit"; + fileDesc.id="file-" + NextInputNum + "@description"; + fileDesc.name=fileDesc.id + fileDesc.size = 40 + fileDesc.classList.add("fileDesc"); + fileDesc=br.insertAdjacentElement('afterend', fileDesc); NextInputNum = NextInputNum+1; - } function MarkDropZone(e, active) { @@ -365,7 +403,7 @@ }, false); </script> <style tal:attributes="nonce request/client/client_nonce"> - #FileArea 
input[type=file] ~ input[type=file] {display:block;} + #FileArea button.clearButton ~ input[type=file] {display:inline-block;} #DropZone { /* don't display dropzone by default. Displayed as block by javascript. */ display:none; @@ -378,6 +416,7 @@ /* lighter color */ background: rgba(255,255,255,0.4); } + input[id$=\@content], input.fileDesc {margin-block-end: 0.5em} </style> <p tal:condition="context/id" i18n:translate="">
--- a/website/issues/html/style.css Thu Sep 16 14:30:56 2021 -0400 +++ b/website/issues/html/style.css Fri Oct 08 00:14:44 2021 -0400 @@ -153,13 +153,19 @@ table:not(.list) th { text-align: left; + padding-block-end: 0.75em; +} + +table textarea { + width: 99%; } table th[colspan] { text-align: center; } /* Need some space between content of Issue List columns */ -td, th { padding-left: 1em; } +td, th { padding-left: 1em; + vertical-align: top; } tr.odd { background-color:#f5f5f5; }
