commit     a10eb6420bfe48ec51a73a04058b67d685b686b7
tree       e2befc7cd05b12ae10ac8fc7ac8d5ac5ba1e170c
parent     9875fd8537b25ae69b67f0fd0ffdf8aada1ea250
author     Magnus Hagander    2019-01-21 09:42:05 +0000
committer  Magnus Hagander    2019-01-26 15:19:26 +0000
More generic changes from 2to3
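
Most of the hunks below are the same mechanical fix repeated: in Python 3, dict.keys(), dict.items() and dict.values() return live view objects instead of lists, and map()/filter() return lazy iterators, so 2to3 wraps each call in list() to keep the Python 2 behaviour wherever the result is indexed, reused, or iterated while the dict is being modified. A minimal sketch of the semantics involved (the dict here is illustrative, not code from this patch):

    # Python 3 semantics that the list() wrapping guards against.
    # `platform_names` is an illustrative stand-in, not code from pgweb.
    platform_names = {'el': 'RHEL', 'fedora': 'Fedora'}

    keys = platform_names.keys()        # a dict_keys view, not a list
    # keys[0]                           # TypeError: views are not subscriptable
    keys = list(platform_names.keys())  # snapshot: safe to index, and safe to
                                        # loop over while mutating the dict

    lengths = map(len, keys)            # lazy iterator, consumed exactly once
    lengths = list(map(len, keys))      # materialized list, reusable as in Python 2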
-rw-r--r--  pgweb/account/urls.py                                     2
-rw-r--r--  pgweb/core/views.py                                       2
-rw-r--r--  pgweb/downloads/views.py                                  8
-rw-r--r--  pgweb/featurematrix/models.py                             2
-rw-r--r--  pgweb/security/admin.py                                   4
-rw-r--r--  pgweb/security/management/commands/update_cve_links.py    2
-rw-r--r--  pgweb/security/models.py                                  4
-rw-r--r--  pgweb/util/helpers.py                                     2
-rw-r--r--  pgweb/util/middleware.py                                  4
-rw-r--r--  pgweb/util/signals.py                                    21
-rwxr-xr-x  tools/docs/docload.py                                     2
-rwxr-xr-x  tools/ftp/spider_ftp.py                                   2
-rwxr-xr-x  tools/ftp/spider_yum.py                                  10
-rwxr-xr-x  tools/purgehook/purgehook.py                              2
-rw-r--r--  tools/search/crawler/lib/basecrawler.py                   2
-rw-r--r--  tools/search/crawler/lib/genericsite.py                   2
-rw-r--r--  tools/search/crawler/lib/parsers.py                       4
-rwxr-xr-x  tools/search/crawler/webcrawler.py                        2
18 files changed, 37 insertions, 40 deletions
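
Two hunks (pgweb/core/views.py and tools/ftp/spider_yum.py) show the slightly less obvious rewrite 2to3 applies to filter(None, seq): a comprehension keeping only truthy elements. A rough equivalence check, with illustrative values:

    # `values` is illustrative; the comprehension is the form 2to3 emits.
    values = ['10.1', '', None, '9.6']
    assert [_f for _f in values if _f] == ['10.1', '9.6']
    # A bare filter() would no longer work with len() in Python 3:
    # len(filter(None, values))  -> TypeError, filter() is now an iterator
    assert len(list(filter(None, values))) == 2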
diff --git a/pgweb/account/urls.py b/pgweb/account/urls.py
index 9467ca0b..293836de 100644
--- a/pgweb/account/urls.py
+++ b/pgweb/account/urls.py
@@ -53,5 +53,5 @@ urlpatterns = [
     url(r'^signup/oauth/$', pgweb.account.views.signup_oauth),
 ]
 
-for provider in settings.OAUTH.keys():
+for provider in list(settings.OAUTH.keys()):
     urlpatterns.append(url(r'^login/({0})/$'.format(provider), pgweb.account.oauthclient.login_oauth))
diff --git a/pgweb/core/views.py b/pgweb/core/views.py
index bd33eebf..4ec07709 100644
--- a/pgweb/core/views.py
+++ b/pgweb/core/views.py
@@ -291,7 +291,7 @@ def admin_purge(request):
         url = request.POST['url']
         expr = request.POST['expr']
         xkey = request.POST['xkey']
-        l = len(filter(None, [url, expr, xkey]))
+        l = len([_f for _f in [url, expr, xkey] if _f])
         if l == 0:
             # Nothing specified
             return HttpResponseRedirect('.')
diff --git a/pgweb/downloads/views.py b/pgweb/downloads/views.py
index 3554e2b7..866faa10 100644
--- a/pgweb/downloads/views.py
+++ b/pgweb/downloads/views.py
@@ -5,7 +5,7 @@ from django.views.decorators.csrf import csrf_exempt
 from django.conf import settings
 
 import os
-import cPickle as pickle
+import pickle as pickle
 import json
 
 from pgweb.util.decorators import nocache
@@ -74,9 +74,9 @@ def ftpbrowser(request, subpath):
     del allnodes
 
     # Add all directories
-    directories = [{'link': k, 'url': k, 'type': 'd'} for k, v in node.items() if v['t'] == 'd']
+    directories = [{'link': k, 'url': k, 'type': 'd'} for k, v in list(node.items()) if v['t'] == 'd']
     # Add all symlinks (only directories supported)
-    directories.extend([{'link': k, 'url': v['d'], 'type': 'l'} for k, v in node.items() if v['t'] == 'l'])
+    directories.extend([{'link': k, 'url': v['d'], 'type': 'l'} for k, v in list(node.items()) if v['t'] == 'l'])
 
     # A ittle early sorting wouldn't go amiss, so .. ends up at the top
     directories.sort(key=version_sort, reverse=True)
@@ -86,7 +86,7 @@ def ftpbrowser(request, subpath):
         directories.insert(0, {'link': '[Parent Directory]', 'url': '..'})
 
     # Fetch files
-    files = [{'name': k, 'mtime': v['d'], 'size': v['s']} for k, v in node.items() if v['t'] == 'f']
+    files = [{'name': k, 'mtime': v['d'], 'size': v['s']} for k, v in list(node.items()) if v['t'] == 'f']
 
     breadcrumbs = []
     if subpath:
diff --git a/pgweb/featurematrix/models.py b/pgweb/featurematrix/models.py
index 9e79ec8b..4234c656 100644
--- a/pgweb/featurematrix/models.py
+++ b/pgweb/featurematrix/models.py
@@ -6,7 +6,7 @@ choices_map = {
     2: {'str': 'Obsolete', 'class': 'obs', 'bgcolor': '#ddddff'},
     3: {'str': '?', 'class': 'unk', 'bgcolor': '#ffffaa'},
 }
-choices = [(k, v['str']) for k, v in choices_map.items()]
+choices = [(k, v['str']) for k, v in list(choices_map.items())]
 
 
 class FeatureGroup(models.Model):
diff --git a/pgweb/security/admin.py b/pgweb/security/admin.py
index 9a3e902a..0d94f6e0 100644
--- a/pgweb/security/admin.py
+++ b/pgweb/security/admin.py
@@ -29,10 +29,10 @@ class SecurityPatchForm(forms.ModelForm):
 
     def clean(self):
         d = super(SecurityPatchForm, self).clean()
-        vecs = [v for k, v in d.items() if k.startswith('vector_')]
+        vecs = [v for k, v in list(d.items()) if k.startswith('vector_')]
         empty = [v for v in vecs if v == '']
         if len(empty) != len(vecs) and len(empty) != 0:
-            for k in d.keys():
+            for k in list(d.keys()):
                 if k.startswith('vector_'):
                     self.add_error(k, 'Either specify all vector values or none')
         return d
diff --git a/pgweb/security/management/commands/update_cve_links.py b/pgweb/security/management/commands/update_cve_links.py
index d07641d4..bc3d27c0 100644
--- a/pgweb/security/management/commands/update_cve_links.py
+++ b/pgweb/security/management/commands/update_cve_links.py
@@ -35,4 +35,4 @@ and have been made visible on the website.
 
 {0}
 """.format("\n".join(newly_visible)))
-            map(varnish_purge, SecurityPatch.purge_urls)
+            list(map(varnish_purge, SecurityPatch.purge_urls))
diff --git a/pgweb/security/models.py b/pgweb/security/models.py
index 659e568a..18418561 100644
--- a/pgweb/security/models.py
+++ b/pgweb/security/models.py
@@ -8,7 +8,7 @@ from pgweb.news.models import NewsArticle
 
 import cvss
 
-vector_choices = {k: list(v.items()) for k, v in cvss.constants3.METRICS_VALUE_NAMES.items()}
+vector_choices = {k: list(v.items()) for k, v in list(cvss.constants3.METRICS_VALUE_NAMES.items())}
 
 component_choices = (
     ('core server', 'Core server product'),
@@ -42,7 +42,7 @@ def other_vectors_validator(val):
             if v not in cvss.constants3.METRICS_VALUES[k]:
                 raise ValidationError("Metric {0} has unknown value {1}. Valind ones are: {2}".format(
                     k, v,
-                    ", ".join(cvss.constants3.METRICS_VALUES[k].keys()),
+                    ", ".join(list(cvss.constants3.METRICS_VALUES[k].keys())),
                 ))
     except ValidationError:
         raise
diff --git a/pgweb/util/helpers.py b/pgweb/util/helpers.py
index 68628ce6..f4fee7b4 100644
--- a/pgweb/util/helpers.py
+++ b/pgweb/util/helpers.py
@@ -34,7 +34,7 @@ def simple_form(instancetype, itemid, request, formclass, formtemplate='base/for
         # Set fixed fields. Note that this will not work if the fixed fields are ManyToMany,
         # but we'll fix that sometime in the future
         if fixedfields:
-            for k, v in fixedfields.items():
+            for k, v in list(fixedfields.items()):
                 setattr(r, k, v)
 
         r.save()
diff --git a/pgweb/util/middleware.py b/pgweb/util/middleware.py
index fa7803f7..a6d1dd91 100644
--- a/pgweb/util/middleware.py
+++ b/pgweb/util/middleware.py
@@ -51,13 +51,13 @@ class PgMiddleware(object):
             ('font', ["'self'", "fonts.gstatic.com", "data:", ]),
         ])
         if hasattr(response, 'x_allow_extra_sources'):
-            for k, v in response.x_allow_extra_sources.items():
+            for k, v in list(response.x_allow_extra_sources.items()):
                 if k in sources:
                     sources[k].extend(v)
                 else:
                     sources[k] = v
 
-        security_policies = ["{0}-src {1}".format(k, " ".join(v)) for k, v in sources.items()]
+        security_policies = ["{0}-src {1}".format(k, " ".join(v)) for k, v in list(sources.items())]
 
         if not getattr(response, 'x_allow_frames', False):
             response['X-Frame-Options'] = 'DENY'
diff --git a/pgweb/util/signals.py b/pgweb/util/signals.py
index 15179cb6..992679ee 100644
--- a/pgweb/util/signals.py
+++ b/pgweb/util/signals.py
@@ -31,17 +31,14 @@ def _get_full_text_diff(obj, oldobj):
         return "This object does not know how to express ifself."
 
     s = "\n\n".join(["\n".join(
-        filter(
-            lambda x: not x.startswith('@@'),
-            difflib.unified_diff(
-                _get_attr_value(oldobj, n).splitlines(),
-                _get_attr_value(obj, n).splitlines(),
-                n=1,
-                lineterm='',
-                fromfile=n,
-                tofile=n,
-            )
-        )
+        [x for x in difflib.unified_diff(
+            _get_attr_value(oldobj, n).splitlines(),
+            _get_attr_value(obj, n).splitlines(),
+            n=1,
+            lineterm='',
+            fromfile=n,
+            tofile=n,
+        ) if not x.startswith('@@')]
     ) for n in fieldlist if _get_attr_value(oldobj, n) != _get_attr_value(obj, n)])
     if not s:
         return None
@@ -174,7 +171,7 @@ def my_post_save_handler(sender, **kwargs):
         purgelist = instance.purge_urls()
     else:
         purgelist = instance.purge_urls
-    map(varnish_purge, purgelist)
+    list(map(varnish_purge, purgelist))
 
 
 def register_basic_signal_handlers():
diff --git a/tools/docs/docload.py b/tools/docs/docload.py
index f71e896c..7d03bffd 100755
--- a/tools/docs/docload.py
+++ b/tools/docs/docload.py
@@ -8,7 +8,7 @@ import tarfile
 import re
 import tidy
 from optparse import OptionParser
-from ConfigParser import ConfigParser
+from configparser import ConfigParser
 
 import psycopg2
diff --git a/tools/ftp/spider_ftp.py b/tools/ftp/spider_ftp.py
index 8ea35574..4582bc2a 100755
--- a/tools/ftp/spider_ftp.py
+++ b/tools/ftp/spider_ftp.py
@@ -9,7 +9,7 @@ import sys
 import os
 from datetime import datetime
-import cPickle as pickle
+import pickle as pickle
 import codecs
 import urllib2
diff --git a/tools/ftp/spider_yum.py b/tools/ftp/spider_yum.py
index 6528fba0..79ab45bd 100755
--- a/tools/ftp/spider_yum.py
+++ b/tools/ftp/spider_yum.py
@@ -28,7 +28,7 @@ archs = ['x86_64', 'i386', 'i686', 'ppc64le']
 
 
 def generate_platform(dirname, familyprefix, ver, installer, systemd):
-    for f in platform_names.keys():
+    for f in list(platform_names.keys()):
         yield ('%s-%s' % (f, ver), {
             't': platform_names[f].format(ver),
             'p': os.path.join(dirname, '{0}-{1}'.format(familyprefix, ver)),
@@ -66,7 +66,7 @@ if __name__ == "__main__":
             reporpms[v] = {}
             vroot = os.path.join(args.yumroot, v)
             for dirpath, dirnames, filenames in os.walk(vroot):
-                rmatches = filter(None, (re_reporpm.match(f) for f in sorted(filenames, reverse=True)))
+                rmatches = [_f for _f in (re_reporpm.match(f) for f in sorted(filenames, reverse=True)) if _f]
                 if rmatches:
                     familypath = os.path.join(*dirpath.split('/')[-2:])
@@ -76,7 +76,7 @@ if __name__ == "__main__":
                         shortdist, shortver, ver = r.groups(1)
                         found = False
-                        for p, pinfo in platforms.items():
+                        for p, pinfo in list(platforms.items()):
                             if pinfo['p'] == familypath and pinfo['f'] == shortdist:
                                 if p not in reporpms[v]:
                                     reporpms[v][p] = {}
@@ -89,8 +89,8 @@ if __name__ == "__main__":
                 pass
 
     # Filter all platforms that are not used
-    platforms = {k: v for k, v in platforms.iteritems() if v['found']}
-    for k, v in platforms.iteritems():
+    platforms = {k: v for k, v in platforms.items() if v['found']}
+    for k, v in platforms.items():
         del v['found']
 
     j = json.dumps({'platforms': platforms, 'reporpms': reporpms})
diff --git a/tools/purgehook/purgehook.py b/tools/purgehook/purgehook.py
index 2ccfdf88..3f064028 100755
--- a/tools/purgehook/purgehook.py
+++ b/tools/purgehook/purgehook.py
@@ -9,7 +9,7 @@ import sys
 import os
 import hashlib
-from ConfigParser import ConfigParser
+from configparser import ConfigParser
 import psycopg2
 
 # Templates that we don't want to ban automatically
diff --git a/tools/search/crawler/lib/basecrawler.py b/tools/search/crawler/lib/basecrawler.py
index aeae20b8..c7579cd3 100644
--- a/tools/search/crawler/lib/basecrawler.py
+++ b/tools/search/crawler/lib/basecrawler.py
@@ -55,7 +55,7 @@ class BaseSiteCrawler(object):
         curs = self.dbconn.cursor()
         curs.execute("DELETE FROM webpages WHERE site=%(site)s AND NOT suburl=ANY(%(urls)s)", {
             'site': self.siteid,
-            'urls': self.pages_crawled.keys(),
+            'urls': list(self.pages_crawled.keys()),
         })
         if curs.rowcount:
             log("Deleted %s pages no longer accessible" % curs.rowcount)
diff --git a/tools/search/crawler/lib/genericsite.py b/tools/search/crawler/lib/genericsite.py
index 2204cdac..5372d6c5 100644
--- a/tools/search/crawler/lib/genericsite.py
+++ b/tools/search/crawler/lib/genericsite.py
@@ -14,7 +14,7 @@ class GenericSiteCrawler(BaseSiteCrawler):
 
         # We need to seed the crawler with every URL we've already seen, since
        # we don't recrawl the contents if they haven't changed.
-        allpages = self.scantimes.keys()
+        allpages = list(self.scantimes.keys())
 
         # Figure out if there are any excludes to deal with (beyond the
         # robots.txt ones)
diff --git a/tools/search/crawler/lib/parsers.py b/tools/search/crawler/lib/parsers.py
index 4315548d..c19bf932 100644
--- a/tools/search/crawler/lib/parsers.py
+++ b/tools/search/crawler/lib/parsers.py
@@ -1,10 +1,10 @@
 import re
 import urllib
-from StringIO import StringIO
+from io import StringIO
 import dateutil.parser
 from datetime import timedelta
-from HTMLParser import HTMLParser
+from html.parser import HTMLParser
 
 from lib.log import log
diff --git a/tools/search/crawler/webcrawler.py b/tools/search/crawler/webcrawler.py
index ee65b54c..9627d3f2 100755
--- a/tools/search/crawler/webcrawler.py
+++ b/tools/search/crawler/webcrawler.py
@@ -6,7 +6,7 @@ from lib.genericsite import GenericSiteCrawler
 from lib.sitemapsite import SitemapSiteCrawler
 from lib.threadwrapper import threadwrapper
 
-from ConfigParser import ConfigParser
+from configparser import ConfigParser
 import psycopg2
 
 import time
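
One change in pgweb/util/signals.py and update_cve_links.py deserves a caveat: map(varnish_purge, purgelist) was called purely for its side effects, which worked in Python 2 because map() was eager. In Python 3 the bare call builds an iterator and purges nothing until consumed, so the list() wrapper is what actually forces the purge requests to run. A sketch of the hazard, with a stand-in purge function (purge and urls are illustrative names, not pgweb code):

    # `purge` stands in for varnish_purge; only the laziness matters here.
    purged = []

    def purge(url):
        purged.append(url)

    urls = ['/about/', '/download/']

    map(purge, urls)         # Python 3: an unconsumed iterator, nothing runs
    assert purged == []

    list(map(purge, urls))   # forces evaluation, as the patch does
    assert purged == urls

A plain for loop over purgelist would state the intent more directly, but list(map(...)) is the conservative translation 2to3 produces.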