More generic changes from 2to3

Magnus Hagander
2019-01-21 10:42:05 +01:00
parent 9875fd8537
commit a10eb6420b
18 changed files with 37 additions and 40 deletions

View File

@@ -53,5 +53,5 @@ urlpatterns = [
url(r'^signup/oauth/$', pgweb.account.views.signup_oauth),
]
-for provider in settings.OAUTH.keys():
+for provider in list(settings.OAUTH.keys()):
urlpatterns.append(url(r'^login/({0})/$'.format(provider), pgweb.account.oauthclient.login_oauth))
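
The list() wrappers throughout this commit are the mechanical 2to3 fix for Python 3 dictionary views: .keys() and .items() no longer return lists, so code that relied on list semantics gets an explicit copy. A minimal standalone sketch of the difference (the OAUTH dict here is made up, not the real settings):

OAUTH = {'google': {}, 'github': {}}

keys_view = OAUTH.keys()            # Python 3: a live dict_keys view, not a list
keys_snapshot = list(OAUTH.keys())  # the form 2to3 substitutes to keep list behaviour

OAUTH['twitter'] = {}
print(len(keys_view))               # 3 -- the view follows later changes to the dict
print(len(keys_snapshot))           # 2 -- the snapshot does not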

View File

@@ -291,7 +291,7 @@ def admin_purge(request):
url = request.POST['url']
expr = request.POST['expr']
xkey = request.POST['xkey']
-l = len(filter(None, [url, expr, xkey]))
+l = len([_f for _f in [url, expr, xkey] if _f])
if l == 0:
# Nothing specified
return HttpResponseRedirect('.')
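
filter() became lazy in Python 3, so len(filter(None, ...)) would raise TypeError; the comprehension 2to3 emits keeps the old counting behaviour. A small illustration with made-up values:

url, expr, xkey = 'http://www.example.org/about/', '', ''

# Python 2: filter(None, seq) returned a list, so len() worked directly.
# Python 3: filter() returns a lazy iterator and len() on it raises TypeError.
l = len([_f for _f in [url, expr, xkey] if _f])   # 1 -- only url is non-empty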

View File

@@ -5,7 +5,7 @@ from django.views.decorators.csrf import csrf_exempt
from django.conf import settings
import os
-import cPickle as pickle
+import pickle as pickle
import json
from pgweb.util.decorators import nocache
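
cPickle is gone in Python 3: pickle picks up its C accelerator automatically, so the plain import is sufficient. Code that still has to run on both versions usually guards the import instead; a sketch of that guarded form, shown here only as background:

try:
    import cPickle as pickle   # Python 2: the explicit C implementation
except ImportError:
    import pickle              # Python 3: the C accelerator is used transparently

data = pickle.dumps({'answer': 42})
print(pickle.loads(data))
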
@@ -74,9 +74,9 @@ def ftpbrowser(request, subpath):
del allnodes
# Add all directories
-directories = [{'link': k, 'url': k, 'type': 'd'} for k, v in node.items() if v['t'] == 'd']
+directories = [{'link': k, 'url': k, 'type': 'd'} for k, v in list(node.items()) if v['t'] == 'd']
# Add all symlinks (only directories supported)
-directories.extend([{'link': k, 'url': v['d'], 'type': 'l'} for k, v in node.items() if v['t'] == 'l'])
+directories.extend([{'link': k, 'url': v['d'], 'type': 'l'} for k, v in list(node.items()) if v['t'] == 'l'])
# A little early sorting wouldn't go amiss, so .. ends up at the top
directories.sort(key=version_sort, reverse=True)
@@ -86,7 +86,7 @@ def ftpbrowser(request, subpath):
directories.insert(0, {'link': '[Parent Directory]', 'url': '..'})
# Fetch files
-files = [{'name': k, 'mtime': v['d'], 'size': v['s']} for k, v in node.items() if v['t'] == 'f']
+files = [{'name': k, 'mtime': v['d'], 'size': v['s']} for k, v in list(node.items()) if v['t'] == 'f']
breadcrumbs = []
if subpath:

View File

@@ -6,7 +6,7 @@ choices_map = {
2: {'str': 'Obsolete', 'class': 'obs', 'bgcolor': '#ddddff'},
3: {'str': '?', 'class': 'unk', 'bgcolor': '#ffffaa'},
}
-choices = [(k, v['str']) for k, v in choices_map.items()]
+choices = [(k, v['str']) for k, v in list(choices_map.items())]
class FeatureGroup(models.Model):

View File

@@ -29,10 +29,10 @@ class SecurityPatchForm(forms.ModelForm):
def clean(self):
d = super(SecurityPatchForm, self).clean()
-vecs = [v for k, v in d.items() if k.startswith('vector_')]
+vecs = [v for k, v in list(d.items()) if k.startswith('vector_')]
empty = [v for v in vecs if v == '']
if len(empty) != len(vecs) and len(empty) != 0:
-for k in d.keys():
+for k in list(d.keys()):
if k.startswith('vector_'):
self.add_error(k, 'Either specify all vector values or none')
return d
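
In this loop the snapshot is more than cosmetic: Django's add_error() removes the flagged field from cleaned_data, so iterating a live d.keys() view would shrink the dict mid-loop and raise RuntimeError. A stripped-down sketch of the same failure mode, with a plain dict standing in for cleaned_data:

d = {'vector_av': '', 'vector_ac': 'H', 'component': 'core server'}

for k in list(d.keys()):       # snapshot first, then mutate freely
    if k.startswith('vector_'):
        d.pop(k)               # over a live d.keys() view this would raise
                               # "RuntimeError: dictionary changed size during iteration"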

View File

@@ -35,4 +35,4 @@ and have been made visible on the website.
{0}
""".format("\n".join(newly_visible)))
-map(varnish_purge, SecurityPatch.purge_urls)
+list(map(varnish_purge, SecurityPatch.purge_urls))
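
map() is lazy in Python 3 as well, so a bare map(varnish_purge, ...) would only build an iterator and never purge anything; wrapping it in list() forces the side effects to run. A toy illustration with a stand-in function (fake_purge is hypothetical, not part of pgweb):

purged = []

def fake_purge(url):              # stand-in for varnish_purge
    purged.append(url)

urls = ['/support/security/', '/about/news/']
map(fake_purge, urls)             # Python 3: unconsumed iterator, purged stays []
list(map(fake_purge, urls))       # consuming it runs the calls; purged now holds both URLs

In hand-written Python 3 a plain for loop would be the more idiomatic spelling; list(map(...)) is simply what the automated conversion produces.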

View File

@@ -8,7 +8,7 @@ from pgweb.news.models import NewsArticle
import cvss
-vector_choices = {k: list(v.items()) for k, v in cvss.constants3.METRICS_VALUE_NAMES.items()}
+vector_choices = {k: list(v.items()) for k, v in list(cvss.constants3.METRICS_VALUE_NAMES.items())}
component_choices = (
('core server', 'Core server product'),
@@ -42,7 +42,7 @@ def other_vectors_validator(val):
if v not in cvss.constants3.METRICS_VALUES[k]:
raise ValidationError("Metric {0} has unknown value {1}. Valid ones are: {2}".format(
k, v,
", ".join(cvss.constants3.METRICS_VALUES[k].keys()),
", ".join(list(cvss.constants3.METRICS_VALUES[k].keys())),
))
except ValidationError:
raise

View File

@@ -34,7 +34,7 @@ def simple_form(instancetype, itemid, request, formclass, formtemplate='base/for
# Set fixed fields. Note that this will not work if the fixed fields are ManyToMany,
# but we'll fix that sometime in the future
if fixedfields:
-for k, v in fixedfields.items():
+for k, v in list(fixedfields.items()):
setattr(r, k, v)
r.save()

View File

@@ -51,13 +51,13 @@ class PgMiddleware(object):
('font', ["'self'", "fonts.gstatic.com", "data:", ]),
])
if hasattr(response, 'x_allow_extra_sources'):
-for k, v in response.x_allow_extra_sources.items():
+for k, v in list(response.x_allow_extra_sources.items()):
if k in sources:
sources[k].extend(v)
else:
sources[k] = v
-security_policies = ["{0}-src {1}".format(k, " ".join(v)) for k, v in sources.items()]
+security_policies = ["{0}-src {1}".format(k, " ".join(v)) for k, v in list(sources.items())]
if not getattr(response, 'x_allow_frames', False):
response['X-Frame-Options'] = 'DENY'

View File

@@ -31,17 +31,14 @@ def _get_full_text_diff(obj, oldobj):
return "This object does not know how to express ifself."
s = "\n\n".join(["\n".join(
-filter(
-lambda x: not x.startswith('@@'),
-difflib.unified_diff(
+[x for x in difflib.unified_diff(
_get_attr_value(oldobj, n).splitlines(),
_get_attr_value(obj, n).splitlines(),
n=1,
lineterm='',
fromfile=n,
tofile=n,
-)
-)
+) if not x.startswith('@@')]
) for n in fieldlist if _get_attr_value(oldobj, n) != _get_attr_value(obj, n)])
if not s:
return None
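
The rewrite above is 2to3 folding a filter(lambda ...) call into an equivalent list comprehension: the predicate and the unified_diff() input are untouched, only the spelling changes. The transformation in isolation, on a toy list of diff lines:

lines = ['@@ -1,2 +1,2 @@', '-old text', '+new text']

kept = filter(lambda x: not x.startswith('@@'), lines)   # Python 3: a lazy filter object
kept = [x for x in lines if not x.startswith('@@')]      # eager list: ['-old text', '+new text']
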
@@ -174,7 +171,7 @@ def my_post_save_handler(sender, **kwargs):
purgelist = instance.purge_urls()
else:
purgelist = instance.purge_urls
-map(varnish_purge, purgelist)
+list(map(varnish_purge, purgelist))
def register_basic_signal_handlers():

View File

@@ -8,7 +8,7 @@ import tarfile
import re
import tidy
from optparse import OptionParser
-from ConfigParser import ConfigParser
+from configparser import ConfigParser
import psycopg2
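
ConfigParser was renamed to configparser in Python 3; the class inside the module keeps its name, so only the import line moves. A minimal usage sketch (the .ini file name is made up):

from configparser import ConfigParser   # Python 2 spelling: from ConfigParser import ConfigParser

cfg = ConfigParser()
cfg.read('docload.ini')                  # hypothetical config file
# dbname = cfg.get('db', 'dbname')       # same API as on Python 2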

View File

@@ -9,7 +9,7 @@
import sys
import os
from datetime import datetime
-import cPickle as pickle
+import pickle as pickle
import codecs
import urllib2

View File

@@ -28,7 +28,7 @@ archs = ['x86_64', 'i386', 'i686', 'ppc64le']
def generate_platform(dirname, familyprefix, ver, installer, systemd):
-for f in platform_names.keys():
+for f in list(platform_names.keys()):
yield ('%s-%s' % (f, ver), {
't': platform_names[f].format(ver),
'p': os.path.join(dirname, '{0}-{1}'.format(familyprefix, ver)),
@@ -66,7 +66,7 @@ if __name__ == "__main__":
reporpms[v] = {}
vroot = os.path.join(args.yumroot, v)
for dirpath, dirnames, filenames in os.walk(vroot):
-rmatches = filter(None, (re_reporpm.match(f) for f in sorted(filenames, reverse=True)))
+rmatches = [_f for _f in (re_reporpm.match(f) for f in sorted(filenames, reverse=True)) if _f]
if rmatches:
familypath = os.path.join(*dirpath.split('/')[-2:])
@@ -76,7 +76,7 @@ if __name__ == "__main__":
shortdist, shortver, ver = r.groups(1)
found = False
-for p, pinfo in platforms.items():
+for p, pinfo in list(platforms.items()):
if pinfo['p'] == familypath and pinfo['f'] == shortdist:
if p not in reporpms[v]:
reporpms[v][p] = {}
@@ -89,8 +89,8 @@ if __name__ == "__main__":
pass
# Filter all platforms that are not used
-platforms = {k: v for k, v in platforms.iteritems() if v['found']}
-for k, v in platforms.iteritems():
+platforms = {k: v for k, v in platforms.items() if v['found']}
+for k, v in platforms.items():
del v['found']
j = json.dumps({'platforms': platforms, 'reporpms': reporpms})
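
dict.iteritems() does not exist in Python 3 (it raises AttributeError); .items() now returns a view that is iterated just as lazily, so the straight rename preserves behaviour. A reduced example with a made-up platform map:

platforms = {'fedora-30': {'found': True}, 'centos-7': {'found': False}}

platforms = {k: v for k, v in platforms.items() if v['found']}   # .iteritems() would fail on Python 3
for k, v in platforms.items():
    del v['found']            # mutating the value dict is fine; the keys are untouched
print(platforms)              # {'fedora-30': {}}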

View File

@@ -9,7 +9,7 @@
import sys
import os
import hashlib
-from ConfigParser import ConfigParser
+from configparser import ConfigParser
import psycopg2
# Templates that we don't want to ban automatically

View File

@@ -55,7 +55,7 @@ class BaseSiteCrawler(object):
curs = self.dbconn.cursor()
curs.execute("DELETE FROM webpages WHERE site=%(site)s AND NOT suburl=ANY(%(urls)s)", {
'site': self.siteid,
-'urls': self.pages_crawled.keys(),
+'urls': list(self.pages_crawled.keys()),
})
if curs.rowcount:
log("Deleted %s pages no longer accessible" % curs.rowcount)

View File

@@ -14,7 +14,7 @@ class GenericSiteCrawler(BaseSiteCrawler):
# We need to seed the crawler with every URL we've already seen, since
# we don't recrawl the contents if they haven't changed.
-allpages = self.scantimes.keys()
+allpages = list(self.scantimes.keys())
# Figure out if there are any excludes to deal with (beyond the
# robots.txt ones)

View File

@@ -1,10 +1,10 @@
import re
import urllib
-from StringIO import StringIO
+from io import StringIO
import dateutil.parser
from datetime import timedelta
-from HTMLParser import HTMLParser
+from html.parser import HTMLParser
from lib.log import log
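
Both imports are straight module relocations: StringIO.StringIO lives in io (with io.BytesIO for byte streams), and HTMLParser became html.parser. A minimal Python 3 sketch using the new locations:

from io import StringIO
from html.parser import HTMLParser

class TextCollector(HTMLParser):
    def __init__(self):
        super().__init__()
        self.buf = StringIO()

    def handle_data(self, data):
        self.buf.write(data)

parser = TextCollector()
parser.feed('<p>PostgreSQL release notes</p>')
print(parser.buf.getvalue())    # PostgreSQL release notes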

View File

@@ -6,7 +6,7 @@ from lib.genericsite import GenericSiteCrawler
from lib.sitemapsite import SitemapSiteCrawler
from lib.threadwrapper import threadwrapper
-from ConfigParser import ConfigParser
+from configparser import ConfigParser
import psycopg2
import time