More generic changes from 2to3

Magnus Hagander
2019-01-21 10:42:05 +01:00
parent 9875fd8537
commit a10eb6420b
18 changed files with 37 additions and 40 deletions

View File

@@ -8,7 +8,7 @@ import tarfile
import re
import tidy
from optparse import OptionParser
-from ConfigParser import ConfigParser
+from configparser import ConfigParser
import psycopg2
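The ConfigParser module was renamed to configparser in Python 3 (PEP 3108). Code that must keep running on both interpreters can fall back at import time; a minimal sketch, not part of this commit, with a hypothetical ini filename:

    try:
        from configparser import ConfigParser   # Python 3 name
    except ImportError:
        from ConfigParser import ConfigParser   # Python 2 fallback

    config = ConfigParser()
    config.read('archives.ini')   # hypothetical filename; missing files are silently skipped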

View File

@@ -9,7 +9,7 @@
import sys
import os
from datetime import datetime
-import cPickle as pickle
+import pickle as pickle
import codecs
import urllib2
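Python 3 dropped the separate cPickle module; a plain pickle import transparently uses the C implementation, which makes the "as pickle" alias redundant (2to3 keeps it mechanically). If the pickles are shared with a remaining Python 2 reader, pinning the protocol keeps them loadable; an illustrative sketch:

    import pickle

    data = {'tags': ['news', 'event'], 'count': 2}   # illustrative payload
    blob = pickle.dumps(data, protocol=2)   # protocol 2 is the highest Python 2 can read
    assert pickle.loads(blob) == data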

View File

@@ -28,7 +28,7 @@ archs = ['x86_64', 'i386', 'i686', 'ppc64le']
def generate_platform(dirname, familyprefix, ver, installer, systemd):
-for f in platform_names.keys():
+for f in list(platform_names.keys()):
yield ('%s-%s' % (f, ver), {
't': platform_names[f].format(ver),
'p': os.path.join(dirname, '{0}-{1}'.format(familyprefix, ver)),
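dict.keys() returns a lazy view in Python 3, so 2to3 wraps every such call in list() because it cannot prove the loop body is safe. In a read-only loop like this one the wrapper is harmless but unneeded; iterating the dict directly is enough:

    platform_names = {'centos': 'CentOS {0}', 'rhel': 'RHEL {0}'}   # illustrative values
    for f in platform_names:   # same as iterating platform_names.keys()
        print(platform_names[f].format(7))
    # list(...) is only required if the loop adds or removes keys,
    # or if the result must be indexed, sliced, or kept around.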
@@ -66,7 +66,7 @@ if __name__ == "__main__":
reporpms[v] = {}
vroot = os.path.join(args.yumroot, v)
for dirpath, dirnames, filenames in os.walk(vroot):
-rmatches = filter(None, (re_reporpm.match(f) for f in sorted(filenames, reverse=True)))
+rmatches = [_f for _f in (re_reporpm.match(f) for f in sorted(filenames, reverse=True)) if _f]
if rmatches:
familypath = os.path.join(*dirpath.split('/')[-2:])
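filter() returns a lazy iterator in Python 3, and an iterator is always truthy, so the "if rmatches:" test above would silently stop working; 2to3 therefore rewrites it as a list comprehension, where "if _f" reproduces filter(None, ...) by dropping falsy items. A self-contained illustration with a made-up pattern:

    import re
    re_reporpm = re.compile(r'pgdg-.*\.rpm$')   # illustrative pattern, not the real one
    filenames = ['pgdg-centos11-11.2.noarch.rpm', 'README']
    rmatches = [_f for _f in (re_reporpm.match(f) for f in filenames) if _f]
    if rmatches:   # correct: an empty list is falsy, an empty filter() object is not
        print(rmatches[0].group(0))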
@@ -76,7 +76,7 @@ if __name__ == "__main__":
shortdist, shortver, ver = r.groups(1)
found = False
-for p, pinfo in platforms.items():
+for p, pinfo in list(platforms.items()):
if pinfo['p'] == familypath and pinfo['f'] == shortdist:
if p not in reporpms[v]:
reporpms[v][p] = {}
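Likewise, list(platforms.items()) materializes a snapshot of the dict. That is mandatory when keys are added or removed during the loop; 2to3 applies it unconditionally since it cannot see the whole loop body. The failure mode it guards against, in a minimal sketch:

    platforms = {'centos-7': {'found': False}, 'rhel-8': {'found': True}}   # illustrative
    for p, pinfo in list(platforms.items()):   # snapshot: safe to mutate below
        if not pinfo['found']:
            del platforms[p]   # deleting while iterating a live view raises RuntimeError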
@@ -89,8 +89,8 @@ if __name__ == "__main__":
pass
# Filter all platforms that are not used
-platforms = {k: v for k, v in platforms.iteritems() if v['found']}
-for k, v in platforms.iteritems():
+platforms = {k: v for k, v in platforms.items() if v['found']}
+for k, v in platforms.items():
del v['found']
j = json.dumps({'platforms': platforms, 'reporpms': reporpms})
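iteritems(), iterkeys() and itervalues() are gone in Python 3; items() itself now returns the lazy view, so both the dict comprehension and the loop translate one-to-one with no list() needed. Note that this loop only mutates the value dicts, which is safe while iterating a view; only adding or removing keys of platforms itself would not be:

    platforms = {'centos-7': {'found': True, 'p': 'redhat/rhel-7'}}   # illustrative
    platforms = {k: v for k, v in platforms.items() if v['found']}
    for k, v in platforms.items():
        del v['found']   # mutates the value, not the dict being iterated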

View File

@@ -9,7 +9,7 @@
import sys
import os
import hashlib
-from ConfigParser import ConfigParser
+from configparser import ConfigParser
import psycopg2
# Templates that we don't want to ban automatically

View File

@@ -55,7 +55,7 @@ class BaseSiteCrawler(object):
curs = self.dbconn.cursor()
curs.execute("DELETE FROM webpages WHERE site=%(site)s AND NOT suburl=ANY(%(urls)s)", {
'site': self.siteid,
-'urls': self.pages_crawled.keys(),
+'urls': list(self.pages_crawled.keys()),
})
if curs.rowcount:
log("Deleted %s pages no longer accessible" % curs.rowcount)

View File

@@ -14,7 +14,7 @@ class GenericSiteCrawler(BaseSiteCrawler):
# We need to seed the crawler with every URL we've already seen, since
# we don't recrawl the contents if they haven't changed.
-allpages = self.scantimes.keys()
+allpages = list(self.scantimes.keys())
# Figure out if there are any excludes to deal with (beyond the
# robots.txt ones)
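Whether this copy matters depends on what later happens to allpages: a Python 3 view stays tied to the dict, while list() takes an independent snapshot. Since the crawler presumably keeps recording new pages as it runs, the snapshot is the safe choice; a small demonstration of the difference:

    scantimes = {'/index.html': None}   # illustrative
    view = scantimes.keys()
    snapshot = list(scantimes.keys())
    scantimes['/new.html'] = None
    print(len(view), len(snapshot))   # prints "2 1": the view tracks the dict, the list does not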

View File

@@ -1,10 +1,10 @@
import re
import urllib
-from StringIO import StringIO
+from io import StringIO
import dateutil.parser
from datetime import timedelta
-from HTMLParser import HTMLParser
+from html.parser import HTMLParser
from lib.log import log
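StringIO and HTMLParser both moved in Python 3: text buffers live in io, and io.StringIO accepts only str, so any caller that buffered raw bytes needs io.BytesIO instead, a frequent post-2to3 pitfall; HTMLParser became the html.parser module. A runnable sketch with a hypothetical subclass:

    from io import StringIO, BytesIO
    from html.parser import HTMLParser

    StringIO('text only')    # io.StringIO(b'...') raises TypeError
    BytesIO(b'raw bytes')    # byte buffers belong in BytesIO now

    class TitleFinder(HTMLParser):   # hypothetical subclass
        def handle_starttag(self, tag, attrs):
            if tag == 'title':
                print('found a title tag')

    TitleFinder().feed('<html><title>hi</title></html>')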

View File

@@ -6,7 +6,7 @@ from lib.genericsite import GenericSiteCrawler
from lib.sitemapsite import SitemapSiteCrawler
from lib.threadwrapper import threadwrapper
-from ConfigParser import ConfigParser
+from configparser import ConfigParser
import psycopg2
import time
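All of the rewrites in this commit match stock 2to3 fixers, chiefly imports (module renames), dict (keys/items/iteritems) and filter. The commit does not record the exact invocation, but a run along the lines of "2to3 -f imports -f dict -f filter -w ." produces this class of change mechanically.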