Fix deprecated comparison structs

This commit is contained in:
Magnus Hagander
2019-01-17 21:19:57 +01:00
parent e3ec36b50c
commit 00ab822ea8
21 changed files with 67 additions and 82 deletions

View File

@ -67,7 +67,7 @@ class SignupForm(forms.Form):
def clean_email2(self): def clean_email2(self):
# If the primary email checker had an exception, the data will be gone # If the primary email checker had an exception, the data will be gone
# from the cleaned_data structure # from the cleaned_data structure
if not self.cleaned_data.has_key('email'): if 'email' not in self.cleaned_data:
return self.cleaned_data['email2'] return self.cleaned_data['email2']
email1 = self.cleaned_data['email'].lower() email1 = self.cleaned_data['email'].lower()
email2 = self.cleaned_data['email2'].lower() email2 = self.cleaned_data['email2'].lower()
@ -157,7 +157,7 @@ class ChangeEmailForm(forms.Form):
def clean_email2(self): def clean_email2(self):
# If the primary email checker had an exception, the data will be gone # If the primary email checker had an exception, the data will be gone
# from the cleaned_data structure # from the cleaned_data structure
if not self.cleaned_data.has_key('email'): if 'email' not in self.cleaned_data:
return self.cleaned_data['email2'].lower() return self.cleaned_data['email2'].lower()
email1 = self.cleaned_data['email'].lower() email1 = self.cleaned_data['email'].lower()
email2 = self.cleaned_data['email2'].lower() email2 = self.cleaned_data['email2'].lower()

View File

@ -26,7 +26,7 @@ def _login_oauth(request, provider, authurl, tokenurl, scope, authdatafunc):
redir = '{0}/account/login/{1}/'.format(settings.SITE_ROOT, provider) redir = '{0}/account/login/{1}/'.format(settings.SITE_ROOT, provider)
oa = OAuth2Session(client_id, scope=scope, redirect_uri=redir) oa = OAuth2Session(client_id, scope=scope, redirect_uri=redir)
if request.GET.has_key('code'): if 'code' in request.GET:
log.info("Completing {0} oauth2 step from {1}".format(provider, get_client_ip(request))) log.info("Completing {0} oauth2 step from {1}".format(provider, get_client_ip(request)))
# Receiving a login request from the provider, so validate data # Receiving a login request from the provider, so validate data
@ -151,7 +151,7 @@ def oauth_login_github(request):
def oauth_login_facebook(request): def oauth_login_facebook(request):
def _facebook_auth_data(oa): def _facebook_auth_data(oa):
r = oa.get('https://graph.facebook.com/me?fields=email,first_name,last_name').json() r = oa.get('https://graph.facebook.com/me?fields=email,first_name,last_name').json()
if not 'email' in r: if 'email' not in r:
raise OAuthException("Your Facebook profile must provide an email address in order to log in") raise OAuthException("Your Facebook profile must provide an email address in order to log in")
return (r['email'], return (r['email'],
@ -174,7 +174,7 @@ def oauth_login_facebook(request):
def oauth_login_microsoft(request): def oauth_login_microsoft(request):
def _microsoft_auth_data(oa): def _microsoft_auth_data(oa):
r = oa.get("https://apis.live.net/v5.0/me").json() r = oa.get("https://apis.live.net/v5.0/me").json()
if not 'emails' in r or not 'account' in r['emails']: if 'emails' not in r or 'account' not in r['emails']:
raise OAuthException("Your Microsoft profile must provide an email address in order to log in") raise OAuthException("Your Microsoft profile must provide an email address in order to log in")
return (r['emails']['account'], return (r['emails']['account'],

View File

@ -25,9 +25,7 @@ class ReCaptchaWidget(forms.widgets.Widget):
def value_from_datadict(self, data, files, name): def value_from_datadict(self, data, files, name):
if settings.NOCAPTCHA: if settings.NOCAPTCHA:
return None return None
if data.has_key('g-recaptcha-response'): return data.get('g-recaptcha-response', None)
return data['g-recaptcha-response']
return None
class ReCaptchaField(forms.CharField): class ReCaptchaField(forms.CharField):

View File

@ -204,7 +204,7 @@ def confirm_change_email(request, tokenhash):
@login_required @login_required
def listobjects(request, objtype): def listobjects(request, objtype):
if not objtypes.has_key(objtype): if objtype not in objtypes:
raise Http404("Object type not found") raise Http404("Object type not found")
o = objtypes[objtype] o = objtypes[objtype]
@ -214,7 +214,7 @@ def listobjects(request, objtype):
'unapproved': o['objects'](request.user).filter(approved=False), 'unapproved': o['objects'](request.user).filter(approved=False),
}, },
'title': o['title'], 'title': o['title'],
'submit_header': o.has_key('submit_header') and o['submit_header'] or None, 'submit_header': o.get('submit_header', None),
'suburl': objtype, 'suburl': objtype,
}) })
@ -375,9 +375,9 @@ def signup_complete(request):
@frame_sources('https://www.google.com/') @frame_sources('https://www.google.com/')
@transaction.atomic @transaction.atomic
def signup_oauth(request): def signup_oauth(request):
if not request.session.has_key('oauth_email') \ if 'oauth_email' not in request.session \
or not request.session.has_key('oauth_firstname') \ or 'oauth_firstname' not in request.session \
or not request.session.has_key('oauth_lastname'): or 'oauth_lastname' not in request.session:
return HttpServerError(request, 'Invalid redirect received') return HttpServerError(request, 'Invalid redirect received')
if request.method == 'POST': if request.method == 'POST':
@ -413,7 +413,7 @@ def signup_oauth(request):
# Redirect to the sessions page, or to the account page # Redirect to the sessions page, or to the account page
# if none was given. # if none was given.
return HttpResponseRedirect(request.session.pop('login_next', '/account/')) return HttpResponseRedirect(request.session.pop('login_next', '/account/'))
elif request.GET.has_key('do_abort'): elif 'do_abort' in request.GET:
del request.session['oauth_email'] del request.session['oauth_email']
del request.session['oauth_firstname'] del request.session['oauth_firstname']
del request.session['oauth_lastname'] del request.session['oauth_lastname']
@ -459,7 +459,7 @@ def communityauth(request, siteid):
# "suburl" - old style way of passing parameters # "suburl" - old style way of passing parameters
# deprecated - will be removed once all sites have migrated # deprecated - will be removed once all sites have migrated
if request.GET.has_key('su'): if 'su' in request.GET:
su = request.GET['su'] su = request.GET['su']
if not su.startswith('/'): if not su.startswith('/'):
su = None su = None
@ -468,7 +468,7 @@ def communityauth(request, siteid):
# "data" - new style way of passing parameter, where we only # "data" - new style way of passing parameter, where we only
# care that it's characters are what's in base64. # care that it's characters are what's in base64.
if request.GET.has_key('d'): if 'd' in request.GET:
d = request.GET['d'] d = request.GET['d']
if d != urllib.quote_plus(d, '=$'): if d != urllib.quote_plus(d, '=$'):
# Invalid character, so drop it # Invalid character, so drop it
@ -608,14 +608,14 @@ def communityauth_search(request, siteid):
site = get_object_or_404(CommunityAuthSite, pk=siteid) site = get_object_or_404(CommunityAuthSite, pk=siteid)
q = Q(is_active=True) q = Q(is_active=True)
if request.GET.has_key('s') and request.GET['s']: if 's' in request.GET and request.GET['s']:
# General search term, match both name and email # General search term, match both name and email
q = q & (Q(email__icontains=request.GET['s']) | Q(first_name__icontains=request.GET['s']) | Q(last_name__icontains=request.GET['s'])) q = q & (Q(email__icontains=request.GET['s']) | Q(first_name__icontains=request.GET['s']) | Q(last_name__icontains=request.GET['s']))
elif request.GET.has_key('e') and request.GET['e']: elif 'e' in request.GET and request.GET['e']:
q = q & Q(email__icontains=request.GET['e']) q = q & Q(email__icontains=request.GET['e'])
elif request.GET.has_key('n') and request.GET['n']: elif 'n' in request.GET and request.GET['n']:
q = q & (Q(first_name__icontains=request.GET['n']) | Q(last_name__icontains=request.GET['n'])) q = q & (Q(first_name__icontains=request.GET['n']) | Q(last_name__icontains=request.GET['n']))
elif request.GET.has_key('u') and request.GET['u']: elif 'u' in request.GET and request.GET['u']:
q = q & Q(username=request.GET['u']) q = q & Q(username=request.GET['u'])
else: else:
raise Http404('No search term specified') raise Http404('No search term specified')

View File

@ -44,9 +44,9 @@ class OrganisationForm(forms.ModelForm):
def save(self, commit=True): def save(self, commit=True):
model = super(OrganisationForm, self).save(commit=False) model = super(OrganisationForm, self).save(commit=False)
if self.cleaned_data.has_key('add_manager') and self.cleaned_data['add_manager']: if 'add_manager' in self.cleaned_data and self.cleaned_data['add_manager']:
model.managers.add(User.objects.get(email=self.cleaned_data['add_manager'].lower())) model.managers.add(User.objects.get(email=self.cleaned_data['add_manager'].lower()))
if self.cleaned_data.has_key('remove_manager') and self.cleaned_data['remove_manager']: if 'remove_manager' in self.cleaned_data and self.cleaned_data['remove_manager']:
for toremove in self.cleaned_data['remove_manager']: for toremove in self.cleaned_data['remove_manager']:
model.managers.remove(toremove) model.managers.remove(toremove)

View File

@ -205,7 +205,7 @@ _dynamic_cssmap = {
@cache(hours=6) @cache(hours=6)
def dynamic_css(request, css): def dynamic_css(request, css):
if not _dynamic_cssmap.has_key(css): if css not in _dynamic_cssmap:
raise Http404('CSS not found') raise Http404('CSS not found')
files = _dynamic_cssmap[css] files = _dynamic_cssmap[css]
resp = HttpResponse(content_type='text/css') resp = HttpResponse(content_type='text/css')
@ -222,7 +222,7 @@ def dynamic_css(request, css):
# If we somehow referred to a file that didn't exist, or # If we somehow referred to a file that didn't exist, or
# one that we couldn't access. # one that we couldn't access.
raise Http404('CSS (sub) not found') raise Http404('CSS (sub) not found')
if request.META.has_key('HTTP_IF_MODIFIED_SINCE'): if 'HTTP_IF_MODIFIED_SINCE' in request.META:
# This code is mostly stolen from django :) # This code is mostly stolen from django :)
matches = re.match(r"^([^;]+)(; length=([0-9]+))?$", matches = re.match(r"^([^;]+)(; length=([0-9]+))?$",
request.META.get('HTTP_IF_MODIFIED_SINCE'), request.META.get('HTTP_IF_MODIFIED_SINCE'),

View File

@ -50,7 +50,7 @@ def ftpbrowser(request, subpath):
parent = '' parent = ''
for d in subpath.split('/'): for d in subpath.split('/'):
# Check if allnodes contains a node matching the path # Check if allnodes contains a node matching the path
if allnodes[parent].has_key(d): if d in allnodes[parent]:
if allnodes[parent][d]['t'] == 'd': if allnodes[parent][d]['t'] == 'd':
canonpath = os.path.join(canonpath, d) canonpath = os.path.join(canonpath, d)
elif allnodes[parent][d]['t'] == 'l': elif allnodes[parent][d]['t'] == 'l':
@ -102,9 +102,9 @@ def ftpbrowser(request, subpath):
breadcrumbs.append({'name': pathpiece, 'path': breadroot}) breadcrumbs.append({'name': pathpiece, 'path': breadroot})
# Check if there are any "content files" we should render directly on the webpage # Check if there are any "content files" we should render directly on the webpage
file_readme = (node.has_key('README') and node['README']['t'] == 'f') and node['README']['c'] or None file_readme = ('README' in node and node['README']['t'] == 'f') and node['README']['c'] or None
file_message = (node.has_key('.message') and node['.message']['t'] == 'f') and node['.message']['c'] or None file_message = ('.message' in node and node['.message']['t'] == 'f') and node['.message']['c'] or None
file_maintainer = (node.has_key('CURRENT_MAINTAINER') and node['CURRENT_MAINTAINER']['t'] == 'f') and node['CURRENT_MAINTAINER']['c'] or None file_maintainer = ('CURRENT_MAINTAINER' in node and node['CURRENT_MAINTAINER']['t'] == 'f') and node['CURRENT_MAINTAINER']['c'] or None
del node del node

View File

@ -43,7 +43,7 @@ class EventForm(forms.ModelForm):
if self.instance.pk and self.instance.approved: if self.instance.pk and self.instance.approved:
if self.cleaned_data['enddate'] != self.instance.enddate: if self.cleaned_data['enddate'] != self.instance.enddate:
raise ValidationError("You cannot change the dates on events that have been approved") raise ValidationError("You cannot change the dates on events that have been approved")
if self.cleaned_data.has_key('startdate') and self.cleaned_data['enddate'] < self.cleaned_data['startdate']: if 'startdate' in self.cleaned_data and self.cleaned_data['enddate'] < self.cleaned_data['startdate']:
raise ValidationError("End date cannot be before start date!") raise ValidationError("End date cannot be before start date!")
return self.cleaned_data['enddate'] return self.cleaned_data['enddate']

View File

@ -60,20 +60,17 @@ def search(request):
# constants that we might eventually want to make configurable # constants that we might eventually want to make configurable
hitsperpage = 20 hitsperpage = 20
if request.GET.has_key('m') and request.GET['m'] == '1': if request.GET.get('m', '') == '1':
searchlists = True searchlists = True
if request.GET.has_key('l'): if request.GET.get('l', '') != '':
if request.GET['l'] != '': try:
try: listid = int(request.GET['l'])
listid = int(request.GET['l']) except:
except:
listid = None
else:
listid = None listid = None
else: else:
# Listid not specified. But do we have the name? # Listid not specified. But do we have the name?
if request.GET.has_key('ln'): if 'ln' in request.GET:
try: try:
ll = MailingList.objects.get(listname=request.GET['ln']) ll = MailingList.objects.get(listname=request.GET['ln'])
listid = ll.id listid = ll.id
@ -84,7 +81,7 @@ def search(request):
else: else:
listid = None listid = None
if request.GET.has_key('d'): if 'd' in request.GET:
try: try:
dateval = int(request.GET['d']) dateval = int(request.GET['d'])
except: except:
@ -92,9 +89,9 @@ def search(request):
else: else:
dateval = None dateval = None
if request.GET.has_key('s'): if 's' in request.GET:
listsort = request.GET['s'] listsort = request.GET['s']
if not listsort in ('r', 'd', 'i'): if listsort not in ('r', 'd', 'i'):
listsort = 'r' listsort = 'r'
else: else:
listsort = 'r' listsort = 'r'
@ -103,9 +100,9 @@ def search(request):
dateval = 365 dateval = 365
sortoptions = ( sortoptions = (
{'val': 'r', 'text': 'Rank', 'selected': not (request.GET.has_key('s') and request.GET['s'] == 'd')}, {'val': 'r', 'text': 'Rank', 'selected': request.GET.get('s', '') not in ('d', 'i')},
{'val': 'd', 'text': 'Date', 'selected': request.GET.has_key('s') and request.GET['s'] == 'd'}, {'val': 'd', 'text': 'Date', 'selected': request.GET.get('s', '') == 'd'},
{'val': 'i', 'text': 'Reverse date', 'selected': request.GET.has_key('s') and request.GET['s'] == 'i'}, {'val': 'i', 'text': 'Reverse date', 'selected': request.GET.get('s', '') == 'i'},
) )
dateoptions = ( dateoptions = (
{'val': -1, 'text': 'anytime'}, {'val': -1, 'text': 'anytime'},
@ -117,18 +114,11 @@ def search(request):
) )
else: else:
searchlists = False searchlists = False
if request.GET.has_key('u'): suburl = request.GET.get('u', None)
suburl = request.GET['u'] allsites = request.GET.get('a', None) == "1"
else:
suburl = None
if request.GET.has_key('a'):
allsites = (request.GET['a'] == "1")
else:
allsites = False
# Check that we actually have something to search for # Check that we actually have something to search for
if not request.GET.has_key('q') or request.GET['q'] == '': if request.GET.get('q', '') == '':
if searchlists: if searchlists:
return render(request, 'search/listsearch.html', { return render(request, 'search/listsearch.html', {
'search_error': "No search term specified.", 'search_error': "No search term specified.",
@ -151,12 +141,9 @@ def search(request):
}) })
# Is the request being paged? # Is the request being paged?
if request.GET.has_key('p'): try:
try: pagenum = int(request.GET.get('p', 1))
pagenum = int(request.GET['p']) except:
except:
pagenum = 1
else:
pagenum = 1 pagenum = 1
firsthit = (pagenum - 1) * hitsperpage + 1 firsthit = (pagenum - 1) * hitsperpage + 1

View File

@ -35,11 +35,11 @@ def other_vectors_validator(val):
try: try:
for vector in val.split('/'): for vector in val.split('/'):
k, v = vector.split(':') k, v = vector.split(':')
if not cvss.constants3.METRICS_VALUES.has_key(k): if k not in cvss.constants3.METRICS_VALUES:
raise ValidationError("Metric {0} is unknown".format(k)) raise ValidationError("Metric {0} is unknown".format(k))
if k in ('AV', 'AC', 'PR', 'UI', 'S', 'C', 'I', 'A'): if k in ('AV', 'AC', 'PR', 'UI', 'S', 'C', 'I', 'A'):
raise ValidationError("Metric {0} must be specified in the dropdowns".format(k)) raise ValidationError("Metric {0} must be specified in the dropdowns".format(k))
if not cvss.constants3.METRICS_VALUES[k].has_key(v): if v not in cvss.constants3.METRICS_VALUES[k]:
raise ValidationError("Metric {0} has unknown value {1}. Valid ones are: {2}".format( raise ValidationError("Metric {0} has unknown value {1}. Valid ones are: {2}".format(
k, v, k, v,
", ".join(cvss.constants3.METRICS_VALUES[k].keys()), ", ".join(cvss.constants3.METRICS_VALUES[k].keys()),

View File

@ -28,7 +28,7 @@ class PgwebAdmin(admin.ModelAdmin):
def change_view(self, request, object_id, form_url='', extra_context=None): def change_view(self, request, object_id, form_url='', extra_context=None):
if hasattr(self.model, 'send_notification') and self.model.send_notification: if hasattr(self.model, 'send_notification') and self.model.send_notification:
# Anything that sends notification supports manual notifications # Anything that sends notification supports manual notifications
if extra_context == None: if extra_context is None:
extra_context = dict() extra_context = dict()
extra_context['notifications'] = ModerationNotification.objects.filter(objecttype=self.model.__name__, objectid=object_id).order_by('date') extra_context['notifications'] = ModerationNotification.objects.filter(objecttype=self.model.__name__, objectid=object_id).order_by('date')
@ -57,7 +57,7 @@ class PgwebAdmin(admin.ModelAdmin):
if change and hasattr(self.model, 'send_notification') and self.model.send_notification: if change and hasattr(self.model, 'send_notification') and self.model.send_notification:
# We only do processing if something changed, not when adding # We only do processing if something changed, not when adding
# a new object. # a new object.
if request.POST.has_key('new_notification') and request.POST['new_notification']: if 'new_notification' in request.POST and request.POST['new_notification']:
# Need to send off a new notification. We'll also store # Need to send off a new notification. We'll also store
# it in the database for future reference, of course. # it in the database for future reference, of course.
if not obj.org.email: if not obj.org.email:

View File

@ -87,7 +87,7 @@ sitenav = {
def get_nav_menu(section): def get_nav_menu(section):
if sitenav.has_key(section): if section in sitenav:
return sitenav[section] return sitenav[section]
else: else:
return {} return {}

View File

@ -27,7 +27,7 @@ def get_client_ip(request):
or behind one of our SSL proxies, make sure to get the *actual* client IP, or behind one of our SSL proxies, make sure to get the *actual* client IP,
and not the IP of the cache/proxy. and not the IP of the cache/proxy.
""" """
if request.META.has_key('HTTP_X_FORWARDED_FOR'): if 'HTTP_X_FORWARDED_FOR' in request.META:
# There is a x-forwarded-for header, so trust it but only if the actual connection # There is a x-forwarded-for header, so trust it but only if the actual connection
# is coming in from one of our frontends. # is coming in from one of our frontends.
if request.META['REMOTE_ADDR'] in settings.FRONTEND_SERVERS: if request.META['REMOTE_ADDR'] in settings.FRONTEND_SERVERS:

View File

@ -54,7 +54,7 @@ def _get_all_notification_fields(obj):
else: else:
# Include all field names except specified ones, # Include all field names except specified ones,
# that are local to this model (not auto created) # that are local to this model (not auto created)
return [f.name for f in obj._meta.get_fields() if not f.name in ('approved', 'submitter', 'id', ) and not f.auto_created] return [f.name for f in obj._meta.get_fields() if f.name not in ('approved', 'submitter', 'id', ) and not f.auto_created]
def _get_attr_value(obj, fieldname): def _get_attr_value(obj, fieldname):

View File

@ -54,7 +54,7 @@ def login(request):
from django.contrib.auth.views import login from django.contrib.auth.views import login
return login(request, template_name='admin.html') return login(request, template_name='admin.html')
if request.GET.has_key('next'): if 'next' in request.GET:
# Put together an url-encoded dict of parameters we're getting back, # Put together an url-encoded dict of parameters we're getting back,
# including a small nonce at the beginning to make sure it doesn't # including a small nonce at the beginning to make sure it doesn't
# encrypt the same way every time. # encrypt the same way every time.
@ -85,13 +85,13 @@ def logout(request):
# Receive an authentication response from the main website and try # Receive an authentication response from the main website and try
# to log the user in. # to log the user in.
def auth_receive(request): def auth_receive(request):
if request.GET.has_key('s') and request.GET['s'] == "logout": if 's' in request.GET and request.GET['s'] == "logout":
# This was a logout request # This was a logout request
return HttpResponseRedirect('/') return HttpResponseRedirect('/')
if not request.GET.has_key('i'): if 'i' not in request.GET:
return HttpResponse("Missing IV in url!", status=400) return HttpResponse("Missing IV in url!", status=400)
if not request.GET.has_key('d'): if 'd' not in request.GET:
return HttpResponse("Missing data in url!", status=400) return HttpResponse("Missing data in url!", status=400)
# Set up an AES object and decrypt the data we received # Set up an AES object and decrypt the data we received
@ -173,7 +173,7 @@ We apologize for the inconvenience.
# Finally, check of we have a data package that tells us where to # Finally, check of we have a data package that tells us where to
# redirect the user. # redirect the user.
if data.has_key('d'): if 'd' in data:
(ivs, datas) = data['d'][0].split('$') (ivs, datas) = data['d'][0].split('$')
decryptor = AES.new(SHA.new(settings.SECRET_KEY).digest()[:16], decryptor = AES.new(SHA.new(settings.SECRET_KEY).digest()[:16],
AES.MODE_CBC, AES.MODE_CBC,
@ -183,7 +183,7 @@ We apologize for the inconvenience.
rdata = urlparse.parse_qs(s, strict_parsing=True) rdata = urlparse.parse_qs(s, strict_parsing=True)
except ValueError: except ValueError:
return HttpResponse("Invalid encrypted data received.", status=400) return HttpResponse("Invalid encrypted data received.", status=400)
if rdata.has_key('r'): if 'r' in rdata:
# Redirect address # Redirect address
return HttpResponseRedirect(rdata['r'][0]) return HttpResponseRedirect(rdata['r'][0])
# No redirect specified, see if we have it in our settings # No redirect specified, see if we have it in our settings

View File

@ -78,7 +78,7 @@ if __name__ == "__main__":
found = False found = False
for p, pinfo in platforms.items(): for p, pinfo in platforms.items():
if pinfo['p'] == familypath and pinfo['f'] == shortdist: if pinfo['p'] == familypath and pinfo['f'] == shortdist:
if not reporpms[v].has_key(p): if p not in reporpms[v]:
reporpms[v][p] = {} reporpms[v][p] = {}
reporpms[v][p][arch] = max(ver, reporpms[v][p].get(arch, 0)) reporpms[v][p][arch] = max(ver, reporpms[v][p].get(arch, 0))
platforms[p]['found'] = True platforms[p]['found'] = True

View File

@ -26,7 +26,7 @@ if __name__ == "__main__":
for l in sys.stdin: for l in sys.stdin:
if l.startswith('templates/'): if l.startswith('templates/'):
tmpl = l[len('templates/'):].strip() tmpl = l[len('templates/'):].strip()
if not tmpl in BANNED_TEMPLATES: if tmpl not in BANNED_TEMPLATES:
curs.execute("SELECT varnish_purge_xkey(%(key)s)", { curs.execute("SELECT varnish_purge_xkey(%(key)s)", {
'key': 'pgwt_{0}'.format(hashlib.md5(tmpl).hexdigest()), 'key': 'pgwt_{0}'.format(hashlib.md5(tmpl).hexdigest()),
}) })

View File

@ -62,7 +62,7 @@ class MultiListCrawler(object):
'month': d.month, 'month': d.month,
}) })
x = curs.fetchall() x = curs.fetchall()
if x[0][0] != None: if x[0][0] is not None:
maxmsg = x[0][0] maxmsg = x[0][0]
else: else:
maxmsg = -1 maxmsg = -1

View File

@ -93,7 +93,7 @@ class BaseSiteCrawler(object):
return False return False
def crawl_page(self, url, relprio, internal): def crawl_page(self, url, relprio, internal):
if self.pages_crawled.has_key(url) or self.pages_crawled.has_key(url + "/"): if url in self.pages_crawled or url + "/" in self.pages_crawled:
return return
if self.exclude_url(url): if self.exclude_url(url):
@ -103,7 +103,7 @@ class BaseSiteCrawler(object):
(result, pagedata, lastmod) = self.fetch_page(url) (result, pagedata, lastmod) = self.fetch_page(url)
if result == 0: if result == 0:
if pagedata == None: if pagedata is None:
# Result ok but no data, means that the page was not modified. # Result ok but no data, means that the page was not modified.
# Thus we can happily consider ourselves done here. # Thus we can happily consider ourselves done here.
return return
@ -184,7 +184,7 @@ class BaseSiteCrawler(object):
h.putrequest("GET", url) h.putrequest("GET", url)
h.putheader("User-agent", "pgsearch/0.2") h.putheader("User-agent", "pgsearch/0.2")
h.putheader("Connection", "close") h.putheader("Connection", "close")
if self.scantimes.has_key(url): if url in self.scantimes:
h.putheader("If-Modified-Since", formatdate(time.mktime(self.scantimes[url].timetuple()))) h.putheader("If-Modified-Since", formatdate(time.mktime(self.scantimes[url].timetuple())))
h.endheaders() h.endheaders()
resp = h.getresponse() resp = h.getresponse()
@ -224,7 +224,7 @@ class BaseSiteCrawler(object):
return datetime.datetime.now() return datetime.datetime.now()
def parse_html(self, page): def parse_html(self, page):
if page == None: if page is None:
return None return None
p = GenericHtmlParser() p = GenericHtmlParser()

View File

@ -46,7 +46,7 @@ class GenericSiteCrawler(BaseSiteCrawler):
def post_process_page(self, url): def post_process_page(self, url):
for l in self.resolve_links(self.page.links, url): for l in self.resolve_links(self.page.links, url):
if self.pages_crawled.has_key(l) or self.pages_crawled.has_key(l + "/"): if l in self.pages_crawled or l + "/" in self.pages_crawled:
continue continue
if self.exclude_url(l): if self.exclude_url(l):
continue continue

View File

@ -85,7 +85,7 @@ class SitemapSiteCrawler(BaseSiteCrawler):
# Advance 8 characters - length of https://. # Advance 8 characters - length of https://.
url = url[len(self.hostname) + 8:] url = url[len(self.hostname) + 8:]
if lastmod: if lastmod:
if self.scantimes.has_key(url): if url in self.scantimes:
if lastmod < self.scantimes[url]: if lastmod < self.scantimes[url]:
# Not modified since last scan, so don't reload # Not modified since last scan, so don't reload
# Stick it in the list of pages we've scanned though, # Stick it in the list of pages we've scanned though,