Update to new style exception catching
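This commit mechanically rewrites exception handlers across pgweb from the Python 2-only comma syntax, `except SomeError, e:`, to `except SomeError as e:`, which is valid on Python 2.6+ and is the only form Python 3 accepts. A minimal sketch of the change, using a hypothetical function rather than pgweb code:

    def risky():
        raise ValueError("boom")

    # Old spelling -- a SyntaxError on Python 3:
    #   except ValueError, e:
    # New spelling, valid on Python 2.6+ and Python 3:
    try:
        risky()
    except ValueError as e:
        print("caught: %s" % e)

One behavioral difference worth knowing: with the `as` form, Python 3 deletes the bound name when the handler exits, so `e` cannot be read after the `except` block.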
@@ -30,7 +30,7 @@ class PgwebAuthenticationForm(AuthenticationForm):
     def clean(self):
         try:
             return super(PgwebAuthenticationForm, self).clean()
-        except ValueError, e:
+        except ValueError as e:
             if e.message.startswith('Unknown password hashing algorithm'):
                 # This is *probably* a user trying to log in with an account that has not
                 # been set up properly yet. It could be an actually unsupported hashing
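A migration caveat in the hunk above: `e.message` works on Python 2, but `BaseException.message` was removed in Python 3, so this check would raise AttributeError once the site runs on Python 3. A sketch of a portable alternative, illustrative only and not part of this commit:

    try:
        raise ValueError("Unknown password hashing algorithm 'md4'")
    except ValueError as e:
        # str(e) works on both Python 2 and 3; e.message does not exist on 3.
        if str(e).startswith('Unknown password hashing algorithm'):
            print("probably an account that has not been set up yet")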
@@ -41,7 +41,7 @@ def _login_oauth(request, provider, authurl, tokenurl, scope, authdatafunc):
     try:
         (email, firstname, lastname) = authdatafunc(oa)
         email = email.lower()
-    except KeyError, e:
+    except KeyError as e:
         log.warning("Oauth signing using {0} was missing data: {1}".format(provider, e))
         return HttpResponse('OAuth login was missing critical data. To log in, you need to allow access to email, first name and last name!')

@@ -196,8 +196,8 @@ def login_oauth(request, provider):
     if hasattr(m, fn):
         try:
             return getattr(m, fn)(request)
-        except OAuthException, e:
+        except OAuthException as e:
             return HttpResponse(e)
-        except Exception, e:
+        except Exception as e:
             log.error('Excpetion during OAuth: %s' % e)
             return HttpResponse('An unhandled exception occurred during the authentication process')
@@ -61,7 +61,7 @@ class ReCaptchaField(forms.CharField):
                 'Content-type': 'application/x-www-form-urlencoded',
             })
             c.sock.settimeout(10)
-        except Exception, e:
+        except Exception as e:
             # Error to connect at TCP level
             log.error('Failed to connect to google recaptcha API: %s' % e)
             raise ValidationError('Failed in API call to google recaptcha')
@@ -50,5 +50,5 @@ class Command(BaseCommand):

                 if fetchedsomething:
                     importfeed.purge_related()
-        except Exception, e:
-            print "Failed to load %s: %s" % (importfeed, e)
+        except Exception as e:
+            print("Failed to load %s: %s" % (importfeed, e))
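Besides the `except` rewrite, the hunk above also parenthesizes a `print`. That is backward compatible: on Python 2 the parentheses simply group the single argument, so the statement and the call print the same thing, while on Python 3 only the call form parses. A small sketch:

    msg = "connection refused"
    # Python 2 statement form (SyntaxError on Python 3):
    #   print "Failed: %s" % msg
    # Works identically on Python 2 and 3 for a single argument:
    print("Failed: %s" % msg)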
@@ -38,7 +38,7 @@ def ftpbrowser(request, subpath):
        f = open(settings.FTP_PICKLE, "rb")
        allnodes = pickle.load(f)
        f.close()
-    except Exception, e:
+    except Exception as e:
        return HttpServerError(request, "Failed to load ftp site information: %s" % e)

    # An incoming subpath may either be canonical, or have one or more elements
@@ -25,7 +25,7 @@ class QueuedMailAdmin(admin.ModelAdmin):
            if b:
                return b
            return "Could not find body"
-        except Exception, e:
+        except Exception as e:
            return "Failed to get body: %s" % e

    parsed_content.short_description = 'Parsed mail'
@@ -46,7 +46,7 @@ def other_vectors_validator(val):
            ))
    except ValidationError:
        raise
-    except Exception, e:
+    except Exception as e:
        raise ValidationError("Failed to parse vectors: %s" % e)


@@ -116,7 +116,7 @@ class MultiListCrawler(object):
                try:
                    if not self.crawl_single_message(listid, listname, year, month, currentmsg):
                        break
-                except Exception, e:
+                except Exception as e:
                    log("Exception when crawling %s/%s/%s/%s - %s" % (
                        listname, year, month, currentmsg, e))
                    # Continue on to try the next message
@@ -85,7 +85,7 @@ class BaseSiteCrawler(object):
            (url, relprio, internal) = self.queue.get()
            try:
                self.crawl_page(url, relprio, internal)
-            except Exception, e:
+            except Exception as e:
                log("Exception crawling '%s': %s" % (url, e))
            self.queue.task_done()

@@ -127,7 +127,7 @@ class BaseSiteCrawler(object):
        pagedata = lossy_unicode(pagedata)
        try:
            self.page = self.parse_html(pagedata)
-        except Exception, e:
+        except Exception as e:
            log("Failed to parse HTML for %s" % url)
            log(e)
            return
@@ -213,7 +213,7 @@ class BaseSiteCrawler(object):
            else:
                # print "Url %s returned status %s" % (url, resp.status)
                pass
-        except Exception, e:
+        except Exception as e:
            log("Exception when loading url %s: %s" % (url, e))
            return (2, None, None)

@@ -15,9 +15,9 @@ def threadwrapper(func, *args):
    # forcibly terminate the child.
    try:
        p.join()
-    except KeyboardInterrupt, e:
+    except KeyboardInterrupt as e:
        print "Keyboard interrupt, terminating child process!"
        p.terminate()
-    except Exception, e:
+    except Exception as e:
        print "Exception %s, terminating child process!" % e
        p.terminate()
@@ -26,7 +26,7 @@ def do_purge(consumername, headers):
                return True
            logging.warning("Varnish purge on %s returned status %s (%s)" % (consumername, resp.status, resp.reason))
            return False
-    except Exception, ex:
+    except Exception as ex:
        logging.error("Exception purging on %s: %s" % (consumername, ex))
        return False
    return True