mirror of https://github.com/postgres/pgweb.git, synced 2025-08-12 23:05:12 +00:00
Update to new style exception catching
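The change itself is mechanical: Python 3 removes the old comma form of exception catching (except SomeError, e:), while the "as" form (except SomeError as e:) is accepted by Python 2.6+ and Python 3 alike. A minimal sketch of the two spellings, illustrative only and not code from pgweb:

    # Old, Python 2-only spelling (a SyntaxError on Python 3):
    #     except ValueError, e:
    # New spelling, valid on Python 2.6 and later as well as on Python 3:
    try:
        int("not a number")
    except ValueError as e:
        print("caught: %s" % e)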
@@ -30,7 +30,7 @@ class PgwebAuthenticationForm(AuthenticationForm):
     def clean(self):
         try:
             return super(PgwebAuthenticationForm, self).clean()
-        except ValueError, e:
+        except ValueError as e:
             if e.message.startswith('Unknown password hashing algorithm'):
                 # This is *probably* a user trying to log in with an account that has not
                 # been set up properly yet. It could be an actually unsupported hashing
@@ -41,7 +41,7 @@ def _login_oauth(request, provider, authurl, tokenurl, scope, authdatafunc):
         try:
             (email, firstname, lastname) = authdatafunc(oa)
             email = email.lower()
-        except KeyError, e:
+        except KeyError as e:
             log.warning("Oauth signing using {0} was missing data: {1}".format(provider, e))
             return HttpResponse('OAuth login was missing critical data. To log in, you need to allow access to email, first name and last name!')
 
@@ -196,8 +196,8 @@ def login_oauth(request, provider):
     if hasattr(m, fn):
         try:
             return getattr(m, fn)(request)
-        except OAuthException, e:
+        except OAuthException as e:
             return HttpResponse(e)
-        except Exception, e:
+        except Exception as e:
             log.error('Excpetion during OAuth: %s' % e)
             return HttpResponse('An unhandled exception occurred during the authentication process')
@@ -61,7 +61,7 @@ class ReCaptchaField(forms.CharField):
                 'Content-type': 'application/x-www-form-urlencoded',
             })
             c.sock.settimeout(10)
-        except Exception, e:
+        except Exception as e:
             # Error to connect at TCP level
             log.error('Failed to connect to google recaptcha API: %s' % e)
             raise ValidationError('Failed in API call to google recaptcha')
@@ -50,5 +50,5 @@ class Command(BaseCommand):
 
                 if fetchedsomething:
                     importfeed.purge_related()
-            except Exception, e:
-                print "Failed to load %s: %s" % (importfeed, e)
+            except Exception as e:
+                print("Failed to load %s: %s" % (importfeed, e))
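Besides the except syntax, this hunk also switches a Python 2 print statement to the parenthesized print(...) form, which Python 2 still accepts (a print statement with a single parenthesized argument) and Python 3 requires (a call to the print function). A small illustration, not taken from pgweb:

    # Works identically on Python 2 and Python 3 for a single argument:
    err = ValueError("boom")
    print("Failed to load %s: %s" % ("some-feed", err))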
@@ -38,7 +38,7 @@ def ftpbrowser(request, subpath):
         f = open(settings.FTP_PICKLE, "rb")
         allnodes = pickle.load(f)
         f.close()
-    except Exception, e:
+    except Exception as e:
         return HttpServerError(request, "Failed to load ftp site information: %s" % e)
 
     # An incoming subpath may either be canonical, or have one or more elements
@@ -25,7 +25,7 @@ class QueuedMailAdmin(admin.ModelAdmin):
                 if b:
                     return b
             return "Could not find body"
-        except Exception, e:
+        except Exception as e:
             return "Failed to get body: %s" % e
 
     parsed_content.short_description = 'Parsed mail'
@@ -46,7 +46,7 @@ def other_vectors_validator(val):
             ))
     except ValidationError:
         raise
-    except Exception, e:
+    except Exception as e:
         raise ValidationError("Failed to parse vectors: %s" % e)
 
 
@@ -116,7 +116,7 @@ class MultiListCrawler(object):
                 try:
                     if not self.crawl_single_message(listid, listname, year, month, currentmsg):
                         break
-                except Exception, e:
+                except Exception as e:
                     log("Exception when crawling %s/%s/%s/%s - %s" % (
                         listname, year, month, currentmsg, e))
                     # Continue on to try the next message
@@ -85,7 +85,7 @@ class BaseSiteCrawler(object):
             (url, relprio, internal) = self.queue.get()
             try:
                 self.crawl_page(url, relprio, internal)
-            except Exception, e:
+            except Exception as e:
                 log("Exception crawling '%s': %s" % (url, e))
             self.queue.task_done()
 
@@ -127,7 +127,7 @@ class BaseSiteCrawler(object):
         pagedata = lossy_unicode(pagedata)
         try:
             self.page = self.parse_html(pagedata)
-        except Exception, e:
+        except Exception as e:
             log("Failed to parse HTML for %s" % url)
             log(e)
             return
@@ -213,7 +213,7 @@ class BaseSiteCrawler(object):
             else:
                 # print "Url %s returned status %s" % (url, resp.status)
                 pass
-        except Exception, e:
+        except Exception as e:
             log("Exception when loading url %s: %s" % (url, e))
             return (2, None, None)
 
|
@ -15,9 +15,9 @@ def threadwrapper(func, *args):
|
|||||||
# forcibly terminate the child.
|
# forcibly terminate the child.
|
||||||
try:
|
try:
|
||||||
p.join()
|
p.join()
|
||||||
except KeyboardInterrupt, e:
|
except KeyboardInterrupt as e:
|
||||||
print "Keyboard interrupt, terminating child process!"
|
print "Keyboard interrupt, terminating child process!"
|
||||||
p.terminate()
|
p.terminate()
|
||||||
except Exception, e:
|
except Exception as e:
|
||||||
print "Exception %s, terminating child process!" % e
|
print "Exception %s, terminating child process!" % e
|
||||||
p.terminate()
|
p.terminate()
|
||||||
|
@ -26,7 +26,7 @@ def do_purge(consumername, headers):
|
|||||||
return True
|
return True
|
||||||
logging.warning("Varnish purge on %s returned status %s (%s)" % (consumername, resp.status, resp.reason))
|
logging.warning("Varnish purge on %s returned status %s (%s)" % (consumername, resp.status, resp.reason))
|
||||||
return False
|
return False
|
||||||
except Exception, ex:
|
except Exception as ex:
|
||||||
logging.error("Exception purging on %s: %s" % (consumername, ex))
|
logging.error("Exception purging on %s: %s" % (consumername, ex))
|
||||||
return False
|
return False
|
||||||
return True
|
return True
|
||||||
|