From 142f0805c2a8aa865f88bffdd6fbe4d76397ad03 Mon Sep 17 00:00:00 2001
From: Magnus Hagander
Date: Sat, 19 Jan 2019 19:34:14 +0100
Subject: [PATCH] Update to new style exception catching

---
 pgweb/account/forms.py                            | 2 +-
 pgweb/account/oauthclient.py                      | 6 +++---
 pgweb/account/recaptcha.py                        | 2 +-
 pgweb/core/management/commands/fetch_rss_feeds.py | 4 ++--
 pgweb/downloads/views.py                          | 2 +-
 pgweb/mailqueue/admin.py                          | 2 +-
 pgweb/security/models.py                          | 2 +-
 tools/search/crawler/lib/archives.py              | 2 +-
 tools/search/crawler/lib/basecrawler.py           | 6 +++---
 tools/search/crawler/lib/threadwrapper.py         | 4 ++--
 tools/varnishqueue/varnish_queue.py               | 2 +-
 11 files changed, 17 insertions(+), 17 deletions(-)

diff --git a/pgweb/account/forms.py b/pgweb/account/forms.py
index 712b5142..ff3c2eb4 100644
--- a/pgweb/account/forms.py
+++ b/pgweb/account/forms.py
@@ -30,7 +30,7 @@ class PgwebAuthenticationForm(AuthenticationForm):
     def clean(self):
         try:
             return super(PgwebAuthenticationForm, self).clean()
-        except ValueError, e:
+        except ValueError as e:
             if e.message.startswith('Unknown password hashing algorithm'):
                 # This is *probably* a user trying to log in with an account that has not
                 # been set up properly yet. It could be an actually unsupported hashing
diff --git a/pgweb/account/oauthclient.py b/pgweb/account/oauthclient.py
index a287081d..808d284b 100644
--- a/pgweb/account/oauthclient.py
+++ b/pgweb/account/oauthclient.py
@@ -41,7 +41,7 @@ def _login_oauth(request, provider, authurl, tokenurl, scope, authdatafunc):
     try:
         (email, firstname, lastname) = authdatafunc(oa)
         email = email.lower()
-    except KeyError, e:
+    except KeyError as e:
         log.warning("Oauth signing using {0} was missing data: {1}".format(provider, e))
         return HttpResponse('OAuth login was missing critical data. To log in, you need to allow access to email, first name and last name!')
 
@@ -196,8 +196,8 @@ def login_oauth(request, provider):
     if hasattr(m, fn):
         try:
             return getattr(m, fn)(request)
-        except OAuthException, e:
+        except OAuthException as e:
             return HttpResponse(e)
-        except Exception, e:
+        except Exception as e:
             log.error('Excpetion during OAuth: %s' % e)
             return HttpResponse('An unhandled exception occurred during the authentication process')
diff --git a/pgweb/account/recaptcha.py b/pgweb/account/recaptcha.py
index f21df56a..51dbfe40 100644
--- a/pgweb/account/recaptcha.py
+++ b/pgweb/account/recaptcha.py
@@ -61,7 +61,7 @@ class ReCaptchaField(forms.CharField):
                 'Content-type': 'application/x-www-form-urlencoded',
             })
             c.sock.settimeout(10)
-        except Exception, e:
+        except Exception as e:
             # Error to connect at TCP level
             log.error('Failed to connect to google recaptcha API: %s' % e)
             raise ValidationError('Failed in API call to google recaptcha')
diff --git a/pgweb/core/management/commands/fetch_rss_feeds.py b/pgweb/core/management/commands/fetch_rss_feeds.py
index 39bfc9c7..afe5a65b 100644
--- a/pgweb/core/management/commands/fetch_rss_feeds.py
+++ b/pgweb/core/management/commands/fetch_rss_feeds.py
@@ -50,5 +50,5 @@ class Command(BaseCommand):
 
                 if fetchedsomething:
                     importfeed.purge_related()
-            except Exception, e:
-                print "Failed to load %s: %s" % (importfeed, e)
+            except Exception as e:
+                print("Failed to load %s: %s" % (importfeed, e))
diff --git a/pgweb/downloads/views.py b/pgweb/downloads/views.py
index deb29593..943c866c 100644
--- a/pgweb/downloads/views.py
+++ b/pgweb/downloads/views.py
@@ -38,7 +38,7 @@ def ftpbrowser(request, subpath):
         f = open(settings.FTP_PICKLE, "rb")
         allnodes = pickle.load(f)
         f.close()
-    except Exception, e:
+    except Exception as e:
         return HttpServerError(request, "Failed to load ftp site information: %s" % e)
 
     # An incoming subpath may either be canonical, or have one or more elements
diff --git a/pgweb/mailqueue/admin.py b/pgweb/mailqueue/admin.py
index b99a3b63..46a6aa1f 100644
--- a/pgweb/mailqueue/admin.py
+++ b/pgweb/mailqueue/admin.py
@@ -25,7 +25,7 @@ class QueuedMailAdmin(admin.ModelAdmin):
                 if b:
                     return b
             return "Could not find body"
-        except Exception, e:
+        except Exception as e:
             return "Failed to get body: %s" % e
 
     parsed_content.short_description = 'Parsed mail'
diff --git a/pgweb/security/models.py b/pgweb/security/models.py
index cde84ba0..6d88282c 100644
--- a/pgweb/security/models.py
+++ b/pgweb/security/models.py
@@ -46,7 +46,7 @@ def other_vectors_validator(val):
             ))
     except ValidationError:
         raise
-    except Exception, e:
+    except Exception as e:
         raise ValidationError("Failed to parse vectors: %s" % e)
 
 
diff --git a/tools/search/crawler/lib/archives.py b/tools/search/crawler/lib/archives.py
index ab1a5f24..1e42a8ba 100644
--- a/tools/search/crawler/lib/archives.py
+++ b/tools/search/crawler/lib/archives.py
@@ -116,7 +116,7 @@ class MultiListCrawler(object):
             try:
                 if not self.crawl_single_message(listid, listname, year, month, currentmsg):
                     break
-            except Exception, e:
+            except Exception as e:
                 log("Exception when crawling %s/%s/%s/%s - %s" % (
                     listname, year, month, currentmsg, e))
                 # Continue on to try the next message
diff --git a/tools/search/crawler/lib/basecrawler.py b/tools/search/crawler/lib/basecrawler.py
index 7bd3c4d3..aeae20b8 100644
--- a/tools/search/crawler/lib/basecrawler.py
+++ b/tools/search/crawler/lib/basecrawler.py
@@ -85,7 +85,7 @@ class BaseSiteCrawler(object):
             (url, relprio, internal) = self.queue.get()
             try:
                 self.crawl_page(url, relprio, internal)
-            except Exception, e:
+            except Exception as e:
                 log("Exception crawling '%s': %s" % (url, e))
             self.queue.task_done()
 
@@ -127,7 +127,7 @@ class BaseSiteCrawler(object):
             pagedata = lossy_unicode(pagedata)
         try:
             self.page = self.parse_html(pagedata)
-        except Exception, e:
+        except Exception as e:
             log("Failed to parse HTML for %s" % url)
             log(e)
             return
@@ -213,7 +213,7 @@ class BaseSiteCrawler(object):
             else:
                 # print "Url %s returned status %s" % (url, resp.status)
                 pass
-        except Exception, e:
+        except Exception as e:
             log("Exception when loading url %s: %s" % (url, e))
             return (2, None, None)
 
diff --git a/tools/search/crawler/lib/threadwrapper.py b/tools/search/crawler/lib/threadwrapper.py
index 87c70265..e305dffc 100644
--- a/tools/search/crawler/lib/threadwrapper.py
+++ b/tools/search/crawler/lib/threadwrapper.py
@@ -15,9 +15,9 @@ def threadwrapper(func, *args):
     # forcibly terminate the child.
     try:
         p.join()
-    except KeyboardInterrupt, e:
+    except KeyboardInterrupt as e:
         print "Keyboard interrupt, terminating child process!"
         p.terminate()
-    except Exception, e:
+    except Exception as e:
         print "Exception %s, terminating child process!" % e
         p.terminate()
diff --git a/tools/varnishqueue/varnish_queue.py b/tools/varnishqueue/varnish_queue.py
index 31096759..70749398 100755
--- a/tools/varnishqueue/varnish_queue.py
+++ b/tools/varnishqueue/varnish_queue.py
@@ -26,7 +26,7 @@ def do_purge(consumername, headers):
             return True
         logging.warning("Varnish purge on %s returned status %s (%s)" % (consumername, resp.status, resp.reason))
         return False
-    except Exception, ex:
+    except Exception as ex:
         logging.error("Exception purging on %s: %s" % (consumername, ex))
         return False
     return True
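
Note (not part of the patch itself): the change applied throughout is
mechanical. "except SomeError, e:" is Python 2-only syntax and a
SyntaxError on Python 3, while "except SomeError as e:" is accepted by
both Python 2.6+ and Python 3. A minimal standalone sketch of the two
spellings (illustrative names, not code from this repository):

    # Old spelling -- the comma form. Removed in Python 3, where it
    # fails at compile time with a SyntaxError:
    #
    #     try:
    #         int("not a number")
    #     except ValueError, e:
    #         print "got %s" % e

    # New spelling -- valid on Python 2.6+ and Python 3:
    try:
        int("not a number")
    except ValueError as e:
        print("got %s" % e)

Two behavioral details are worth keeping in mind for follow-up work:
on Python 3 the bound name (e above) is unbound when the except block
exits, so it cannot be referenced afterwards; and BaseException.message
no longer exists on Python 3, so the unchanged context line
"if e.message.startswith(...)" in pgweb/account/forms.py will need a
separate fix (for example str(e) or e.args[0]) before that code runs
there.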