From 87237f6536a1e6df112ca34818cf8459cb04fc68 Mon Sep 17 00:00:00 2001 From: Magnus Hagander Date: Thu, 17 Jan 2019 15:30:25 +0100 Subject: [PATCH] Tabs, meet your new overlords: spaces In a quest to reach pep8, use spaces to indent rather than tabs. --- pgweb/account/admin.py | 58 +- pgweb/account/forms.py | 214 ++--- .../migrations/0002_lowercase_email.py | 6 +- pgweb/account/models.py | 50 +- pgweb/account/oauthclient.py | 272 +++--- pgweb/account/recaptcha.py | 120 +-- pgweb/account/urls.py | 76 +- pgweb/account/views.py | 876 +++++++++--------- pgweb/contributors/admin.py | 22 +- pgweb/contributors/models.py | 50 +- pgweb/contributors/struct.py | 2 +- pgweb/contributors/views.py | 8 +- pgweb/core/admin.py | 34 +- pgweb/core/feeds.py | 22 +- pgweb/core/forms.py | 90 +- pgweb/core/lookups.py | 26 +- .../commands/cleanup_old_records.py | 26 +- .../management/commands/fetch_rss_feeds.py | 62 +- .../management/commands/moderation_report.py | 26 +- pgweb/core/management/commands/sessioninfo.py | 52 +- pgweb/core/models.py | 290 +++--- pgweb/core/struct.py | 24 +- pgweb/core/templatetags/pgfilters.py | 40 +- pgweb/core/views.py | 416 ++++----- pgweb/docs/forms.py | 10 +- pgweb/docs/migrations/0003_docs_alias.py | 2 +- pgweb/docs/models.py | 46 +- pgweb/docs/struct.py | 74 +- pgweb/docs/views.py | 256 ++--- pgweb/downloads/admin.py | 62 +- pgweb/downloads/forms.py | 16 +- pgweb/downloads/models.py | 130 +-- pgweb/downloads/struct.py | 12 +- pgweb/downloads/views.py | 356 +++---- pgweb/events/admin.py | 46 +- pgweb/events/feeds.py | 20 +- pgweb/events/forms.py | 76 +- pgweb/events/models.py | 108 +-- pgweb/events/struct.py | 22 +- pgweb/events/views.py | 54 +- pgweb/featurematrix/admin.py | 14 +- .../migrations/0002_featurematrix_96.py | 2 +- .../migrations/0003_feature_v10.py | 2 +- pgweb/featurematrix/models.py | 86 +- pgweb/featurematrix/struct.py | 2 +- pgweb/featurematrix/views.py | 50 +- pgweb/legacyurl/views.py | 4 +- pgweb/lists/management/commands/sync_lists.py | 86 +- pgweb/lists/models.py | 50 +- pgweb/lists/struct.py | 2 +- pgweb/lists/views.py | 32 +- pgweb/mailqueue/admin.py | 36 +- .../management/commands/send_queued_mail.py | 58 +- pgweb/mailqueue/models.py | 20 +- pgweb/mailqueue/util.py | 76 +- pgweb/misc/forms.py | 48 +- pgweb/misc/models.py | 8 +- pgweb/misc/views.py | 108 +-- pgweb/news/admin.py | 24 +- pgweb/news/feeds.py | 30 +- pgweb/news/forms.py | 40 +- .../news/management/commands/twitter_post.py | 52 +- .../management/commands/twitter_register.py | 50 +- pgweb/news/models.py | 74 +- pgweb/news/struct.py | 22 +- pgweb/news/views.py | 46 +- pgweb/profserv/admin.py | 6 +- pgweb/profserv/forms.py | 16 +- pgweb/profserv/models.py | 62 +- pgweb/profserv/struct.py | 6 +- pgweb/profserv/views.py | 60 +- pgweb/pugs/admin.py | 6 +- pgweb/pugs/models.py | 28 +- pgweb/pugs/views.py | 30 +- pgweb/quotes/admin.py | 2 +- pgweb/quotes/models.py | 24 +- pgweb/search/views.py | 538 +++++------ pgweb/security/admin.py | 84 +- .../management/commands/update_cve_links.py | 32 +- pgweb/security/migrations/0002_cve_visible.py | 2 +- pgweb/security/models.py | 164 ++-- pgweb/security/views.py | 30 +- pgweb/settings.py | 52 +- pgweb/sponsors/models.py | 62 +- pgweb/sponsors/struct.py | 4 +- pgweb/sponsors/views.py | 16 +- pgweb/survey/admin.py | 8 +- pgweb/survey/models.py | 148 +-- pgweb/survey/views.py | 72 +- pgweb/urls.py | 206 ++-- pgweb/util/admin.py | 162 ++-- pgweb/util/auth.py | 26 +- pgweb/util/contexts.py | 224 ++--- pgweb/util/decorators.py | 74 +- pgweb/util/helpers.py | 144 
+--
pgweb/util/middleware.py | 88 +-
pgweb/util/misc.py | 132 +--
pgweb/util/moderation.py | 28 +-
pgweb/util/signals.py | 268 +++---
pgweb/util/sitestruct.py | 34 +-
pgweb/util/templateloader.py | 14 +-
tools/communityauth/generate_cryptkey.py | 16 +-
tools/communityauth/sample/django/auth.py | 292 +++---
tools/communityauth/test_auth.py | 88 +-
tools/docs/docload.py | 122 +--
tools/ftp/spider_ftp.py | 110 +--
tools/ftp/spider_yum.py | 152 +--
tools/localhtmlvalidate/localhtmlvalidate.py | 130 +--
tools/purgehook/purgehook.py | 40 +-
tools/search/crawler/lib/archives.py | 284 +++---
tools/search/crawler/lib/basecrawler.py | 432 ++++-----
tools/search/crawler/lib/genericsite.py | 76 +-
tools/search/crawler/lib/log.py | 2 +-
tools/search/crawler/lib/parsers.py | 264 +++---
tools/search/crawler/lib/sitemapsite.py | 162 ++--
tools/search/crawler/lib/threadwrapper.py | 24 +-
tools/search/crawler/listcrawler.py | 72 +-
tools/search/crawler/listsync.py | 68 +-
tools/search/crawler/webcrawler.py | 38 +-
tools/varnishqueue/nagios_check.py | 44 +-
tools/varnishqueue/varnish_queue.py | 220 ++---
121 files changed, 5331 insertions(+), 5331 deletions(-)

diff --git a/pgweb/account/admin.py b/pgweb/account/admin.py
index 257b0a94..6127db3c 100644
--- a/pgweb/account/admin.py
+++ b/pgweb/account/admin.py
@@ -9,44 +9,44 @@ import base64
from models import CommunityAuthSite, CommunityAuthOrg

class CommunityAuthSiteAdminForm(forms.ModelForm):
- class Meta:
- model = CommunityAuthSite
- exclude = ()
+ class Meta:
+ model = CommunityAuthSite
+ exclude = ()

- def clean_cryptkey(self):
- x = None
- try:
- x = base64.b64decode(self.cleaned_data['cryptkey'])
- except TypeError:
- raise forms.ValidationError("Crypto key must be base64 encoded")
+ def clean_cryptkey(self):
+ x = None
+ try:
+ x = base64.b64decode(self.cleaned_data['cryptkey'])
+ except TypeError:
+ raise forms.ValidationError("Crypto key must be base64 encoded")

- if (len(x) != 16 and len(x) != 24 and len(x) != 32):
- raise forms.ValidationError("Crypto key must be 16, 24 or 32 bytes before being base64-encoded")
- return self.cleaned_data['cryptkey']
+ if (len(x) != 16 and len(x) != 24 and len(x) != 32):
+ raise forms.ValidationError("Crypto key must be 16, 24 or 32 bytes before being base64-encoded")
+ return self.cleaned_data['cryptkey']

class CommunityAuthSiteAdmin(admin.ModelAdmin):
- form = CommunityAuthSiteAdminForm
+ form = CommunityAuthSiteAdminForm

class PGUserChangeForm(UserChangeForm):
- """just like UserChangeForm, but removes "username" requirement"""
- def __init__(self, *args, **kwargs):
- super(PGUserChangeForm, self).__init__(*args, **kwargs)
- # because the auth.User model is set to "blank=False" and the Django
- # auth.UserChangeForm is set up as a ModelForm, it will always validate
- # the "username" even though it is not present. Thus the best way to
- # avoid the validation is to remove the "username" field, if it exists
- if self.fields.get('username'):
- del self.fields['username']
+ """just like UserChangeForm, but removes "username" requirement"""
+ def __init__(self, *args, **kwargs):
+ super(PGUserChangeForm, self).__init__(*args, **kwargs)
+ # because the auth.User model is set to "blank=False" and the Django
+ # auth.UserChangeForm is set up as a ModelForm, it will always validate
+ # the "username" even though it is not present.
Thus the best way to + # avoid the validation is to remove the "username" field, if it exists + if self.fields.get('username'): + del self.fields['username'] class PGUserAdmin(UserAdmin): - """overrides default Django user admin""" - form = PGUserChangeForm + """overrides default Django user admin""" + form = PGUserChangeForm - def get_readonly_fields(self, request, obj=None): - """this prevents users from changing a username once created""" - if obj: - return self.readonly_fields + ('username',) - return self.readonly_fields + def get_readonly_fields(self, request, obj=None): + """this prevents users from changing a username once created""" + if obj: + return self.readonly_fields + ('username',) + return self.readonly_fields admin.site.register(CommunityAuthSite, CommunityAuthSiteAdmin) admin.site.register(CommunityAuthOrg) diff --git a/pgweb/account/forms.py b/pgweb/account/forms.py index 75c904ee..d7b67d32 100644 --- a/pgweb/account/forms.py +++ b/pgweb/account/forms.py @@ -13,148 +13,148 @@ import logging log = logging.getLogger(__name__) def _clean_username(username): - username = username.lower() + username = username.lower() - if not re.match('^[a-z0-9\.-]+$', username): - raise forms.ValidationError("Invalid character in user name. Only a-z, 0-9, . and - allowed for compatibility with third party software.") - try: - User.objects.get(username=username) - except User.DoesNotExist: - return username - raise forms.ValidationError("This username is already in use") + if not re.match('^[a-z0-9\.-]+$', username): + raise forms.ValidationError("Invalid character in user name. Only a-z, 0-9, . and - allowed for compatibility with third party software.") + try: + User.objects.get(username=username) + except User.DoesNotExist: + return username + raise forms.ValidationError("This username is already in use") # Override some error handling only in the default authentication form class PgwebAuthenticationForm(AuthenticationForm): - def clean(self): - try: - return super(PgwebAuthenticationForm, self).clean() - except ValueError, e: - if e.message.startswith('Unknown password hashing algorithm'): - # This is *probably* a user trying to log in with an account that has not - # been set up properly yet. It could be an actually unsupported hashing - # algorithm, but we'll deal with that when we get there. - self._errors["__all__"] = self.error_class(["This account appears not to be properly initialized. Make sure you complete the signup process with the instructions in the email received before trying to use the account."]) - log.warning("User {0} tried to log in with invalid hash, probably because signup was completed.".format(self.cleaned_data['username'])) - return self.cleaned_data - raise e + def clean(self): + try: + return super(PgwebAuthenticationForm, self).clean() + except ValueError, e: + if e.message.startswith('Unknown password hashing algorithm'): + # This is *probably* a user trying to log in with an account that has not + # been set up properly yet. It could be an actually unsupported hashing + # algorithm, but we'll deal with that when we get there. + self._errors["__all__"] = self.error_class(["This account appears not to be properly initialized. 
Make sure you complete the signup process with the instructions in the email received before trying to use the account."]) + log.warning("User {0} tried to log in with invalid hash, probably because signup was completed.".format(self.cleaned_data['username'])) + return self.cleaned_data + raise e class CommunityAuthConsentForm(forms.Form): - consent = forms.BooleanField(help_text='Consent to sharing this data') - next = forms.CharField(widget=forms.widgets.HiddenInput()) + consent = forms.BooleanField(help_text='Consent to sharing this data') + next = forms.CharField(widget=forms.widgets.HiddenInput()) - def __init__(self, orgname, *args, **kwargs): - self.orgname = orgname - super(CommunityAuthConsentForm, self).__init__(*args, **kwargs) + def __init__(self, orgname, *args, **kwargs): + self.orgname = orgname + super(CommunityAuthConsentForm, self).__init__(*args, **kwargs) - self.fields['consent'].label = 'Consent to sharing data with {0}'.format(self.orgname) + self.fields['consent'].label = 'Consent to sharing data with {0}'.format(self.orgname) class SignupForm(forms.Form): - username = forms.CharField(max_length=30) - first_name = forms.CharField(max_length=30) - last_name = forms.CharField(max_length=30) - email = forms.EmailField() - email2 = forms.EmailField(label="Repeat email") - captcha = ReCaptchaField() + username = forms.CharField(max_length=30) + first_name = forms.CharField(max_length=30) + last_name = forms.CharField(max_length=30) + email = forms.EmailField() + email2 = forms.EmailField(label="Repeat email") + captcha = ReCaptchaField() - def __init__(self, remoteip, *args, **kwargs): - super(SignupForm, self).__init__(*args, **kwargs) - self.fields['captcha'].set_ip(remoteip) + def __init__(self, remoteip, *args, **kwargs): + super(SignupForm, self).__init__(*args, **kwargs) + self.fields['captcha'].set_ip(remoteip) - def clean_email2(self): - # If the primary email checker had an exception, the data will be gone - # from the cleaned_data structure - if not self.cleaned_data.has_key('email'): - return self.cleaned_data['email2'] - email1 = self.cleaned_data['email'].lower() - email2 = self.cleaned_data['email2'].lower() + def clean_email2(self): + # If the primary email checker had an exception, the data will be gone + # from the cleaned_data structure + if not self.cleaned_data.has_key('email'): + return self.cleaned_data['email2'] + email1 = self.cleaned_data['email'].lower() + email2 = self.cleaned_data['email2'].lower() - if email1 != email2: - raise forms.ValidationError("Email addresses don't match") - return email2 + if email1 != email2: + raise forms.ValidationError("Email addresses don't match") + return email2 - def clean_username(self): - return _clean_username(self.cleaned_data['username']) + def clean_username(self): + return _clean_username(self.cleaned_data['username']) - def clean_email(self): - email = self.cleaned_data['email'].lower() + def clean_email(self): + email = self.cleaned_data['email'].lower() - try: - User.objects.get(email=email) - except User.DoesNotExist: - return email - raise forms.ValidationError("A user with this email address is already registered") + try: + User.objects.get(email=email) + except User.DoesNotExist: + return email + raise forms.ValidationError("A user with this email address is already registered") class SignupOauthForm(forms.Form): - username = forms.CharField(max_length=30) - first_name = forms.CharField(max_length=30, required=False) - last_name = forms.CharField(max_length=30, required=False) - email = 
forms.EmailField() - captcha = ReCaptchaField() + username = forms.CharField(max_length=30) + first_name = forms.CharField(max_length=30, required=False) + last_name = forms.CharField(max_length=30, required=False) + email = forms.EmailField() + captcha = ReCaptchaField() - def __init__(self, *args, **kwargs): - super(SignupOauthForm, self).__init__(*args, **kwargs) - self.fields['first_name'].widget.attrs['readonly'] = True - self.fields['first_name'].widget.attrs['disabled'] = True - self.fields['last_name'].widget.attrs['readonly'] = True - self.fields['last_name'].widget.attrs['disabled'] = True - self.fields['email'].widget.attrs['readonly'] = True - self.fields['email'].widget.attrs['disabled'] = True + def __init__(self, *args, **kwargs): + super(SignupOauthForm, self).__init__(*args, **kwargs) + self.fields['first_name'].widget.attrs['readonly'] = True + self.fields['first_name'].widget.attrs['disabled'] = True + self.fields['last_name'].widget.attrs['readonly'] = True + self.fields['last_name'].widget.attrs['disabled'] = True + self.fields['email'].widget.attrs['readonly'] = True + self.fields['email'].widget.attrs['disabled'] = True - def clean_username(self): - return _clean_username(self.cleaned_data['username']) + def clean_username(self): + return _clean_username(self.cleaned_data['username']) - def clean_email(self): - return self.cleaned_data['email'].lower() + def clean_email(self): + return self.cleaned_data['email'].lower() class UserProfileForm(forms.ModelForm): - class Meta: - model = UserProfile - exclude = ('user',) + class Meta: + model = UserProfile + exclude = ('user',) class UserForm(forms.ModelForm): - def __init__(self, *args, **kwargs): - super(UserForm, self).__init__(*args, **kwargs) - self.fields['first_name'].required = True - self.fields['last_name'].required = True - class Meta: - model = User - fields = ('first_name', 'last_name', ) + def __init__(self, *args, **kwargs): + super(UserForm, self).__init__(*args, **kwargs) + self.fields['first_name'].required = True + self.fields['last_name'].required = True + class Meta: + model = User + fields = ('first_name', 'last_name', ) class ContributorForm(forms.ModelForm): - class Meta: - model = Contributor - exclude = ('ctype', 'lastname', 'firstname', 'user', ) + class Meta: + model = Contributor + exclude = ('ctype', 'lastname', 'firstname', 'user', ) class ChangeEmailForm(forms.Form): - email = forms.EmailField() - email2 = forms.EmailField(label="Repeat email") + email = forms.EmailField() + email2 = forms.EmailField(label="Repeat email") - def __init__(self, user, *args, **kwargs): - super(ChangeEmailForm, self).__init__(*args, **kwargs) - self.user = user + def __init__(self, user, *args, **kwargs): + super(ChangeEmailForm, self).__init__(*args, **kwargs) + self.user = user - def clean_email(self): - email = self.cleaned_data['email'].lower() + def clean_email(self): + email = self.cleaned_data['email'].lower() - if email == self.user.email: - raise forms.ValidationError("This is your existing email address!") + if email == self.user.email: + raise forms.ValidationError("This is your existing email address!") - if User.objects.filter(email=email).exists(): - raise forms.ValidationError("A user with this email address is already registered") + if User.objects.filter(email=email).exists(): + raise forms.ValidationError("A user with this email address is already registered") - return email + return email - def clean_email2(self): - # If the primary email checker had an exception, the data will be gone - # 
from the cleaned_data structure - if not self.cleaned_data.has_key('email'): - return self.cleaned_data['email2'].lower() - email1 = self.cleaned_data['email'].lower() - email2 = self.cleaned_data['email2'].lower() + def clean_email2(self): + # If the primary email checker had an exception, the data will be gone + # from the cleaned_data structure + if not self.cleaned_data.has_key('email'): + return self.cleaned_data['email2'].lower() + email1 = self.cleaned_data['email'].lower() + email2 = self.cleaned_data['email2'].lower() - if email1 != email2: - raise forms.ValidationError("Email addresses don't match") - return email2 + if email1 != email2: + raise forms.ValidationError("Email addresses don't match") + return email2 class PgwebPasswordResetForm(forms.Form): - email = forms.EmailField() + email = forms.EmailField() diff --git a/pgweb/account/migrations/0002_lowercase_email.py b/pgweb/account/migrations/0002_lowercase_email.py index 4a40849f..e4d46ab1 100644 --- a/pgweb/account/migrations/0002_lowercase_email.py +++ b/pgweb/account/migrations/0002_lowercase_email.py @@ -11,7 +11,7 @@ class Migration(migrations.Migration): ] operations = [ - migrations.RunSQL("UPDATE auth_user SET email=lower(email) WHERE email!=lower(email)"), - migrations.RunSQL("ALTER TABLE auth_user ADD CONSTRAINT email_must_be_lowercase CHECK (email=lower(email))"), - migrations.RunSQL("CREATE UNIQUE INDEX auth_user_email_lower_key ON auth_user USING btree(lower(email))"), + migrations.RunSQL("UPDATE auth_user SET email=lower(email) WHERE email!=lower(email)"), + migrations.RunSQL("ALTER TABLE auth_user ADD CONSTRAINT email_must_be_lowercase CHECK (email=lower(email))"), + migrations.RunSQL("CREATE UNIQUE INDEX auth_user_email_lower_key ON auth_user USING btree(lower(email))"), ] diff --git a/pgweb/account/models.py b/pgweb/account/models.py index 269430eb..0c4de48b 100644 --- a/pgweb/account/models.py +++ b/pgweb/account/models.py @@ -2,37 +2,37 @@ from django.db import models from django.contrib.auth.models import User class CommunityAuthOrg(models.Model): - orgname = models.CharField(max_length=100, null=False, blank=False, - help_text="Name of the organisation") - require_consent = models.BooleanField(null=False, blank=False, default=True) + orgname = models.CharField(max_length=100, null=False, blank=False, + help_text="Name of the organisation") + require_consent = models.BooleanField(null=False, blank=False, default=True) - def __unicode__(self): - return self.orgname + def __unicode__(self): + return self.orgname class CommunityAuthSite(models.Model): - name = models.CharField(max_length=100, null=False, blank=False, - help_text="Note that the value in this field is shown on the login page, so make sure it's user-friendly!") - redirecturl = models.URLField(max_length=200, null=False, blank=False) - cryptkey = models.CharField(max_length=100, null=False, blank=False, - help_text="Use tools/communityauth/generate_cryptkey.py to create a key") - comment = models.TextField(null=False, blank=True) - org = models.ForeignKey(CommunityAuthOrg, null=False, blank=False) - cooloff_hours = models.IntegerField(null=False, blank=False, default=0, - help_text="Number of hours a user must have existed in the systems before allowed to log in to this site") + name = models.CharField(max_length=100, null=False, blank=False, + help_text="Note that the value in this field is shown on the login page, so make sure it's user-friendly!") + redirecturl = models.URLField(max_length=200, null=False, blank=False) + cryptkey = 
models.CharField(max_length=100, null=False, blank=False, + help_text="Use tools/communityauth/generate_cryptkey.py to create a key") + comment = models.TextField(null=False, blank=True) + org = models.ForeignKey(CommunityAuthOrg, null=False, blank=False) + cooloff_hours = models.IntegerField(null=False, blank=False, default=0, + help_text="Number of hours a user must have existed in the systems before allowed to log in to this site") - def __unicode__(self): - return self.name + def __unicode__(self): + return self.name class CommunityAuthConsent(models.Model): - user = models.ForeignKey(User, null=False, blank=False) - org = models.ForeignKey(CommunityAuthOrg, null=False, blank=False) - consentgiven = models.DateTimeField(null=False, blank=False) + user = models.ForeignKey(User, null=False, blank=False) + org = models.ForeignKey(CommunityAuthOrg, null=False, blank=False) + consentgiven = models.DateTimeField(null=False, blank=False) - class Meta: - unique_together = (('user', 'org'), ) + class Meta: + unique_together = (('user', 'org'), ) class EmailChangeToken(models.Model): - user = models.OneToOneField(User, null=False, blank=False) - email = models.EmailField(max_length=75, null=False, blank=False) - token = models.CharField(max_length=100, null=False, blank=False) - sentat = models.DateTimeField(null=False, blank=False, auto_now=True) + user = models.OneToOneField(User, null=False, blank=False) + email = models.EmailField(max_length=75, null=False, blank=False) + token = models.CharField(max_length=100, null=False, blank=False) + sentat = models.DateTimeField(null=False, blank=False, auto_now=True) diff --git a/pgweb/account/oauthclient.py b/pgweb/account/oauthclient.py index 194bf2e5..27c79848 100644 --- a/pgweb/account/oauthclient.py +++ b/pgweb/account/oauthclient.py @@ -12,71 +12,71 @@ log = logging.getLogger(__name__) class OAuthException(Exception): - pass + pass # # Generic OAuth login for multiple providers # def _login_oauth(request, provider, authurl, tokenurl, scope, authdatafunc): - from requests_oauthlib import OAuth2Session + from requests_oauthlib import OAuth2Session - client_id = settings.OAUTH[provider]['clientid'] - client_secret = settings.OAUTH[provider]['secret'] - redir = '{0}/account/login/{1}/'.format(settings.SITE_ROOT, provider) + client_id = settings.OAUTH[provider]['clientid'] + client_secret = settings.OAUTH[provider]['secret'] + redir = '{0}/account/login/{1}/'.format(settings.SITE_ROOT, provider) - oa = OAuth2Session(client_id, scope=scope, redirect_uri=redir) - if request.GET.has_key('code'): - log.info("Completing {0} oauth2 step from {1}".format(provider, get_client_ip(request))) + oa = OAuth2Session(client_id, scope=scope, redirect_uri=redir) + if request.GET.has_key('code'): + log.info("Completing {0} oauth2 step from {1}".format(provider, get_client_ip(request))) - # Receiving a login request from the provider, so validate data - # and log the user in. - if request.GET.get('state', '') != request.session.pop('oauth_state'): - log.warning("Invalid state received in {0} oauth2 step from {1}".format(provider, get_client_ip(request))) - raise OAuthException("Invalid OAuth state received") + # Receiving a login request from the provider, so validate data + # and log the user in. 
+ if request.GET.get('state', '') != request.session.pop('oauth_state'): + log.warning("Invalid state received in {0} oauth2 step from {1}".format(provider, get_client_ip(request))) + raise OAuthException("Invalid OAuth state received") - token = oa.fetch_token(tokenurl, - client_secret=client_secret, - code=request.GET['code']) - try: - (email, firstname, lastname) = authdatafunc(oa) - email = email.lower() - except KeyError, e: - log.warning("Oauth signing using {0} was missing data: {1}".format(provider, e)) - return HttpResponse('OAuth login was missing critical data. To log in, you need to allow access to email, first name and last name!') + token = oa.fetch_token(tokenurl, + client_secret=client_secret, + code=request.GET['code']) + try: + (email, firstname, lastname) = authdatafunc(oa) + email = email.lower() + except KeyError, e: + log.warning("Oauth signing using {0} was missing data: {1}".format(provider, e)) + return HttpResponse('OAuth login was missing critical data. To log in, you need to allow access to email, first name and last name!') - try: - user = User.objects.get(email=email) - except User.DoesNotExist: - log.info("Oauth signin of {0} using {1} from {2}. User not found, offering signup.".format(email, provider, get_client_ip(request))) + try: + user = User.objects.get(email=email) + except User.DoesNotExist: + log.info("Oauth signin of {0} using {1} from {2}. User not found, offering signup.".format(email, provider, get_client_ip(request))) - # Offer the user a chance to sign up. The full flow is - # handled elsewhere, so store the details we got from - # the oauth login in the session, and pass the user on. - request.session['oauth_email'] = email - request.session['oauth_firstname'] = firstname or '' - request.session['oauth_lastname'] = lastname or '' - return HttpResponseRedirect('/account/signup/oauth/') + # Offer the user a chance to sign up. The full flow is + # handled elsewhere, so store the details we got from + # the oauth login in the session, and pass the user on. 
+ request.session['oauth_email'] = email + request.session['oauth_firstname'] = firstname or '' + request.session['oauth_lastname'] = lastname or '' + return HttpResponseRedirect('/account/signup/oauth/') - log.info("Oauth signin of {0} using {1} from {2}.".format(email, provider, get_client_ip(request))) + log.info("Oauth signin of {0} using {1} from {2}.".format(email, provider, get_client_ip(request))) - user.backend = settings.AUTHENTICATION_BACKENDS[0] - django_login(request, user) - n = request.session.pop('login_next') - if n: - return HttpResponseRedirect(n) - else: - return HttpResponseRedirect('/account/') - else: - log.info("Initiating {0} oauth2 step from {1}".format(provider, get_client_ip(request))) - # First step is redirect to provider - authorization_url, state = oa.authorization_url( - authurl, - prompt='consent', - ) - request.session['login_next'] = request.GET.get('next', '') - request.session['oauth_state'] = state - request.session.modified = True - return HttpResponseRedirect(authorization_url) + user.backend = settings.AUTHENTICATION_BACKENDS[0] + django_login(request, user) + n = request.session.pop('login_next') + if n: + return HttpResponseRedirect(n) + else: + return HttpResponseRedirect('/account/') + else: + log.info("Initiating {0} oauth2 step from {1}".format(provider, get_client_ip(request))) + # First step is redirect to provider + authorization_url, state = oa.authorization_url( + authurl, + prompt='consent', + ) + request.session['login_next'] = request.GET.get('next', '') + request.session['oauth_state'] = state + request.session.modified = True + return HttpResponseRedirect(authorization_url) # @@ -84,82 +84,82 @@ def _login_oauth(request, provider, authurl, tokenurl, scope, authdatafunc): # Registration: https://console.developers.google.com/apis/ # def oauth_login_google(request): - def _google_auth_data(oa): - r = oa.get('https://www.googleapis.com/oauth2/v1/userinfo').json() - if not r['verified_email']: - raise OAuthException("The email in your google profile must be verified in order to log in") - return (r['email'], - r.get('given_name', ''), - r.get('family_name', '')) + def _google_auth_data(oa): + r = oa.get('https://www.googleapis.com/oauth2/v1/userinfo').json() + if not r['verified_email']: + raise OAuthException("The email in your google profile must be verified in order to log in") + return (r['email'], + r.get('given_name', ''), + r.get('family_name', '')) - return _login_oauth( - request, - 'google', - 'https://accounts.google.com/o/oauth2/v2/auth', - 'https://accounts.google.com/o/oauth2/token', - ['https://www.googleapis.com/auth/userinfo.email', - 'https://www.googleapis.com/auth/userinfo.profile'], - _google_auth_data) + return _login_oauth( + request, + 'google', + 'https://accounts.google.com/o/oauth2/v2/auth', + 'https://accounts.google.com/o/oauth2/token', + ['https://www.googleapis.com/auth/userinfo.email', + 'https://www.googleapis.com/auth/userinfo.profile'], + _google_auth_data) # # Github login # Registration: https://github.com/settings/developers # def oauth_login_github(request): - def _github_auth_data(oa): - # Github just returns full name, so we're just going to have to - # split that. - r = oa.get('https://api.github.com/user').json() - if 'name' in r and r['name']: - n = r['name'].split(None, 1) - # Some accounts only have one name, extend with an empty - # lastname, so the user can fill it out manually. 
- while len(n) < 2:
- n.append('')
- else:
- # Some github accounts have no name on them, so we can just
- # let the user fill it out manually in that case.
- n = ['','']
- # Email is at a separate endpoint
- r = oa.get('https://api.github.com/user/emails').json()
- for e in r:
- if e['verified'] and e['primary']:
- return (
- e['email'],
- n[0],
- n[1],
- )
- raise OAuthException("Your GitHub profile must include a verified email address in order to log in")
+ def _github_auth_data(oa):
+ # Github just returns full name, so we're just going to have to
+ # split that.
+ r = oa.get('https://api.github.com/user').json()
+ if 'name' in r and r['name']:
+ n = r['name'].split(None, 1)
+ # Some accounts only have one name, extend with an empty
+ # lastname, so the user can fill it out manually.
+ while len(n) < 2:
+ n.append('')
+ else:
+ # Some github accounts have no name on them, so we can just
+ # let the user fill it out manually in that case.
+ n = ['','']
+ # Email is at a separate endpoint
+ r = oa.get('https://api.github.com/user/emails').json()
+ for e in r:
+ if e['verified'] and e['primary']:
+ return (
+ e['email'],
+ n[0],
+ n[1],
+ )
+ raise OAuthException("Your GitHub profile must include a verified email address in order to log in")

- return _login_oauth(
- request,
- 'github',
- 'https://github.com/login/oauth/authorize',
- 'https://github.com/login/oauth/access_token',
- ['user:email', ],
- _github_auth_data)
+ return _login_oauth(
+ request,
+ 'github',
+ 'https://github.com/login/oauth/authorize',
+ 'https://github.com/login/oauth/access_token',
+ ['user:email', ],
+ _github_auth_data)

#
# Facebook login
# Registration: https://developers.facebook.com/apps
#
def oauth_login_facebook(request):
- def _facebook_auth_data(oa):
- r = oa.get('https://graph.facebook.com/me?fields=email,first_name,last_name').json()
- if not 'email' in r:
- raise OAuthException("Your Facebook profile must provide an email address in order to log in")
+ def _facebook_auth_data(oa):
+ r = oa.get('https://graph.facebook.com/me?fields=email,first_name,last_name').json()
+ if not 'email' in r:
+ raise OAuthException("Your Facebook profile must provide an email address in order to log in")

- return (r['email'],
- r.get('first_name', ''),
- r.get('last_name', ''))
+ return (r['email'],
+ r.get('first_name', ''),
+ r.get('last_name', ''))

- return _login_oauth(
- request,
- 'facebook',
- 'https://www.facebook.com/dialog/oauth',
- 'https://graph.facebook.com/oauth/access_token',
- ['public_profile', 'email', ],
- _facebook_auth_data)
+ return _login_oauth(
+ request,
+ 'facebook',
+ 'https://www.facebook.com/dialog/oauth',
+ 'https://graph.facebook.com/oauth/access_token',
+ ['public_profile', 'email', ],
+ _facebook_auth_data)

#
@@ -167,32 +167,32 @@ def oauth_login_facebook(request):
# Registration: https://apps.dev.microsoft.com/
#
def oauth_login_microsoft(request):
- def _microsoft_auth_data(oa):
- r = oa.get("https://apis.live.net/v5.0/me").json()
- if not 'emails' in r or not 'account' in r['emails']:
- raise OAuthException("Your Microsoft profile must provide an email address in order to log in")
+ def _microsoft_auth_data(oa):
+ r = oa.get("https://apis.live.net/v5.0/me").json()
+ if not 'emails' in r or not 'account' in r['emails']:
+ raise OAuthException("Your Microsoft profile must provide an email address in order to log in")

- return (r['emails']['account'],
- r.get('first_name', ''),
- r.get('last_name', ''))
+ return (r['emails']['account'],
+ r.get('first_name', ''),
+ r.get('last_name', ''))
- return _login_oauth(
- request,
- 'microsoft',
- 'https://login.live.com/oauth20_authorize.srf',
- 'https://login.live.com/oauth20_token.srf',
- ['wl.basic', 'wl.emails' ],
- _microsoft_auth_data)
+ return _login_oauth(
+ request,
+ 'microsoft',
+ 'https://login.live.com/oauth20_authorize.srf',
+ 'https://login.live.com/oauth20_token.srf',
+ ['wl.basic', 'wl.emails' ],
+ _microsoft_auth_data)

def login_oauth(request, provider):
- fn = 'oauth_login_{0}'.format(provider)
- m = sys.modules[__name__]
- if hasattr(m, fn):
- try:
- return getattr(m, fn)(request)
- except OAuthException, e:
- return HttpResponse(e)
- except Exception, e:
- log.error('Exception during OAuth: %s' % e)
- return HttpResponse('An unhandled exception occurred during the authentication process')
+ fn = 'oauth_login_{0}'.format(provider)
+ m = sys.modules[__name__]
+ if hasattr(m, fn):
+ try:
+ return getattr(m, fn)(request)
+ except OAuthException, e:
+ return HttpResponse(e)
+ except Exception, e:
+ log.error('Exception during OAuth: %s' % e)
+ return HttpResponse('An unhandled exception occurred during the authentication process')
diff --git a/pgweb/account/recaptcha.py b/pgweb/account/recaptcha.py
index 0af7b1bf..9b667955 100644
--- a/pgweb/account/recaptcha.py
+++ b/pgweb/account/recaptcha.py
@@ -15,77 +15,77 @@ import logging
log = logging.getLogger(__name__)

class ReCaptchaWidget(forms.widgets.Widget):
- def render(self, name, value, attrs=None):
- if settings.NOCAPTCHA:
- return u'Captcha disabled on this system'
- log.info("Generated captcha")
- return mark_safe(u'<div class="g-recaptcha" data-sitekey="{0}"></div>'.format(settings.RECAPTCHA_SITE_KEY))
+ def render(self, name, value, attrs=None):
+ if settings.NOCAPTCHA:
+ return u'Captcha disabled on this system'
+ log.info("Generated captcha")
+ return mark_safe(u'<div class="g-recaptcha" data-sitekey="{0}"></div>
'.format(settings.RECAPTCHA_SITE_KEY)) - def value_from_datadict(self, data, files, name): - if settings.NOCAPTCHA: - return None - if data.has_key('g-recaptcha-response'): - return data['g-recaptcha-response'] - return None + def value_from_datadict(self, data, files, name): + if settings.NOCAPTCHA: + return None + if data.has_key('g-recaptcha-response'): + return data['g-recaptcha-response'] + return None class ReCaptchaField(forms.CharField): - def __init__(self, *args, **kwargs): - self.remoteip = None - self.widget = ReCaptchaWidget() - self.required = not settings.NOCAPTCHA - super(ReCaptchaField, self).__init__(*args, **kwargs) + def __init__(self, *args, **kwargs): + self.remoteip = None + self.widget = ReCaptchaWidget() + self.required = not settings.NOCAPTCHA + super(ReCaptchaField, self).__init__(*args, **kwargs) - def set_ip(self, ip): - self.remoteip = ip + def set_ip(self, ip): + self.remoteip = ip - def clean(self, value): - if settings.NOCAPTCHA: - return True + def clean(self, value): + if settings.NOCAPTCHA: + return True - super(ReCaptchaField, self).clean(value) + super(ReCaptchaField, self).clean(value) - # Validate the recaptcha - c = httplib.HTTPSConnection('www.google.com', strict=True, timeout=5) - param = { - 'secret': settings.RECAPTCHA_SECRET_KEY, - 'response': value, - } + # Validate the recaptcha + c = httplib.HTTPSConnection('www.google.com', strict=True, timeout=5) + param = { + 'secret': settings.RECAPTCHA_SECRET_KEY, + 'response': value, + } - # Temporarily don't include remoteip, because it only shows our ssl terminating - # frontends. -# if self.remoteip: -# param['remoteip'] = self.remoteip + # Temporarily don't include remoteip, because it only shows our ssl terminating + # frontends. +# if self.remoteip: +# param['remoteip'] = self.remoteip - try: - c.request('POST', '/recaptcha/api/siteverify', urllib.urlencode(param), { - 'Content-type': 'application/x-www-form-urlencoded', - }) - c.sock.settimeout(10) - except Exception, e: - # Error to connect at TCP level - log.error('Failed to connect to google recaptcha API: %s' % e) - raise ValidationError('Failed in API call to google recaptcha') + try: + c.request('POST', '/recaptcha/api/siteverify', urllib.urlencode(param), { + 'Content-type': 'application/x-www-form-urlencoded', + }) + c.sock.settimeout(10) + except Exception, e: + # Error to connect at TCP level + log.error('Failed to connect to google recaptcha API: %s' % e) + raise ValidationError('Failed in API call to google recaptcha') - try: - r = c.getresponse() - except: - log.error('Failed in API call to google recaptcha') - raise ValidationError('Failed in API call to google recaptcha') - if r.status != 200: - log.error('Invalid response code from google recaptcha') - raise ValidationError('Invalid response code from google recaptcha') + try: + r = c.getresponse() + except: + log.error('Failed in API call to google recaptcha') + raise ValidationError('Failed in API call to google recaptcha') + if r.status != 200: + log.error('Invalid response code from google recaptcha') + raise ValidationError('Invalid response code from google recaptcha') - try: - j = json.loads(r.read()) - except: - log.error('Invalid response structure from google recaptcha') - raise ValidationError('Invalid response structure from google recaptcha') + try: + j = json.loads(r.read()) + except: + log.error('Invalid response structure from google recaptcha') + raise ValidationError('Invalid response structure from google recaptcha') - if not j['success']: - 
log.warning('Incorrect recaptcha entered. Trying again.')
- raise ValidationError('Invalid. Try again.')
+ if not j['success']:
+ log.warning('Incorrect recaptcha entered. Trying again.')
+ raise ValidationError('Invalid. Try again.')

- # Recaptcha validated ok!
- log.info("Successful recaptcha validation")
- return True
+ # Recaptcha validated ok!
+ log.info("Successful recaptcha validation")
+ return True
diff --git a/pgweb/account/urls.py b/pgweb/account/urls.py
index cee17968..9467ca0b 100644
--- a/pgweb/account/urls.py
+++ b/pgweb/account/urls.py
@@ -5,53 +5,53 @@ import pgweb.account.views
import pgweb.account.oauthclient

urlpatterns = [
- url(r'^$', pgweb.account.views.home),
+ url(r'^$', pgweb.account.views.home),

- # Community authentication
- url(r'^auth/(\d+)/$', pgweb.account.views.communityauth),
- url(r'^auth/(\d+)/logout/$', pgweb.account.views.communityauth_logout),
- url(r'^auth/(\d+)/consent/$', pgweb.account.views.communityauth_consent),
- url(r'^auth/(\d+)/search/$', pgweb.account.views.communityauth_search),
- url(r'^auth/(\d+)/getkeys/(\d+/)?$', pgweb.account.views.communityauth_getkeys),
+ # Community authentication
+ url(r'^auth/(\d+)/$', pgweb.account.views.communityauth),
+ url(r'^auth/(\d+)/logout/$', pgweb.account.views.communityauth_logout),
+ url(r'^auth/(\d+)/consent/$', pgweb.account.views.communityauth_consent),
+ url(r'^auth/(\d+)/search/$', pgweb.account.views.communityauth_search),
+ url(r'^auth/(\d+)/getkeys/(\d+/)?$', pgweb.account.views.communityauth_getkeys),

- # Profile
- url(r'^profile/$', pgweb.account.views.profile),
- url(r'^profile/change_email/$', pgweb.account.views.change_email),
- url(r'^profile/change_email/([0-9a-f]+)/$', pgweb.account.views.confirm_change_email),
+ # Profile
+ url(r'^profile/$', pgweb.account.views.profile),
+ url(r'^profile/change_email/$', pgweb.account.views.change_email),
+ url(r'^profile/change_email/([0-9a-f]+)/$', pgweb.account.views.confirm_change_email),

- # List of items to edit
- url(r'^edit/(.*)/$', pgweb.account.views.listobjects),
+ # List of items to edit
+ url(r'^edit/(.*)/$', pgweb.account.views.listobjects),

- # News & Events
- url(r'^news/(.*)/$', pgweb.news.views.form),
- url(r'^events/(.*)/$', pgweb.events.views.form),
+ # News & Events
+ url(r'^news/(.*)/$', pgweb.news.views.form),
+ url(r'^events/(.*)/$', pgweb.events.views.form),

- # Software catalogue
- url(r'^organisations/(.*)/$', pgweb.core.views.organisationform),
- url(r'^products/(.*)/$', pgweb.downloads.views.productform),
+ # Software catalogue
+ url(r'^organisations/(.*)/$', pgweb.core.views.organisationform),
+ url(r'^products/(.*)/$', pgweb.downloads.views.productform),

- # Organisation information
- url(r'^orglist/$', pgweb.account.views.orglist),
+ # Organisation information
+ url(r'^orglist/$', pgweb.account.views.orglist),

- # Professional services
- url(r'^services/(.*)/$', pgweb.profserv.views.profservform),
+ # Professional services
+ url(r'^services/(.*)/$', pgweb.profserv.views.profservform),

- # Docs comments
- url(r'^comments/(new)/(.*)/(.*)/$', pgweb.docs.views.commentform),
+ # Docs comments
+ url(r'^comments/(new)/(.*)/(.*)/$', pgweb.docs.views.commentform),

- # Log in, logout, change password etc
- url(r'^login/$', pgweb.account.views.login),
- url(r'^logout/$', pgweb.account.views.logout),
- url(r'^changepwd/$', pgweb.account.views.changepwd),
- url(r'^changepwd/done/$', pgweb.account.views.change_done),
- url(r'^reset/$', pgweb.account.views.resetpwd),
- url(r'^reset/done/$', pgweb.account.views.reset_done),
- url(r'^reset/(?P<uidb64>[0-9A-Za-z_\-]+)-(?P<token>[0-9A-Za-z]{1,13}-[0-9A-Za-z]{1,20})/$', pgweb.account.views.reset_confirm),
- url(r'^reset/complete/$', pgweb.account.views.reset_complete),
- url(r'^signup/$', pgweb.account.views.signup),
- url(r'^signup/complete/$', pgweb.account.views.signup_complete),
- url(r'^signup/oauth/$', pgweb.account.views.signup_oauth),
+ # Log in, logout, change password etc
+ url(r'^login/$', pgweb.account.views.login),
+ url(r'^logout/$', pgweb.account.views.logout),
+ url(r'^changepwd/$', pgweb.account.views.changepwd),
+ url(r'^changepwd/done/$', pgweb.account.views.change_done),
+ url(r'^reset/$', pgweb.account.views.resetpwd),
+ url(r'^reset/done/$', pgweb.account.views.reset_done),
+ url(r'^reset/(?P<uidb64>[0-9A-Za-z_\-]+)-(?P<token>[0-9A-Za-z]{1,13}-[0-9A-Za-z]{1,20})/$', pgweb.account.views.reset_confirm),
+ url(r'^reset/complete/$', pgweb.account.views.reset_complete),
+ url(r'^signup/$', pgweb.account.views.signup),
+ url(r'^signup/complete/$', pgweb.account.views.signup_complete),
+ url(r'^signup/oauth/$', pgweb.account.views.signup_oauth),
]

for provider in settings.OAUTH.keys():
- urlpatterns.append(url(r'^login/({0})/$'.format(provider), pgweb.account.oauthclient.login_oauth))
+ urlpatterns.append(url(r'^login/({0})/$'.format(provider), pgweb.account.oauthclient.login_oauth))
diff --git a/pgweb/account/views.py b/pgweb/account/views.py
index 97ae3cf1..f210013e 100644
--- a/pgweb/account/views.py
+++ b/pgweb/account/views.py
@@ -48,308 +48,308 @@ OAUTH_PASSWORD_STORE='oauth_signin_account_no_password'

@login_required
def home(request):
- myarticles = NewsArticle.objects.filter(org__managers=request.user, approved=False)
- myevents = Event.objects.filter(org__managers=request.user, approved=False)
- myorgs = Organisation.objects.filter(managers=request.user, approved=False)
- myproducts = Product.objects.filter(org__managers=request.user, approved=False)
- myprofservs = ProfessionalService.objects.filter(org__managers=request.user, approved=False)
- return render_pgweb(request, 'account', 'account/index.html', {
- 'newsarticles': myarticles,
- 'events': myevents,
- 'organisations': myorgs,
- 'products': myproducts,
- 'profservs': myprofservs,
- })
+ myarticles = NewsArticle.objects.filter(org__managers=request.user, approved=False)
+ myevents = Event.objects.filter(org__managers=request.user, approved=False)
+ myorgs = Organisation.objects.filter(managers=request.user, approved=False)
+ myproducts = Product.objects.filter(org__managers=request.user, approved=False)
+ myprofservs = ProfessionalService.objects.filter(org__managers=request.user, approved=False)
+ return render_pgweb(request, 'account', 'account/index.html', {
+ 'newsarticles': myarticles,
+ 'events': myevents,
+ 'organisations': myorgs,
+ 'products': myproducts,
+ 'profservs': myprofservs,
+ })

objtypes = {
- 'news': {
- 'title': 'News Article',
- 'objects': lambda u: NewsArticle.objects.filter(org__managers=u),
- },
- 'events': {
- 'title': 'Event',
- 'objects': lambda u: Event.objects.filter(org__managers=u),
+ 'news': {
+ 'title': 'News Article',
+ 'objects': lambda u: NewsArticle.objects.filter(org__managers=u),
+ },
+ 'events': {
+ 'title': 'Event',
+ 'objects': lambda u: Event.objects.filter(org__managers=u),
+ },
+ 'products': {
+ 'title': 'Product',
+ 'objects': lambda u: Product.objects.filter(org__managers=u),
+ },
+ 'services': {
+ 'title': 'Professional Service',
+ 'objects': lambda u: ProfessionalService.objects.filter(org__managers=u),
+ },
+ 'organisations': {
+ 'title': 'Organisation',
'objects': lambda u: Organisation.objects.filter(managers=u), + 'submit_header': 'Before submitting a new Organisation, please verify on the list of current organisations if the organisation already exists. If it does, please contact the manager of the organisation to gain permissions.', }, - 'products': { - 'title': 'Product', - 'objects': lambda u: Product.objects.filter(org__managers=u), - }, - 'services': { - 'title': 'Professional Service', - 'objects': lambda u: ProfessionalService.objects.filter(org__managers=u), - }, - 'organisations': { - 'title': 'Organisation', - 'objects': lambda u: Organisation.objects.filter(managers=u), - 'submit_header': 'Before submitting a new Organisation, please verify on the list of current organisations if the organisation already exists. If it does, please contact the manager of the organisation to gain permissions.', - }, } @login_required @transaction.atomic def profile(request): - # We always have the user, but not always the profile. And we need a bit - # of a hack around the normal forms code since we have two different - # models on a single form. - (profile, created) = UserProfile.objects.get_or_create(pk=request.user.pk) + # We always have the user, but not always the profile. And we need a bit + # of a hack around the normal forms code since we have two different + # models on a single form. + (profile, created) = UserProfile.objects.get_or_create(pk=request.user.pk) - # Don't allow users whose accounts were created via oauth to change - # their email, since that would kill the connection between the - # accounts. - can_change_email = (request.user.password != OAUTH_PASSWORD_STORE) + # Don't allow users whose accounts were created via oauth to change + # their email, since that would kill the connection between the + # accounts. + can_change_email = (request.user.password != OAUTH_PASSWORD_STORE) - # We may have a contributor record - and we only show that part of the - # form if we have it for this user. - try: - contrib = Contributor.objects.get(user=request.user.pk) - except Contributor.DoesNotExist: - contrib = None + # We may have a contributor record - and we only show that part of the + # form if we have it for this user. 
+ try: + contrib = Contributor.objects.get(user=request.user.pk) + except Contributor.DoesNotExist: + contrib = None - contribform = None + contribform = None - if request.method == 'POST': - # Process this form - userform = UserForm(data=request.POST, instance=request.user) - profileform = UserProfileForm(data=request.POST, instance=profile) - if contrib: - contribform = ContributorForm(data=request.POST, instance=contrib) + if request.method == 'POST': + # Process this form + userform = UserForm(data=request.POST, instance=request.user) + profileform = UserProfileForm(data=request.POST, instance=profile) + if contrib: + contribform = ContributorForm(data=request.POST, instance=contrib) - if userform.is_valid() and profileform.is_valid() and (not contrib or contribform.is_valid()): - userform.save() - profileform.save() - if contrib: - contribform.save() - return HttpResponseRedirect("/account/") - else: - # Generate form - userform = UserForm(instance=request.user) - profileform = UserProfileForm(instance=profile) - if contrib: - contribform = ContributorForm(instance=contrib) + if userform.is_valid() and profileform.is_valid() and (not contrib or contribform.is_valid()): + userform.save() + profileform.save() + if contrib: + contribform.save() + return HttpResponseRedirect("/account/") + else: + # Generate form + userform = UserForm(instance=request.user) + profileform = UserProfileForm(instance=profile) + if contrib: + contribform = ContributorForm(instance=contrib) - return render_pgweb(request, 'account', 'account/userprofileform.html', { - 'userform': userform, - 'profileform': profileform, - 'contribform': contribform, - 'can_change_email': can_change_email, - }) + return render_pgweb(request, 'account', 'account/userprofileform.html', { + 'userform': userform, + 'profileform': profileform, + 'contribform': contribform, + 'can_change_email': can_change_email, + }) @login_required @transaction.atomic def change_email(request): - tokens = EmailChangeToken.objects.filter(user=request.user) - token = len(tokens) and tokens[0] or None + tokens = EmailChangeToken.objects.filter(user=request.user) + token = len(tokens) and tokens[0] or None - if request.user.password == OAUTH_PASSWORD_STORE: - # Link shouldn't exist in this case, so just throw an unfriendly - # error message. - return HttpServerError(request, "This account cannot change email address as it's connected to a third party login site.") + if request.user.password == OAUTH_PASSWORD_STORE: + # Link shouldn't exist in this case, so just throw an unfriendly + # error message. 
+ return HttpServerError(request, "This account cannot change email address as it's connected to a third party login site.") - if request.method == 'POST': - form = ChangeEmailForm(request.user, data=request.POST) - if form.is_valid(): - # If there is an existing token, delete it - if token: - token.delete() + if request.method == 'POST': + form = ChangeEmailForm(request.user, data=request.POST) + if form.is_valid(): + # If there is an existing token, delete it + if token: + token.delete() - # Create a new token - token = EmailChangeToken(user=request.user, - email=form.cleaned_data['email'].lower(), - token=generate_random_token()) - token.save() + # Create a new token + token = EmailChangeToken(user=request.user, + email=form.cleaned_data['email'].lower(), + token=generate_random_token()) + token.save() - send_template_mail(settings.ACCOUNTS_NOREPLY_FROM, - form.cleaned_data['email'], - 'Your postgresql.org community account', - 'account/email_change_email.txt', - { 'token': token , 'user': request.user, } - ) - return HttpResponseRedirect('done/') - else: - form = ChangeEmailForm(request.user) + send_template_mail(settings.ACCOUNTS_NOREPLY_FROM, + form.cleaned_data['email'], + 'Your postgresql.org community account', + 'account/email_change_email.txt', + { 'token': token , 'user': request.user, } + ) + return HttpResponseRedirect('done/') + else: + form = ChangeEmailForm(request.user) - return render_pgweb(request, 'account', 'account/emailchangeform.html', { - 'form': form, - 'token': token, - }) + return render_pgweb(request, 'account', 'account/emailchangeform.html', { + 'form': form, + 'token': token, + }) @login_required @transaction.atomic def confirm_change_email(request, tokenhash): - tokens = EmailChangeToken.objects.filter(user=request.user, token=tokenhash) - token = len(tokens) and tokens[0] or None + tokens = EmailChangeToken.objects.filter(user=request.user, token=tokenhash) + token = len(tokens) and tokens[0] or None - if request.user.password == OAUTH_PASSWORD_STORE: - # Link shouldn't exist in this case, so just throw an unfriendly - # error message. - return HttpServerError(request, "This account cannot change email address as it's connected to a third party login site.") + if request.user.password == OAUTH_PASSWORD_STORE: + # Link shouldn't exist in this case, so just throw an unfriendly + # error message. 
+ return HttpServerError(request, "This account cannot change email address as it's connected to a third party login site.") - if token: - # Valid token find, so change the email address - request.user.email = token.email.lower() - request.user.save() - token.delete() + if token: + # Valid token find, so change the email address + request.user.email = token.email.lower() + request.user.save() + token.delete() - return render_pgweb(request, 'account', 'account/emailchangecompleted.html', { - 'token': tokenhash, - 'success': token and True or False, - }) + return render_pgweb(request, 'account', 'account/emailchangecompleted.html', { + 'token': tokenhash, + 'success': token and True or False, + }) @login_required def listobjects(request, objtype): - if not objtypes.has_key(objtype): - raise Http404("Object type not found") - o = objtypes[objtype] + if not objtypes.has_key(objtype): + raise Http404("Object type not found") + o = objtypes[objtype] - return render_pgweb(request, 'account', 'account/objectlist.html', { - 'objects': { - 'approved': o['objects'](request.user).filter(approved=True), - 'unapproved': o['objects'](request.user).filter(approved=False), - }, - 'title': o['title'], - 'submit_header': o.has_key('submit_header') and o['submit_header'] or None, - 'suburl': objtype, - }) + return render_pgweb(request, 'account', 'account/objectlist.html', { + 'objects': { + 'approved': o['objects'](request.user).filter(approved=True), + 'unapproved': o['objects'](request.user).filter(approved=False), + }, + 'title': o['title'], + 'submit_header': o.has_key('submit_header') and o['submit_header'] or None, + 'suburl': objtype, + }) @login_required def orglist(request): - orgs = Organisation.objects.filter(approved=True) + orgs = Organisation.objects.filter(approved=True) - return render_pgweb(request, 'account', 'account/orglist.html', { - 'orgs': orgs, - }) + return render_pgweb(request, 'account', 'account/orglist.html', { + 'orgs': orgs, + }) def login(request): - return authviews.login(request, template_name='account/login.html', - authentication_form=PgwebAuthenticationForm, - extra_context={ - 'oauth_providers': [(k,v) for k,v in sorted(settings.OAUTH.items())], - }) + return authviews.login(request, template_name='account/login.html', + authentication_form=PgwebAuthenticationForm, + extra_context={ + 'oauth_providers': [(k,v) for k,v in sorted(settings.OAUTH.items())], + }) def logout(request): - return authviews.logout_then_login(request, login_url='/') + return authviews.logout_then_login(request, login_url='/') def changepwd(request): - if hasattr(request.user, 'password') and request.user.password == OAUTH_PASSWORD_STORE: - return HttpServerError(request, "This account cannot change password as it's connected to a third party login site.") + if hasattr(request.user, 'password') and request.user.password == OAUTH_PASSWORD_STORE: + return HttpServerError(request, "This account cannot change password as it's connected to a third party login site.") - log.info("Initiating password change from {0}".format(get_client_ip(request))) - return authviews.password_change(request, - template_name='account/password_change.html', - post_change_redirect='/account/changepwd/done/') + log.info("Initiating password change from {0}".format(get_client_ip(request))) + return authviews.password_change(request, + template_name='account/password_change.html', + post_change_redirect='/account/changepwd/done/') def resetpwd(request): - # Basic django password reset feature is completely broken. 
For example, it does not support - # resetting passwords for users with "old hashes", which means they have no way to ever - # recover. So implement our own, since it's quite the trivial feature. - if request.method == "POST": - try: - u = User.objects.get(email__iexact=request.POST['email']) - if u.password == OAUTH_PASSWORD_STORE: - return HttpServerError(request, "This account cannot change password as it's connected to a third party login site.") - except User.DoesNotExist: - log.info("Attempting to reset password of {0}, user not found".format(request.POST['email'])) - return HttpResponseRedirect('/account/reset/done/') + # Basic django password reset feature is completely broken. For example, it does not support + # resetting passwords for users with "old hashes", which means they have no way to ever + # recover. So implement our own, since it's quite the trivial feature. + if request.method == "POST": + try: + u = User.objects.get(email__iexact=request.POST['email']) + if u.password == OAUTH_PASSWORD_STORE: + return HttpServerError(request, "This account cannot change password as it's connected to a third party login site.") + except User.DoesNotExist: + log.info("Attempting to reset password of {0}, user not found".format(request.POST['email'])) + return HttpResponseRedirect('/account/reset/done/') - form = PgwebPasswordResetForm(data=request.POST) - if form.is_valid(): - log.info("Initiating password set from {0} for {1}".format(get_client_ip(request), form.cleaned_data['email'])) - token = default_token_generator.make_token(u) - send_template_mail(settings.ACCOUNTS_NOREPLY_FROM, - form.cleaned_data['email'], - 'Password reset for your postgresql.org account', - 'account/password_reset_email.txt', - { - 'user': u, - 'uid': urlsafe_base64_encode(force_bytes(u.pk)), - 'token': token, - }, - ) - return HttpResponseRedirect('/account/reset/done/') - else: - form = PgwebPasswordResetForm() + form = PgwebPasswordResetForm(data=request.POST) + if form.is_valid(): + log.info("Initiating password set from {0} for {1}".format(get_client_ip(request), form.cleaned_data['email'])) + token = default_token_generator.make_token(u) + send_template_mail(settings.ACCOUNTS_NOREPLY_FROM, + form.cleaned_data['email'], + 'Password reset for your postgresql.org account', + 'account/password_reset_email.txt', + { + 'user': u, + 'uid': urlsafe_base64_encode(force_bytes(u.pk)), + 'token': token, + }, + ) + return HttpResponseRedirect('/account/reset/done/') + else: + form = PgwebPasswordResetForm() - return render_pgweb(request, 'account', 'account/password_reset.html', { - 'form': form, - }) + return render_pgweb(request, 'account', 'account/password_reset.html', { + 'form': form, + }) def change_done(request): - log.info("Password change done from {0}".format(get_client_ip(request))) - return authviews.password_change_done(request, template_name='account/password_change_done.html') + log.info("Password change done from {0}".format(get_client_ip(request))) + return authviews.password_change_done(request, template_name='account/password_change_done.html') def reset_done(request): - log.info("Password reset done from {0}".format(get_client_ip(request))) - return authviews.password_reset_done(request, template_name='account/password_reset_done.html') + log.info("Password reset done from {0}".format(get_client_ip(request))) + return authviews.password_reset_done(request, template_name='account/password_reset_done.html') def reset_confirm(request, uidb64, token): - log.info("Confirming password reset for uidb 
{0}, token {1} from {2}".format(uidb64, token, get_client_ip(request))) - return authviews.password_reset_confirm(request, - uidb64=uidb64, - token=token, - template_name='account/password_reset_confirm.html', - post_reset_redirect='/account/reset/complete/') + log.info("Confirming password reset for uidb {0}, token {1} from {2}".format(uidb64, token, get_client_ip(request))) + return authviews.password_reset_confirm(request, + uidb64=uidb64, + token=token, + template_name='account/password_reset_confirm.html', + post_reset_redirect='/account/reset/complete/') def reset_complete(request): - log.info("Password reset completed for user from {0}".format(get_client_ip(request))) - return authviews.password_reset_complete(request, template_name='account/password_reset_complete.html') + log.info("Password reset completed for user from {0}".format(get_client_ip(request))) + return authviews.password_reset_complete(request, template_name='account/password_reset_complete.html') @script_sources('https://www.google.com/recaptcha/') @script_sources('https://www.gstatic.com/recaptcha/') @frame_sources('https://www.google.com/') def signup(request): - if request.user.is_authenticated(): - return HttpServerError(request, "You must log out before you can sign up for a new account") + if request.user.is_authenticated(): + return HttpServerError(request, "You must log out before you can sign up for a new account") - if request.method == 'POST': - # Attempt to create user then, eh? - form = SignupForm(get_client_ip(request), data=request.POST) - if form.is_valid(): - # Attempt to create the user here - # XXX: Do we need to validate something else? - log.info("Creating user for {0} from {1}".format(form.cleaned_data['username'], get_client_ip(request))) + if request.method == 'POST': + # Attempt to create user then, eh? + form = SignupForm(get_client_ip(request), data=request.POST) + if form.is_valid(): + # Attempt to create the user here + # XXX: Do we need to validate something else? + log.info("Creating user for {0} from {1}".format(form.cleaned_data['username'], get_client_ip(request))) - user = User.objects.create_user(form.cleaned_data['username'].lower(), form.cleaned_data['email'].lower(), last_login=datetime.now()) - user.first_name = form.cleaned_data['first_name'] - user.last_name = form.cleaned_data['last_name'] + user = User.objects.create_user(form.cleaned_data['username'].lower(), form.cleaned_data['email'].lower(), last_login=datetime.now()) + user.first_name = form.cleaned_data['first_name'] + user.last_name = form.cleaned_data['last_name'] - # generate a random value for password. It won't be possible to log in with it, but - # it creates more entropy for the token generator (I think). - user.password = generate_random_token() - user.save() + # generate a random value for password. It won't be possible to log in with it, but + # it creates more entropy for the token generator (I think). 
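An aside on the random placeholder password being set here: because a random token is stored where a password hash belongs, no login attempt can ever hash to it, so the account stays unusable until the emailed token is redeemed. A minimal sketch of what such a helper might look like, assuming the real generate_random_token() (defined elsewhere in pgweb) behaves roughly like this:

    import os
    import binascii

    def generate_random_token():
        # Assumption: 64 hex characters drawn from the OS entropy pool.
        # Stored verbatim in User.password it matches no hash format,
        # so it can never authenticate anyone.
        return binascii.hexlify(os.urandom(32)).decode('ascii')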
+ user.password = generate_random_token() + user.save() - # Now generate a token - token = default_token_generator.make_token(user) - log.info("Generated token {0} for user {1} from {2}".format(token, form.cleaned_data['username'], get_client_ip(request))) + # Now generate a token + token = default_token_generator.make_token(user) + log.info("Generated token {0} for user {1} from {2}".format(token, form.cleaned_data['username'], get_client_ip(request))) - # Generate an outgoing email - send_template_mail(settings.ACCOUNTS_NOREPLY_FROM, - form.cleaned_data['email'], - 'Your new postgresql.org community account', - 'account/new_account_email.txt', - { 'uid': urlsafe_base64_encode(force_bytes(user.id)), 'token': token, 'user': user} - ) + # Generate an outgoing email + send_template_mail(settings.ACCOUNTS_NOREPLY_FROM, + form.cleaned_data['email'], + 'Your new postgresql.org community account', + 'account/new_account_email.txt', + { 'uid': urlsafe_base64_encode(force_bytes(user.id)), 'token': token, 'user': user} + ) - return HttpResponseRedirect('/account/signup/complete/') - else: - form = SignupForm(get_client_ip(request)) + return HttpResponseRedirect('/account/signup/complete/') + else: + form = SignupForm(get_client_ip(request)) - return render_pgweb(request, 'account', 'base/form.html', { - 'form': form, - 'formitemtype': 'Account', - 'form_intro': """ + return render_pgweb(request, 'account', 'base/form.html', { + 'form': form, + 'formitemtype': 'Account', + 'form_intro': """ To sign up for a free community account, enter your preferred userid and email address. Note that a community account is only needed if you want to submit information - all content is available for reading without an account. """, - 'savebutton': 'Sign up', - 'operation': 'New', - 'recaptcha': True, - }) + 'savebutton': 'Sign up', + 'operation': 'New', + 'recaptcha': True, + }) def signup_complete(request): - return render_pgweb(request, 'account', 'account/signup_complete.html', { - }) + return render_pgweb(request, 'account', 'account/signup_complete.html', { + }) @script_sources('https://www.google.com/recaptcha/') @@ -357,265 +357,265 @@ def signup_complete(request): @frame_sources('https://www.google.com/') @transaction.atomic def signup_oauth(request): - if not request.session.has_key('oauth_email') \ - or not request.session.has_key('oauth_firstname') \ - or not request.session.has_key('oauth_lastname'): - return HttpServerError(request, 'Invalid redirect received') + if not request.session.has_key('oauth_email') \ + or not request.session.has_key('oauth_firstname') \ + or not request.session.has_key('oauth_lastname'): + return HttpServerError(request, 'Invalid redirect received') - if request.method == 'POST': - # Second stage, so create the account. But verify that the - # nonce matches. - data = request.POST.copy() - data['email'] = request.session['oauth_email'].lower() - data['first_name'] = request.session['oauth_firstname'] - data['last_name'] = request.session['oauth_lastname'] - form = SignupOauthForm(data=data) - if form.is_valid(): - log.info("Creating user for {0} from {1} from oauth signin of email {2}".format(form.cleaned_data['username'], get_client_ip(request), request.session['oauth_email'])) + if request.method == 'POST': + # Second stage, so create the account. But verify that the + # nonce matches. 
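For context on the three session keys checked above, here is a hedged sketch of the hand-off this view depends on. The OAuth callback (implemented in pgweb/account/oauthclient.py; the helper name below is invented for illustration) verifies the provider response, parks the identity in the session, and redirects here:

    from django.http import HttpResponseRedirect

    def _oauth_signup_redirect(request, email, firstname, lastname):
        # Hypothetical helper: stash the provider-verified identity where
        # signup_oauth() expects to find it, then send the browser there.
        request.session['oauth_email'] = email
        request.session['oauth_firstname'] = firstname
        request.session['oauth_lastname'] = lastname
        return HttpResponseRedirect('/account/signup/oauth/')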
+ data = request.POST.copy() + data['email'] = request.session['oauth_email'].lower() + data['first_name'] = request.session['oauth_firstname'] + data['last_name'] = request.session['oauth_lastname'] + form = SignupOauthForm(data=data) + if form.is_valid(): + log.info("Creating user for {0} from {1} from oauth signin of email {2}".format(form.cleaned_data['username'], get_client_ip(request), request.session['oauth_email'])) - user = User.objects.create_user(form.cleaned_data['username'].lower(), - request.session['oauth_email'].lower(), - last_login=datetime.now()) - user.first_name = request.session['oauth_firstname'] - user.last_name = request.session['oauth_lastname'] - user.password = OAUTH_PASSWORD_STORE - user.save() + user = User.objects.create_user(form.cleaned_data['username'].lower(), + request.session['oauth_email'].lower(), + last_login=datetime.now()) + user.first_name = request.session['oauth_firstname'] + user.last_name = request.session['oauth_lastname'] + user.password = OAUTH_PASSWORD_STORE + user.save() - # Clean up our session - del request.session['oauth_email'] - del request.session['oauth_firstname'] - del request.session['oauth_lastname'] - request.session.modified = True + # Clean up our session + del request.session['oauth_email'] + del request.session['oauth_firstname'] + del request.session['oauth_lastname'] + request.session.modified = True - # We can immediately log the user in because their email - # is confirmed. - user.backend = settings.AUTHENTICATION_BACKENDS[0] - django_login(request, user) + # We can immediately log the user in because their email + # is confirmed. + user.backend = settings.AUTHENTICATION_BACKENDS[0] + django_login(request, user) - # Redirect to the sessions page, or to the account page - # if none was given. - return HttpResponseRedirect(request.session.pop('login_next', '/account/')) - elif request.GET.has_key('do_abort'): - del request.session['oauth_email'] - del request.session['oauth_firstname'] - del request.session['oauth_lastname'] - request.session.modified = True - return HttpResponseRedirect(request.session.pop('login_next', '/')) - else: - # Generate possible new username - suggested_username = request.session['oauth_email'].replace('@', '.')[:30] + # Redirect to the sessions page, or to the account page + # if none was given. 
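The login step just above uses a standard Django idiom that is easy to miss: django.contrib.auth.login() expects the user object to carry a backend attribute, which authenticate() normally sets. Since this user was created directly rather than authenticated, the view assigns it by hand. Reduced to its essentials (helper name is ours):

    from django.conf import settings
    from django.contrib.auth import login as django_login

    def force_login(request, user):
        # No credential check happens here -- only safe because the email
        # address was already verified by the OAuth provider.
        user.backend = settings.AUTHENTICATION_BACKENDS[0]
        django_login(request, user)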
+ return HttpResponseRedirect(request.session.pop('login_next', '/account/')) + elif request.GET.has_key('do_abort'): + del request.session['oauth_email'] + del request.session['oauth_firstname'] + del request.session['oauth_lastname'] + request.session.modified = True + return HttpResponseRedirect(request.session.pop('login_next', '/')) + else: + # Generate possible new username + suggested_username = request.session['oauth_email'].replace('@', '.')[:30] - # Auto generation requires firstname and lastname to be specified - f = request.session['oauth_firstname'].lower() - l = request.session['oauth_lastname'].lower() - if f and l: - for u in itertools.chain([ - u"{0}{1}".format(f, l[0]), - u"{0}{1}".format(f[0], l), - ], (u"{0}{1}{2}".format(f, l[0], n) for n in xrange(100))): - if not User.objects.filter(username=u[:30]).exists(): - suggested_username = u[:30] - break + # Auto generation requires firstname and lastname to be specified + f = request.session['oauth_firstname'].lower() + l = request.session['oauth_lastname'].lower() + if f and l: + for u in itertools.chain([ + u"{0}{1}".format(f, l[0]), + u"{0}{1}".format(f[0], l), + ], (u"{0}{1}{2}".format(f, l[0], n) for n in xrange(100))): + if not User.objects.filter(username=u[:30]).exists(): + suggested_username = u[:30] + break - form = SignupOauthForm(initial={ - 'username': suggested_username, - 'email': request.session['oauth_email'].lower(), - 'first_name': request.session['oauth_firstname'][:30], - 'last_name': request.session['oauth_lastname'][:30], - }) + form = SignupOauthForm(initial={ + 'username': suggested_username, + 'email': request.session['oauth_email'].lower(), + 'first_name': request.session['oauth_firstname'][:30], + 'last_name': request.session['oauth_lastname'][:30], + }) - return render_pgweb(request, 'account', 'account/signup_oauth.html', { - 'form': form, - 'operation': 'New account', - 'savebutton': 'Sign up for new account', - 'recaptcha': True, - }) + return render_pgweb(request, 'account', 'account/signup_oauth.html', { + 'form': form, + 'operation': 'New account', + 'savebutton': 'Sign up for new account', + 'recaptcha': True, + }) #### ## Community authentication endpoint #### def communityauth(request, siteid): - # Get whatever site the user is trying to log in to. - site = get_object_or_404(CommunityAuthSite, pk=siteid) + # Get whatever site the user is trying to log in to. + site = get_object_or_404(CommunityAuthSite, pk=siteid) - # "suburl" - old style way of passing parameters - # deprecated - will be removed once all sites have migrated - if request.GET.has_key('su'): - su = request.GET['su'] - if not su.startswith('/'): - su = None - else: - su = None + # "suburl" - old style way of passing parameters + # deprecated - will be removed once all sites have migrated + if request.GET.has_key('su'): + su = request.GET['su'] + if not su.startswith('/'): + su = None + else: + su = None - # "data" - new style way of passing parameter, where we only - # care that it's characters are what's in base64. - if request.GET.has_key('d'): - d = request.GET['d'] - if d != urllib.quote_plus(d, '=$'): - # Invalid character, so drop it - d = None - else: - d = None + # "data" - new style way of passing parameter, where we only + # care that it's characters are what's in base64. 
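The "d" parameter check here relies on a small trick: urllib.quote_plus() returns its input unchanged only when every character is already URL-safe (plus the extra safe set '=$'), so comparing the value with its own quoted form acts as a cheap character whitelist. Illustrative values:

    >>> import urllib                      # Python 2, as used by this codebase
    >>> d = 'aGVsbG8='                     # urlsafe base64-style payload
    >>> d == urllib.quote_plus(d, '=$')
    True
    >>> d = 'foo&bar'                      # '&' would be escaped to '%26'
    >>> d == urllib.quote_plus(d, '=$')
    False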
+ if request.GET.has_key('d'):
+ d = request.GET['d']
+ if d != urllib.quote_plus(d, '=$'):
+ # Invalid character, so drop it
+ d = None
+ else:
+ d = None
- if d:
- urldata = "?d=%s" % d
- elif su:
- urldata = "?su=%s" % su
- else:
- urldata = ""
+ if d:
+ urldata = "?d=%s" % d
+ elif su:
+ urldata = "?su=%s" % su
+ else:
+ urldata = ""
- # Verify if the user is authenticated, and if he/she is not, generate
- # a login form that has information about which site is being logged
- # in to, and basic information about how the community login system
- # works.
- if not request.user.is_authenticated():
- if request.method == "POST" and 'next' in request.POST and 'this_is_the_login_form' in request.POST:
- # This is a postback of the login form. So pick the next field
- # from that one, so we keep it across invalid password entries.
- nexturl = request.POST['next']
- else:
- nexturl = '/account/auth/%s/%s' % (siteid, urldata)
- return authviews.login(request, template_name='account/login.html',
- authentication_form=PgwebAuthenticationForm,
- extra_context={
- 'sitename': site.name,
- 'next': nexturl,
- 'oauth_providers': [(k,v) for k,v in sorted(settings.OAUTH.items())],
- },
- )
+ # Verify if the user is authenticated, and if he/she is not, generate
+ # a login form that has information about which site is being logged
+ # in to, and basic information about how the community login system
+ # works.
+ if not request.user.is_authenticated():
+ if request.method == "POST" and 'next' in request.POST and 'this_is_the_login_form' in request.POST:
+ # This is a postback of the login form. So pick the next field
+ # from that one, so we keep it across invalid password entries.
+ nexturl = request.POST['next']
+ else:
+ nexturl = '/account/auth/%s/%s' % (siteid, urldata)
+ return authviews.login(request, template_name='account/login.html',
+ authentication_form=PgwebAuthenticationForm,
+ extra_context={
+ 'sitename': site.name,
+ 'next': nexturl,
+ 'oauth_providers': [(k,v) for k,v in sorted(settings.OAUTH.items())],
+ },
+ )
- # When we reach this point, the user *has* already been authenticated.
- # The request variable "su" *may* contain a suburl and should in that
- # case be passed along to the site we're authenticating for. And of
- # course, we fill a structure with information about the user.
+ # When we reach this point, the user *has* already been authenticated.
+ # The request variable "su" *may* contain a suburl and should in that
+ # case be passed along to the site we're authenticating for. And of
+ # course, we fill a structure with information about the user.
- if request.user.first_name=='' or request.user.last_name=='' or request.user.email=='': - return render_pgweb(request, 'account', 'account/communityauth_noinfo.html', { - }) + if request.user.first_name=='' or request.user.last_name=='' or request.user.email=='': + return render_pgweb(request, 'account', 'account/communityauth_noinfo.html', { + }) - # Check for cooloff period - if site.cooloff_hours > 0: - if (datetime.now() - request.user.date_joined) < timedelta(hours=site.cooloff_hours): - log.warning("User {0} tried to log in to {1} before cooloff period ended.".format( - request.user.username, site.name)) - return render_pgweb(request, 'account', 'account/communityauth_cooloff.html', { - 'site': site, - }) + # Check for cooloff period + if site.cooloff_hours > 0: + if (datetime.now() - request.user.date_joined) < timedelta(hours=site.cooloff_hours): + log.warning("User {0} tried to log in to {1} before cooloff period ended.".format( + request.user.username, site.name)) + return render_pgweb(request, 'account', 'account/communityauth_cooloff.html', { + 'site': site, + }) - if site.org.require_consent: - if not CommunityAuthConsent.objects.filter(org=site.org, user=request.user).exists(): - return HttpResponseRedirect('/account/auth/{0}/consent/?{1}'.format(siteid, - urllib.urlencode({'next': '/account/auth/{0}/{1}'.format(siteid, urldata)}))) + if site.org.require_consent: + if not CommunityAuthConsent.objects.filter(org=site.org, user=request.user).exists(): + return HttpResponseRedirect('/account/auth/{0}/consent/?{1}'.format(siteid, + urllib.urlencode({'next': '/account/auth/{0}/{1}'.format(siteid, urldata)}))) - info = { - 'u': request.user.username.encode('utf-8'), - 'f': request.user.first_name.encode('utf-8'), - 'l': request.user.last_name.encode('utf-8'), - 'e': request.user.email.encode('utf-8'), - } - if d: - info['d'] = d.encode('utf-8') - elif su: - info['su'] = su.encode('utf-8') + info = { + 'u': request.user.username.encode('utf-8'), + 'f': request.user.first_name.encode('utf-8'), + 'l': request.user.last_name.encode('utf-8'), + 'e': request.user.email.encode('utf-8'), + } + if d: + info['d'] = d.encode('utf-8') + elif su: + info['su'] = su.encode('utf-8') - # Turn this into an URL. Make sure the timestamp is always first, that makes - # the first block more random.. - s = "t=%s&%s" % (int(time.time()), urllib.urlencode(info)) + # Turn this into an URL. Make sure the timestamp is always first, that makes + # the first block more random.. + s = "t=%s&%s" % (int(time.time()), urllib.urlencode(info)) - # Encrypt it with the shared key (and IV!) - r = Random.new() - iv = r.read(16) # Always 16 bytes for AES - encryptor = AES.new(base64.b64decode(site.cryptkey), AES.MODE_CBC, iv) - cipher = encryptor.encrypt(s + ' ' * (16-(len(s) % 16))) #Pad to even 16 bytes + # Encrypt it with the shared key (and IV!) + r = Random.new() + iv = r.read(16) # Always 16 bytes for AES + encryptor = AES.new(base64.b64decode(site.cryptkey), AES.MODE_CBC, iv) + cipher = encryptor.encrypt(s + ' ' * (16-(len(s) % 16))) #Pad to even 16 bytes - # Generate redirect - return HttpResponseRedirect("%s?i=%s&d=%s" % ( - site.redirecturl, - base64.b64encode(iv, "-_"), - base64.b64encode(cipher, "-_"), - )) + # Generate redirect + return HttpResponseRedirect("%s?i=%s&d=%s" % ( + site.redirecturl, + base64.b64encode(iv, "-_"), + base64.b64encode(cipher, "-_"), + )) def communityauth_logout(request, siteid): - # Get whatever site the user is trying to log in to. 
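As a companion to the redirect built above, the receiving site reverses the same steps with the shared key. A sketch under the assumption that PyCrypto is used on both ends (the maintained reference implementation lives in tools/communityauth/sample/django/auth.py):

    import base64
    from Crypto.Cipher import AES

    def decrypt_auth_payload(site_cryptkey, i, d):
        # i and d come from the ?i=...&d=... query string; both use the
        # urlsafe '-_' alphabet, matching b64encode(..., "-_") above.
        iv = base64.b64decode(i, "-_")
        cipher = base64.b64decode(d, "-_")
        decryptor = AES.new(base64.b64decode(site_cryptkey), AES.MODE_CBC, iv)
        # Strip the space padding; the result is a query string like
        # "t=<timestamp>&u=<user>&f=..." whose timestamp must be verified.
        return decryptor.decrypt(cipher).rstrip(' ')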
- site = get_object_or_404(CommunityAuthSite, pk=siteid) + # Get whatever site the user is trying to log in to. + site = get_object_or_404(CommunityAuthSite, pk=siteid) - if request.user.is_authenticated(): - django_logout(request) + if request.user.is_authenticated(): + django_logout(request) - # Redirect user back to the specified suburl - return HttpResponseRedirect("%s?s=logout" % site.redirecturl) + # Redirect user back to the specified suburl + return HttpResponseRedirect("%s?s=logout" % site.redirecturl) @login_required def communityauth_consent(request, siteid): - org = get_object_or_404(CommunityAuthSite, id=siteid).org - if request.method == 'POST': - form = CommunityAuthConsentForm(org.orgname, data=request.POST) - if form.is_valid(): - CommunityAuthConsent.objects.get_or_create(user=request.user, org=org, - defaults={'consentgiven':datetime.now()}, - ) - return HttpResponseRedirect(form.cleaned_data['next']) - else: - form = CommunityAuthConsentForm(org.orgname, initial={'next': request.GET.get('next', '')}) + org = get_object_or_404(CommunityAuthSite, id=siteid).org + if request.method == 'POST': + form = CommunityAuthConsentForm(org.orgname, data=request.POST) + if form.is_valid(): + CommunityAuthConsent.objects.get_or_create(user=request.user, org=org, + defaults={'consentgiven':datetime.now()}, + ) + return HttpResponseRedirect(form.cleaned_data['next']) + else: + form = CommunityAuthConsentForm(org.orgname, initial={'next': request.GET.get('next', '')}) - return render_pgweb(request, 'account', 'base/form.html', { - 'form': form, - 'operation': 'Authentication', - 'form_intro': 'The site you are about to log into is run by {0}. If you choose to proceed with this authentication, your name and email address will be shared with {1}.

Please confirm that you consent to this sharing.'.format(org.orgname, org.orgname), - 'savebutton': 'Proceed with login', - }) + return render_pgweb(request, 'account', 'base/form.html', { + 'form': form, + 'operation': 'Authentication', + 'form_intro': 'The site you are about to log into is run by {0}. If you choose to proceed with this authentication, your name and email address will be shared with {1}.

Please confirm that you consent to this sharing.'.format(org.orgname, org.orgname),
+ 'savebutton': 'Proceed with login',
+ })
def _encrypt_site_response(site, s):
- # Encrypt it with the shared key (and IV!)
- r = Random.new()
- iv = r.read(16) # Always 16 bytes for AES
- encryptor = AES.new(base64.b64decode(site.cryptkey), AES.MODE_CBC, iv)
- cipher = encryptor.encrypt(s + ' ' * (16-(len(s) % 16))) #Pad to even 16 bytes
+ # Encrypt it with the shared key (and IV!)
+ r = Random.new()
+ iv = r.read(16) # Always 16 bytes for AES
+ encryptor = AES.new(base64.b64decode(site.cryptkey), AES.MODE_CBC, iv)
+ cipher = encryptor.encrypt(s + ' ' * (16-(len(s) % 16))) #Pad to even 16 bytes
- # Base64-encode the response, just to be consistent
- return "%s&%s" % (
- base64.b64encode(iv, '-_'),
- base64.b64encode(cipher, '-_'),
- )
+ # Base64-encode the response, just to be consistent
+ return "%s&%s" % (
+ base64.b64encode(iv, '-_'),
+ base64.b64encode(cipher, '-_'),
+ )
def communityauth_search(request, siteid):
- # Perform a search for users. The response will be encrypted with the site
- # key to prevent abuse, therefore we need the site.
- site = get_object_or_404(CommunityAuthSite, pk=siteid)
+ # Perform a search for users. The response will be encrypted with the site
+ # key to prevent abuse, therefore we need the site.
+ site = get_object_or_404(CommunityAuthSite, pk=siteid)
- q = Q(is_active=True)
- if request.GET.has_key('s') and request.GET['s']:
- # General search term, match both name and email
- q = q & (Q(email__icontains=request.GET['s']) | Q(first_name__icontains=request.GET['s']) | Q(last_name__icontains=request.GET['s']))
- elif request.GET.has_key('e') and request.GET['e']:
- q = q & Q(email__icontains=request.GET['e'])
- elif request.GET.has_key('n') and request.GET['n']:
- q = q & (Q(first_name__icontains=request.GET['n']) | Q(last_name__icontains=request.GET['n']))
- elif request.GET.has_key('u') and request.GET['u']:
- q = q & Q(username=request.GET['u'])
- else:
- raise Http404('No search term specified')
+ q = Q(is_active=True)
+ if request.GET.has_key('s') and request.GET['s']:
+ # General search term, match both name and email
+ q = q & (Q(email__icontains=request.GET['s']) | Q(first_name__icontains=request.GET['s']) | Q(last_name__icontains=request.GET['s']))
+ elif request.GET.has_key('e') and request.GET['e']:
+ q = q & Q(email__icontains=request.GET['e'])
+ elif request.GET.has_key('n') and request.GET['n']:
+ q = q & (Q(first_name__icontains=request.GET['n']) | Q(last_name__icontains=request.GET['n']))
+ elif request.GET.has_key('u') and request.GET['u']:
+ q = q & Q(username=request.GET['u'])
+ else:
+ raise Http404('No search term specified')
- users = User.objects.filter(q)
+ users = User.objects.filter(q)
- j = json.dumps([{'u': u.username, 'e': u.email, 'f': u.first_name, 'l': u.last_name} for u in users])
+ j = json.dumps([{'u': u.username, 'e': u.email, 'f': u.first_name, 'l': u.last_name} for u in users])
- return HttpResponse(_encrypt_site_response(site, j))
+ return HttpResponse(_encrypt_site_response(site, j))
def communityauth_getkeys(request, siteid, since=None):
- # Get any updated ssh keys for community accounts.
- # The response will be encrypted with the site key to prevent abuse,
- # therefore we need the site.
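The search view above composes its WHERE clause from Q objects, AND-ing the is_active requirement with whichever OR-group the query parameter selects. Spelled out for the general ?s= case (the term is an illustrative value):

    from django.db.models import Q
    from django.contrib.auth.models import User

    term = 'lena'
    q = Q(is_active=True) & (
        Q(email__icontains=term) |
        Q(first_name__icontains=term) |
        Q(last_name__icontains=term))
    # Roughly: WHERE is_active AND (email ILIKE '%lena%'
    #          OR first_name ILIKE '%lena%' OR last_name ILIKE '%lena%')
    users = User.objects.filter(q)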
+ site = get_object_or_404(CommunityAuthSite, pk=siteid) - if since: - keys = UserProfile.objects.select_related('user').filter(lastmodified__gte=datetime.fromtimestamp(int(since.replace('/', '')))).exclude(sshkey='') - else: - keys = UserProfile.objects.select_related('user').all().exclude(sshkey='') + if since: + keys = UserProfile.objects.select_related('user').filter(lastmodified__gte=datetime.fromtimestamp(int(since.replace('/', '')))).exclude(sshkey='') + else: + keys = UserProfile.objects.select_related('user').all().exclude(sshkey='') - j = json.dumps([{'u': k.user.username, 's': k.sshkey} for k in keys]) + j = json.dumps([{'u': k.user.username, 's': k.sshkey} for k in keys]) - return HttpResponse(_encrypt_site_response(site, j)) + return HttpResponse(_encrypt_site_response(site, j)) diff --git a/pgweb/contributors/admin.py b/pgweb/contributors/admin.py index 9f2204e7..2f0ec4f2 100644 --- a/pgweb/contributors/admin.py +++ b/pgweb/contributors/admin.py @@ -8,20 +8,20 @@ from pgweb.core.lookups import UserLookup from models import Contributor, ContributorType class ContributorAdminForm(forms.ModelForm): - class Meta: - model = Contributor - exclude = () - widgets = { - 'user': AutoCompleteSelectWidget(lookup_class=UserLookup), - } + class Meta: + model = Contributor + exclude = () + widgets = { + 'user': AutoCompleteSelectWidget(lookup_class=UserLookup), + } - def __init__(self, *args, **kwargs): - super(ContributorAdminForm, self).__init__(*args, **kwargs) - self.fields['user'].widget.can_add_related = False - self.fields['user'].widget.can_change_related = False + def __init__(self, *args, **kwargs): + super(ContributorAdminForm, self).__init__(*args, **kwargs) + self.fields['user'].widget.can_add_related = False + self.fields['user'].widget.can_change_related = False class ContributorAdmin(admin.ModelAdmin): - form = ContributorAdminForm + form = ContributorAdminForm admin.site.register(ContributorType) admin.site.register(Contributor, ContributorAdmin) diff --git a/pgweb/contributors/models.py b/pgweb/contributors/models.py index 789e203e..43cd3bae 100644 --- a/pgweb/contributors/models.py +++ b/pgweb/contributors/models.py @@ -2,36 +2,36 @@ from django.db import models from django.contrib.auth.models import User class ContributorType(models.Model): - typename = models.CharField(max_length=32, null=False, blank=False) - sortorder = models.IntegerField(null=False, default=100) - extrainfo = models.TextField(null=True, blank=True) - detailed = models.BooleanField(null=False, default=True) - showemail = models.BooleanField(null=False, default=True) + typename = models.CharField(max_length=32, null=False, blank=False) + sortorder = models.IntegerField(null=False, default=100) + extrainfo = models.TextField(null=True, blank=True) + detailed = models.BooleanField(null=False, default=True) + showemail = models.BooleanField(null=False, default=True) - purge_urls = ('/community/contributors/', ) + purge_urls = ('/community/contributors/', ) - def __unicode__(self): - return self.typename + def __unicode__(self): + return self.typename - class Meta: - ordering = ('sortorder',) + class Meta: + ordering = ('sortorder',) class Contributor(models.Model): - ctype = models.ForeignKey(ContributorType) - lastname = models.CharField(max_length=100, null=False, blank=False) - firstname = models.CharField(max_length=100, null=False, blank=False) - email = models.EmailField(null=False, blank=True) - company = models.CharField(max_length=100, null=True, blank=True) - companyurl = 
models.URLField(max_length=100, null=True, blank=True, verbose_name='Company URL') - location = models.CharField(max_length=100, null=True, blank=True) - contribution = models.TextField(null=True, blank=True) - user = models.ForeignKey(User, null=True, blank=True) + ctype = models.ForeignKey(ContributorType) + lastname = models.CharField(max_length=100, null=False, blank=False) + firstname = models.CharField(max_length=100, null=False, blank=False) + email = models.EmailField(null=False, blank=True) + company = models.CharField(max_length=100, null=True, blank=True) + companyurl = models.URLField(max_length=100, null=True, blank=True, verbose_name='Company URL') + location = models.CharField(max_length=100, null=True, blank=True) + contribution = models.TextField(null=True, blank=True) + user = models.ForeignKey(User, null=True, blank=True) - send_notification=True - purge_urls = ('/community/contributors/', ) + send_notification=True + purge_urls = ('/community/contributors/', ) - def __unicode__(self): - return "%s %s" % (self.firstname, self.lastname) + def __unicode__(self): + return "%s %s" % (self.firstname, self.lastname) - class Meta: - ordering = ('lastname', 'firstname',) + class Meta: + ordering = ('lastname', 'firstname',) diff --git a/pgweb/contributors/struct.py b/pgweb/contributors/struct.py index 02c04ca1..9bd244c9 100644 --- a/pgweb/contributors/struct.py +++ b/pgweb/contributors/struct.py @@ -1,2 +1,2 @@ def get_struct(): - yield ('community/contributors/', None) + yield ('community/contributors/', None) diff --git a/pgweb/contributors/views.py b/pgweb/contributors/views.py index 31027de5..0be074a4 100644 --- a/pgweb/contributors/views.py +++ b/pgweb/contributors/views.py @@ -3,7 +3,7 @@ from pgweb.util.contexts import render_pgweb from models import ContributorType def completelist(request): - contributortypes = list(ContributorType.objects.all()) - return render_pgweb(request, 'community', 'contributors/list.html', { - 'contributortypes': contributortypes, - }) + contributortypes = list(ContributorType.objects.all()) + return render_pgweb(request, 'community', 'contributors/list.html', { + 'contributortypes': contributortypes, + }) diff --git a/pgweb/core/admin.py b/pgweb/core/admin.py index 40cff332..b03f1de0 100644 --- a/pgweb/core/admin.py +++ b/pgweb/core/admin.py @@ -10,28 +10,28 @@ from pgweb.core.models import ModerationNotification from pgweb.core.lookups import UserLookup class OrganisationAdminForm(forms.ModelForm): - class Meta: - model = Organisation - exclude = () - widgets = { - 'managers': AutoCompleteSelectMultipleWidget(lookup_class=UserLookup), - } + class Meta: + model = Organisation + exclude = () + widgets = { + 'managers': AutoCompleteSelectMultipleWidget(lookup_class=UserLookup), + } - def __init__(self, *args, **kwargs): - super(OrganisationAdminForm, self).__init__(*args, **kwargs) - self.fields['managers'].widget.can_add_related = False - self.fields['managers'].widget.can_change_related = False - self.fields['managers'].widget.can_delete_related = False + def __init__(self, *args, **kwargs): + super(OrganisationAdminForm, self).__init__(*args, **kwargs) + self.fields['managers'].widget.can_add_related = False + self.fields['managers'].widget.can_change_related = False + self.fields['managers'].widget.can_delete_related = False class OrganisationAdmin(admin.ModelAdmin): - form = OrganisationAdminForm - list_display = ('name', 'approved', 'lastconfirmed',) - list_filter = ('approved',) - ordering = ('name', ) - search_fields = ('name', ) + form 
= OrganisationAdminForm + list_display = ('name', 'approved', 'lastconfirmed',) + list_filter = ('approved',) + ordering = ('name', ) + search_fields = ('name', ) class VersionAdmin(admin.ModelAdmin): - list_display = ('versionstring', 'reldate', 'supported', 'current', ) + list_display = ('versionstring', 'reldate', 'supported', 'current', ) admin.site.register(Version, VersionAdmin) admin.site.register(OrganisationType) diff --git a/pgweb/core/feeds.py b/pgweb/core/feeds.py index 076ddd4d..45a0ee67 100644 --- a/pgweb/core/feeds.py +++ b/pgweb/core/feeds.py @@ -5,18 +5,18 @@ from models import Version from datetime import datetime, time class VersionFeed(Feed): - title = "PostgreSQL latest versions" - link = "https://www.postgresql.org/" - description = "PostgreSQL latest versions" + title = "PostgreSQL latest versions" + link = "https://www.postgresql.org/" + description = "PostgreSQL latest versions" - description_template = 'core/version_rss_description.html' - title_template = 'core/version_rss_title.html' + description_template = 'core/version_rss_description.html' + title_template = 'core/version_rss_title.html' - def items(self): - return Version.objects.filter(tree__gt=0).filter(testing=0) + def items(self): + return Version.objects.filter(tree__gt=0).filter(testing=0) - def item_link(self, obj): - return "https://www.postgresql.org/docs/%s/%s" % (obj.numtree, obj.relnotes) + def item_link(self, obj): + return "https://www.postgresql.org/docs/%s/%s" % (obj.numtree, obj.relnotes) - def item_pubdate(self, obj): - return datetime.combine(obj.reldate,time.min) + def item_pubdate(self, obj): + return datetime.combine(obj.reldate,time.min) diff --git a/pgweb/core/forms.py b/pgweb/core/forms.py index 917615e9..80e41876 100644 --- a/pgweb/core/forms.py +++ b/pgweb/core/forms.py @@ -5,60 +5,60 @@ from models import Organisation from django.contrib.auth.models import User class OrganisationForm(forms.ModelForm): - remove_manager = forms.ModelMultipleChoiceField(required=False, queryset=None, label="Current manager(s)", help_text="Select one or more managers to remove") - add_manager = forms.EmailField(required=False) + remove_manager = forms.ModelMultipleChoiceField(required=False, queryset=None, label="Current manager(s)", help_text="Select one or more managers to remove") + add_manager = forms.EmailField(required=False) - class Meta: - model = Organisation - exclude = ('lastconfirmed', 'approved', 'managers', ) + class Meta: + model = Organisation + exclude = ('lastconfirmed', 'approved', 'managers', ) - def __init__(self, *args, **kwargs): - super(OrganisationForm, self).__init__(*args, **kwargs) - if self.instance and self.instance.pk: - self.fields['remove_manager'].queryset = self.instance.managers.all() - else: - del self.fields['remove_manager'] - del self.fields['add_manager'] + def __init__(self, *args, **kwargs): + super(OrganisationForm, self).__init__(*args, **kwargs) + if self.instance and self.instance.pk: + self.fields['remove_manager'].queryset = self.instance.managers.all() + else: + del self.fields['remove_manager'] + del self.fields['add_manager'] - def clean_add_manager(self): - if self.cleaned_data['add_manager']: - # Something was added as manager - let's make sure the user exists - try: - User.objects.get(email=self.cleaned_data['add_manager'].lower()) - except User.DoesNotExist: - raise ValidationError("User with email %s not found" % self.cleaned_data['add_manager']) + def clean_add_manager(self): + if self.cleaned_data['add_manager']: + # Something was added as 
manager - let's make sure the user exists
+ try:
+ User.objects.get(email=self.cleaned_data['add_manager'].lower())
+ except User.DoesNotExist:
+ raise ValidationError("User with email %s not found" % self.cleaned_data['add_manager'])
- return self.cleaned_data['add_manager']
+ return self.cleaned_data['add_manager']
- def clean_remove_manager(self):
- if self.cleaned_data['remove_manager']:
- removecount = 0
- for toremove in self.cleaned_data['remove_manager']:
- if toremove in self.instance.managers.all():
- removecount += 1
+ def clean_remove_manager(self):
+ if self.cleaned_data['remove_manager']:
+ removecount = 0
+ for toremove in self.cleaned_data['remove_manager']:
+ if toremove in self.instance.managers.all():
+ removecount += 1
- if len(self.instance.managers.all()) - removecount <= 0:
- raise ValidationError("Cannot remove all managers from an organisation!")
- return self.cleaned_data['remove_manager']
+ if len(self.instance.managers.all()) - removecount <= 0:
+ raise ValidationError("Cannot remove all managers from an organisation!")
+ return self.cleaned_data['remove_manager']
- def save(self, commit=True):
- model = super(OrganisationForm, self).save(commit=False)
- if self.cleaned_data.has_key('add_manager') and self.cleaned_data['add_manager']:
- model.managers.add(User.objects.get(email=self.cleaned_data['add_manager'].lower()))
- if self.cleaned_data.has_key('remove_manager') and self.cleaned_data['remove_manager']:
- for toremove in self.cleaned_data['remove_manager']:
- model.managers.remove(toremove)
+ def save(self, commit=True):
+ model = super(OrganisationForm, self).save(commit=False)
+ if self.cleaned_data.has_key('add_manager') and self.cleaned_data['add_manager']:
+ model.managers.add(User.objects.get(email=self.cleaned_data['add_manager'].lower()))
+ if self.cleaned_data.has_key('remove_manager') and self.cleaned_data['remove_manager']:
+ for toremove in self.cleaned_data['remove_manager']:
+ model.managers.remove(toremove)
- return model
+ return model
- def apply_submitter(self, model, User):
- model.managers.add(User)
+ def apply_submitter(self, model, User):
+ model.managers.add(User)
class MergeOrgsForm(forms.Form):
- merge_into = forms.ModelChoiceField(queryset=Organisation.objects.all())
- merge_from = forms.ModelChoiceField(queryset=Organisation.objects.all())
+ merge_into = forms.ModelChoiceField(queryset=Organisation.objects.all())
+ merge_from = forms.ModelChoiceField(queryset=Organisation.objects.all())
- def clean(self):
- if self.cleaned_data['merge_into'] == self.cleaned_data['merge_from']:
- raise ValidationError("The two organisations selected must be different!")
- return self.cleaned_data
+ def clean(self):
+ if self.cleaned_data['merge_into'] == self.cleaned_data['merge_from']:
+ raise ValidationError("The two organisations selected must be different!")
+ return self.cleaned_data
diff --git a/pgweb/core/lookups.py b/pgweb/core/lookups.py
index 806d1d8a..3f64cb2e 100644
--- a/pgweb/core/lookups.py
+++ b/pgweb/core/lookups.py
@@ -6,20 +6,20 @@ from selectable.decorators import staff_member_required
@staff_member_required
class UserLookup(ModelLookup):
- model = User
- search_fields = (
- 'username__icontains',
- 'first_name__icontains',
- 'last_name__icontains',
- )
- filters = {'is_active': True, }
+ model = User
+ search_fields = (
+ 'username__icontains',
+ 'first_name__icontains',
+ 'last_name__icontains',
+ )
+ filters = {'is_active': True, }
- def get_item_value(self, item):
- # Display for currently selected item
- return u"%s (%s)" %
(item.username, item.get_full_name())
+ def get_item_value(self, item):
+ # Display for currently selected item
+ return u"%s (%s)" % (item.username, item.get_full_name())
- def get_item_label(self, item):
- # Display for choice listings
- return u"%s (%s)" % (item.username, item.get_full_name())
+ def get_item_label(self, item):
+ # Display for choice listings
+ return u"%s (%s)" % (item.username, item.get_full_name())
registry.register(UserLookup)
diff --git a/pgweb/core/management/commands/cleanup_old_records.py b/pgweb/core/management/commands/cleanup_old_records.py
index b971b76f..4fda8693 100644
--- a/pgweb/core/management/commands/cleanup_old_records.py
+++ b/pgweb/core/management/commands/cleanup_old_records.py
@@ -18,18 +18,18 @@ from datetime import datetime, timedelta
from pgweb.account.models import EmailChangeToken
class Command(BaseCommand):
- help = 'Cleanup old records'
+ help = 'Cleanup old records'
- def handle(self, *args, **options):
- # Grab advisory lock, if available. Lock id is just a random number
- # since we only need to interlock against ourselves. The lock is
- # automatically released when we're done.
- curs = connection.cursor()
- curs.execute("SELECT pg_try_advisory_lock(2896719)")
- if not curs.fetchall()[0][0]:
- print "Failed to get advisory lock, existing cleanup_old_records process stuck?"
- sys.exit(1)
+ def handle(self, *args, **options):
+ # Grab advisory lock, if available. Lock id is just a random number
+ # since we only need to interlock against ourselves. The lock is
+ # automatically released when we're done.
+ curs = connection.cursor()
+ curs.execute("SELECT pg_try_advisory_lock(2896719)")
+ if not curs.fetchall()[0][0]:
+ print "Failed to get advisory lock, existing cleanup_old_records process stuck?"
+ sys.exit(1)
- # Clean up old email change tokens
- with transaction.atomic():
- EmailChangeToken.objects.filter(sentat__lt=datetime.now()-timedelta(hours=24)).delete()
+ # Clean up old email change tokens
+ with transaction.atomic():
+ EmailChangeToken.objects.filter(sentat__lt=datetime.now()-timedelta(hours=24)).delete()
diff --git a/pgweb/core/management/commands/fetch_rss_feeds.py b/pgweb/core/management/commands/fetch_rss_feeds.py
index 20f31704..44c6ee7c 100644
--- a/pgweb/core/management/commands/fetch_rss_feeds.py
+++ b/pgweb/core/management/commands/fetch_rss_feeds.py
@@ -14,39 +14,39 @@ from datetime import datetime
from pgweb.core.models import ImportedRSSFeed, ImportedRSSItem
class Command(BaseCommand):
- help = 'Fetch RSS feeds'
+ help = 'Fetch RSS feeds'
- def handle(self, *args, **options):
- socket.setdefaulttimeout(20)
+ def handle(self, *args, **options):
+ socket.setdefaulttimeout(20)
- with transaction.atomic():
- for importfeed in ImportedRSSFeed.objects.all():
- try:
- feed = feedparser.parse(importfeed.url)
+ with transaction.atomic():
+ for importfeed in ImportedRSSFeed.objects.all():
+ try:
+ feed = feedparser.parse(importfeed.url)
- if not hasattr(feed, 'status'):
- # bozo_exception can seemingly be set when there is no error as well,
- # so make sure we only check if we didn't get a status.
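The advisory-lock dance in cleanup_old_records above is a generally useful pattern, so here it is as a reusable helper (the name is ours): pg_try_advisory_lock() returns immediately with true or false rather than blocking, and PostgreSQL releases the lock automatically when the database session ends, so no explicit unlock is needed.

    from django.db import connection

    def try_advisory_lock(lock_id):
        # Returns True if this session now holds the lock, False if some
        # other session (e.g. a stuck previous run) already has it.
        curs = connection.cursor()
        curs.execute("SELECT pg_try_advisory_lock(%s)", (lock_id,))
        return curs.fetchone()[0]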
+ if hasattr(feed,'bozo_exception'): + raise Exception('Feed load error %s' % feed.bozo_exception) + raise Exception('Feed load error with no exception!') + if feed.status != 200: + raise Exception('Feed returned status %s' % feed.status) - fetchedsomething = False - for entry in feed.entries: - try: - item = ImportedRSSItem.objects.get(feed=importfeed, url=entry.link) - except ImportedRSSItem.DoesNotExist: - item = ImportedRSSItem(feed=importfeed, - title=entry.title[:100], - url=entry.link, - posttime=datetime(*(entry.published_parsed[0:6])), - ) - item.save() - fetchedsomething = True + fetchedsomething = False + for entry in feed.entries: + try: + item = ImportedRSSItem.objects.get(feed=importfeed, url=entry.link) + except ImportedRSSItem.DoesNotExist: + item = ImportedRSSItem(feed=importfeed, + title=entry.title[:100], + url=entry.link, + posttime=datetime(*(entry.published_parsed[0:6])), + ) + item.save() + fetchedsomething = True - if fetchedsomething: - importfeed.purge_related() - except Exception, e: - print "Failed to load %s: %s" % (importfeed, e) + if fetchedsomething: + importfeed.purge_related() + except Exception, e: + print "Failed to load %s: %s" % (importfeed, e) diff --git a/pgweb/core/management/commands/moderation_report.py b/pgweb/core/management/commands/moderation_report.py index edd0bc7c..cdc3f28a 100644 --- a/pgweb/core/management/commands/moderation_report.py +++ b/pgweb/core/management/commands/moderation_report.py @@ -15,17 +15,17 @@ from pgweb.util.moderation import get_all_pending_moderations from pgweb.util.misc import send_template_mail class Command(BaseCommand): - help = 'Send moderation report' + help = 'Send moderation report' - def handle(self, *args, **options): - with transaction.atomic(): - counts = [{'name': unicode(x['name']), 'count': len(x['entries'])} for x in get_all_pending_moderations()] - if len(counts): - # Generate an email and send it off - send_template_mail(settings.NOTIFICATION_FROM, - settings.NOTIFICATION_EMAIL, - "PostgreSQL moderation report: %s" % datetime.now(), - "core/moderation_report.txt", - { - 'items': counts, - }) + def handle(self, *args, **options): + with transaction.atomic(): + counts = [{'name': unicode(x['name']), 'count': len(x['entries'])} for x in get_all_pending_moderations()] + if len(counts): + # Generate an email and send it off + send_template_mail(settings.NOTIFICATION_FROM, + settings.NOTIFICATION_EMAIL, + "PostgreSQL moderation report: %s" % datetime.now(), + "core/moderation_report.txt", + { + 'items': counts, + }) diff --git a/pgweb/core/management/commands/sessioninfo.py b/pgweb/core/management/commands/sessioninfo.py index a99f1adf..aa5aaa3e 100644 --- a/pgweb/core/management/commands/sessioninfo.py +++ b/pgweb/core/management/commands/sessioninfo.py @@ -7,36 +7,36 @@ from django.contrib.sessions.models import Session from django.contrib.auth.models import User class Command(BaseCommand): - help = 'Dump interesting information about a session' + help = 'Dump interesting information about a session' - def add_arguments(self, parser): - parser.add_argument('sessionid') + def add_arguments(self, parser): + parser.add_argument('sessionid') - def handle(self, *args, **options): - try: - session = Session.objects.get(session_key=options['sessionid']).get_decoded() - uid = session.get('_auth_user_id') + def handle(self, *args, **options): + try: + session = Session.objects.get(session_key=options['sessionid']).get_decoded() + uid = session.get('_auth_user_id') - print u"Session 
{0}".format(options['sessionid']) + print u"Session {0}".format(options['sessionid']) - try: - user = User.objects.get(pk=uid) - print " -- Logged in user --" - print u"Userid: {0}".format(uid) - print u"Username: {0}".format(user.username) - print u"Name: {0}".format(user.get_full_name()) - print u"Email: {0}".format(user.email) - except User.DoesNotExist: - print "** Associated user not found. Maybe not logged in?" + try: + user = User.objects.get(pk=uid) + print " -- Logged in user --" + print u"Userid: {0}".format(uid) + print u"Username: {0}".format(user.username) + print u"Name: {0}".format(user.get_full_name()) + print u"Email: {0}".format(user.email) + except User.DoesNotExist: + print "** Associated user not found. Maybe not logged in?" - # Remove known keys - for k in ('_auth_user_id', '_auth_user_hash', '_auth_user_backend'): - session.pop(k, None) - if session: - print " -- Other session values --" - for k,v in session.items(): - print u"{0:20} {1}".format(k,v) + # Remove known keys + for k in ('_auth_user_id', '_auth_user_hash', '_auth_user_backend'): + session.pop(k, None) + if session: + print " -- Other session values --" + for k,v in session.items(): + print u"{0:20} {1}".format(k,v) - except Session.DoesNotExist: - raise CommandError('Session not found') + except Session.DoesNotExist: + raise CommandError('Session not found') diff --git a/pgweb/core/models.py b/pgweb/core/models.py index 8edb07bb..d1d74bd9 100644 --- a/pgweb/core/models.py +++ b/pgweb/core/models.py @@ -6,203 +6,203 @@ from pgweb.util.misc import varnish_purge import base64 TESTING_CHOICES = ( - (0, 'Release'), - (1, 'Release candidate'), - (2, 'Beta'), - (3, 'Alpha'), - ) + (0, 'Release'), + (1, 'Release candidate'), + (2, 'Beta'), + (3, 'Alpha'), + ) TESTING_SHORTSTRING = ('', 'rc', 'beta', 'alpha') class Version(models.Model): - tree = models.DecimalField(max_digits=3, decimal_places=1, null=False, blank=False, unique=True) - latestminor = models.IntegerField(null=False, blank=False, default=0, help_text="For testing versions, latestminor means latest beta/rc number. For other releases, it's the latest minor release number in the tree.") - reldate = models.DateField(null=False, blank=False) - relnotes = models.CharField(max_length=32, null=False, blank=False) - current = models.BooleanField(null=False, blank=False, default=False) - supported = models.BooleanField(null=False, blank=False, default=True) - testing = models.IntegerField(null=False, blank=False, default=0, help_text="Testing level of this release. latestminor indicates beta/rc number", choices=TESTING_CHOICES) - docsloaded = models.DateTimeField(null=True, blank=True, help_text="The timestamp of the latest docs load. Used to control indexing and info on developer docs.") - firstreldate = models.DateField(null=False, blank=False, help_text="The date of the .0 release in this tree") - eoldate = models.DateField(null=False, blank=False, help_text="The final release date for this tree") + tree = models.DecimalField(max_digits=3, decimal_places=1, null=False, blank=False, unique=True) + latestminor = models.IntegerField(null=False, blank=False, default=0, help_text="For testing versions, latestminor means latest beta/rc number. 
For other releases, it's the latest minor release number in the tree.")
+ reldate = models.DateField(null=False, blank=False)
+ relnotes = models.CharField(max_length=32, null=False, blank=False)
+ current = models.BooleanField(null=False, blank=False, default=False)
+ supported = models.BooleanField(null=False, blank=False, default=True)
+ testing = models.IntegerField(null=False, blank=False, default=0, help_text="Testing level of this release. latestminor indicates beta/rc number", choices=TESTING_CHOICES)
+ docsloaded = models.DateTimeField(null=True, blank=True, help_text="The timestamp of the latest docs load. Used to control indexing and info on developer docs.")
+ firstreldate = models.DateField(null=False, blank=False, help_text="The date of the .0 release in this tree")
+ eoldate = models.DateField(null=False, blank=False, help_text="The final release date for this tree")
- def __unicode__(self):
- return self.versionstring
+ def __unicode__(self):
+ return self.versionstring
- @property
- def versionstring(self):
- return self.buildversionstring(self.latestminor)
+ @property
+ def versionstring(self):
+ return self.buildversionstring(self.latestminor)
- @property
- def numtree(self):
- # Return the proper numeric tree version, taking into account that PostgreSQL 10
- # changed from x.y to x for major version.
- if self.tree >= 10:
- return int(self.tree)
- else:
- return self.tree
+ @property
+ def numtree(self):
+ # Return the proper numeric tree version, taking into account that PostgreSQL 10
+ # changed from x.y to x for major version.
+ if self.tree >= 10:
+ return int(self.tree)
+ else:
+ return self.tree
- def buildversionstring(self, minor):
- if not self.testing:
- return "%s.%s" % (self.numtree, minor)
- else:
- return "%s%s%s" % (self.numtree, TESTING_SHORTSTRING[self.testing], minor)
+ def buildversionstring(self, minor):
+ if not self.testing:
+ return "%s.%s" % (self.numtree, minor)
+ else:
+ return "%s%s%s" % (self.numtree, TESTING_SHORTSTRING[self.testing], minor)
- @property
- def treestring(self):
- if not self.testing:
- return "%s" % self.numtree
- else:
- return "%s %s" % (self.numtree, TESTING_SHORTSTRING[self.testing])
+ @property
+ def treestring(self):
+ if not self.testing:
+ return "%s" % self.numtree
+ else:
+ return "%s %s" % (self.numtree, TESTING_SHORTSTRING[self.testing])
- def save(self):
- # Make sure only one version at a time can be the current one.
- # (there may be some small race conditions here, but the likelihood
- # that two admins are editing the version list at the same time...)
- if self.current:
- previous = Version.objects.filter(current=True)
- for p in previous:
- if not p == self:
- p.current = False
- p.save() # primary key check avoids recursion
+ def save(self):
+ # Make sure only one version at a time can be the current one.
+ # (there may be some small race conditions here, but the likelihood
+ # that two admins are editing the version list at the same time...)
+ if self.current:
+ previous = Version.objects.filter(current=True)
+ for p in previous:
+ if not p == self:
+ p.current = False
+ p.save() # primary key check avoids recursion
- # Now that we've made any previously current ones non-current, we are
- # free to save this one.
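To make the version-string rules above concrete, here is how buildversionstring() renders a few hypothetical inputs (note how numtree drops the decimal for PostgreSQL 10 and later, and how a nonzero testing level switches to the rc/beta/alpha form):

    >>> from decimal import Decimal
    >>> Version(tree=Decimal('9.6'), testing=0).buildversionstring(11)
    '9.6.11'
    >>> Version(tree=Decimal('10'), testing=0).buildversionstring(6)
    '10.6'
    >>> Version(tree=Decimal('11'), testing=1).buildversionstring(2)
    '11rc2'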
+ super(Version, self).save() - class Meta: - ordering = ('-tree', ) + class Meta: + ordering = ('-tree', ) - def purge_urls(self): - yield '/$' - yield '/support/versioning' - yield '/support/security' - yield '/docs/$' - yield '/docs/manuals' - yield '/about/featurematrix/$' - yield '/versions.rss' + def purge_urls(self): + yield '/$' + yield '/support/versioning' + yield '/support/security' + yield '/docs/$' + yield '/docs/manuals' + yield '/about/featurematrix/$' + yield '/versions.rss' class Country(models.Model): - name = models.CharField(max_length=100, null=False, blank=False) - tld = models.CharField(max_length=3, null=False, blank=False) + name = models.CharField(max_length=100, null=False, blank=False) + tld = models.CharField(max_length=3, null=False, blank=False) - class Meta: - db_table = 'countries' - ordering = ('name',) - verbose_name = 'Country' - verbose_name_plural = 'Countries' + class Meta: + db_table = 'countries' + ordering = ('name',) + verbose_name = 'Country' + verbose_name_plural = 'Countries' - def __unicode__(self): - return self.name + def __unicode__(self): + return self.name class Language(models.Model): - # Import data from http://www.loc.gov/standards/iso639-2/ISO-639-2_utf-8.txt - # (yes, there is a UTF16 BOM in the UTF8 file) - # (and yes, there is a 7 length value in a field specified as 3 chars) - alpha3 = models.CharField(max_length=7, null=False, blank=False, primary_key=True) - alpha3term = models.CharField(max_length=3, null=False, blank=True) - alpha2 = models.CharField(max_length=2, null=False, blank=True) - name = models.CharField(max_length=100, null=False, blank=False) - frenchname = models.CharField(max_length=100, null=False, blank=False) + # Import data from http://www.loc.gov/standards/iso639-2/ISO-639-2_utf-8.txt + # (yes, there is a UTF16 BOM in the UTF8 file) + # (and yes, there is a 7 length value in a field specified as 3 chars) + alpha3 = models.CharField(max_length=7, null=False, blank=False, primary_key=True) + alpha3term = models.CharField(max_length=3, null=False, blank=True) + alpha2 = models.CharField(max_length=2, null=False, blank=True) + name = models.CharField(max_length=100, null=False, blank=False) + frenchname = models.CharField(max_length=100, null=False, blank=False) - class Meta: - ordering = ('name', ) + class Meta: + ordering = ('name', ) - def __unicode__(self): - return self.name + def __unicode__(self): + return self.name class OrganisationType(models.Model): - typename = models.CharField(max_length=32, null=False, blank=False) + typename = models.CharField(max_length=32, null=False, blank=False) - def __unicode__(self): - return self.typename + def __unicode__(self): + return self.typename class Organisation(models.Model): - name = models.CharField(max_length=100, null=False, blank=False, unique=True) - approved = models.BooleanField(null=False, default=False) - address = models.TextField(null=False, blank=True) - url = models.URLField(null=False, blank=False) - email = models.EmailField(null=False, blank=True) - phone = models.CharField(max_length=100, null=False, blank=True) - orgtype = models.ForeignKey(OrganisationType, null=False, blank=False, verbose_name="Organisation type") - managers = models.ManyToManyField(User, blank=False) - lastconfirmed = models.DateTimeField(null=False, blank=False, auto_now_add=True) + name = models.CharField(max_length=100, null=False, blank=False, unique=True) + approved = models.BooleanField(null=False, default=False) + address = models.TextField(null=False, blank=True) + 
url = models.URLField(null=False, blank=False) + email = models.EmailField(null=False, blank=True) + phone = models.CharField(max_length=100, null=False, blank=True) + orgtype = models.ForeignKey(OrganisationType, null=False, blank=False, verbose_name="Organisation type") + managers = models.ManyToManyField(User, blank=False) + lastconfirmed = models.DateTimeField(null=False, blank=False, auto_now_add=True) - send_notification = True - send_m2m_notification = True + send_notification = True + send_m2m_notification = True - def __unicode__(self): - return self.name + def __unicode__(self): + return self.name - class Meta: - ordering = ('name',) + class Meta: + ordering = ('name',) # Basic classes for importing external RSS feeds, such as planet class ImportedRSSFeed(models.Model): - internalname = models.CharField(max_length=32, null=False, blank=False, unique=True) - url = models.URLField(null=False, blank=False) - purgepattern = models.CharField(max_length=512, null=False, blank=True, help_text="NOTE! Pattern will be automatically anchored with ^ at the beginning, but you must lead with a slash in most cases - and don't forget to include the trailing $ in most cases") + internalname = models.CharField(max_length=32, null=False, blank=False, unique=True) + url = models.URLField(null=False, blank=False) + purgepattern = models.CharField(max_length=512, null=False, blank=True, help_text="NOTE! Pattern will be automatically anchored with ^ at the beginning, but you must lead with a slash in most cases - and don't forget to include the trailing $ in most cases") - def purge_related(self): - if self.purgepattern: - varnish_purge(self.purgepattern) + def purge_related(self): + if self.purgepattern: + varnish_purge(self.purgepattern) - def __unicode__(self): - return self.internalname + def __unicode__(self): + return self.internalname class ImportedRSSItem(models.Model): - feed = models.ForeignKey(ImportedRSSFeed) - title = models.CharField(max_length=100, null=False, blank=False) - url = models.URLField(null=False, blank=False) - posttime = models.DateTimeField(null=False, blank=False) + feed = models.ForeignKey(ImportedRSSFeed) + title = models.CharField(max_length=100, null=False, blank=False) + url = models.URLField(null=False, blank=False) + posttime = models.DateTimeField(null=False, blank=False) - def __unicode__(self): - return self.title + def __unicode__(self): + return self.title - @property - def date(self): - return self.posttime.strftime("%Y-%m-%d") + @property + def date(self): + return self.posttime.strftime("%Y-%m-%d") # From man sshd, except for ssh-dss _valid_keytypes = ['ecdsa-sha2-nistp256', 'ecdsa-sha2-nistp384', 'ecdsa-sha2-nistp521', 'ssh-ed25519', 'ssh-rsa'] # Options, keytype, key, comment. But we don't support options. def validate_sshkey(key): - lines = key.splitlines() - for k in lines: - pieces = k.split() - if len(pieces) == 0: - raise ValidationError("Empty keys are not allowed") - if len(pieces) > 3: - raise ValidationError('Paste each ssh key without options, e.g. 
"ssh-rsa AAAAbbbcc mykey@machine"') - if pieces[0] == 'ssh-dss': - raise ValidationError("For security reasons, ssh-dss keys are not supported") - if pieces[0] not in _valid_keytypes: - raise ValidationError(u"Only keys of types {0} are supported, not {1}.".format(", ".join(_valid_keytypes), pieces[0])) - try: - base64.b64decode(pieces[1]) - except: - raise ValidationError("Incorrect base64 encoded key!") + lines = key.splitlines() + for k in lines: + pieces = k.split() + if len(pieces) == 0: + raise ValidationError("Empty keys are not allowed") + if len(pieces) > 3: + raise ValidationError('Paste each ssh key without options, e.g. "ssh-rsa AAAAbbbcc mykey@machine"') + if pieces[0] == 'ssh-dss': + raise ValidationError("For security reasons, ssh-dss keys are not supported") + if pieces[0] not in _valid_keytypes: + raise ValidationError(u"Only keys of types {0} are supported, not {1}.".format(", ".join(_valid_keytypes), pieces[0])) + try: + base64.b64decode(pieces[1]) + except: + raise ValidationError("Incorrect base64 encoded key!") # Extra attributes for users (if they have them) class UserProfile(models.Model): - user = models.OneToOneField(User, null=False, blank=False, primary_key=True) - sshkey = models.TextField(null=False, blank=True, verbose_name="SSH key", help_text= "Paste one or more public keys in OpenSSH format, one per line.", validators=[validate_sshkey, ]) - lastmodified = models.DateTimeField(null=False, blank=False, auto_now=True) + user = models.OneToOneField(User, null=False, blank=False, primary_key=True) + sshkey = models.TextField(null=False, blank=True, verbose_name="SSH key", help_text= "Paste one or more public keys in OpenSSH format, one per line.", validators=[validate_sshkey, ]) + lastmodified = models.DateTimeField(null=False, blank=False, auto_now=True) # Notifications sent for any moderated content. # Yes, we uglify it by storing the type of object as a string, so we don't # end up with a bazillion fields being foreign keys. Ugly, but works. 
class ModerationNotification(models.Model): - objectid = models.IntegerField(null=False, blank=False, db_index=True) - objecttype = models.CharField(null=False, blank=False, max_length=100) - text = models.TextField(null=False, blank=False) - author = models.CharField(null=False, blank=False, max_length=100) - date = models.DateTimeField(null=False, blank=False, auto_now=True) + objectid = models.IntegerField(null=False, blank=False, db_index=True) + objecttype = models.CharField(null=False, blank=False, max_length=100) + text = models.TextField(null=False, blank=False) + author = models.CharField(null=False, blank=False, max_length=100) + date = models.DateTimeField(null=False, blank=False, auto_now=True) - def __unicode__(self): - return "%s id %s (%s): %s" % (self.objecttype, self.objectid, self.date, self.text[:50]) + def __unicode__(self): + return "%s id %s (%s): %s" % (self.objecttype, self.objectid, self.date, self.text[:50]) - class Meta: - ordering = ('-date', ) + class Meta: + ordering = ('-date', ) diff --git a/pgweb/core/struct.py b/pgweb/core/struct.py index e3c697b3..2829d1a5 100644 --- a/pgweb/core/struct.py +++ b/pgweb/core/struct.py @@ -1,16 +1,16 @@ import os def get_struct(): - yield ('', None) - yield ('community/', None) - yield ('support/versioning/', None) + yield ('', None) + yield ('community/', None) + yield ('support/versioning/', None) - # Enumerate all the templates that will generate pages - pages_dir = os.path.abspath(os.path.join(os.path.dirname(__file__), '../../templates/pages/')) - for root, dirs, files in os.walk(pages_dir): - # Cut out the reference to the absolute root path - r = '' if root == pages_dir else os.path.relpath(root, pages_dir) - for f in files: - if f.endswith('.html'): - yield (os.path.join(r, f)[:-5] + "/", - None) + # Enumerate all the templates that will generate pages + pages_dir = os.path.abspath(os.path.join(os.path.dirname(__file__), '../../templates/pages/')) + for root, dirs, files in os.walk(pages_dir): + # Cut out the reference to the absolute root path + r = '' if root == pages_dir else os.path.relpath(root, pages_dir) + for f in files: + if f.endswith('.html'): + yield (os.path.join(r, f)[:-5] + "/", + None) diff --git a/pgweb/core/templatetags/pgfilters.py b/pgweb/core/templatetags/pgfilters.py index 8670535e..2d8cce99 100644 --- a/pgweb/core/templatetags/pgfilters.py +++ b/pgweb/core/templatetags/pgfilters.py @@ -7,50 +7,50 @@ register = template.Library() @register.filter(name='class_name') def class_name(ob): - return ob.__class__.__name__ + return ob.__class__.__name__ @register.filter(is_safe=True) def field_class(value, arg): - if 'class' in value.field.widget.attrs: - c = arg + ' ' + value.field.widget.attrs['class'] - else: - c = arg - return value.as_widget(attrs={"class": c}) + if 'class' in value.field.widget.attrs: + c = arg + ' ' + value.field.widget.attrs['class'] + else: + c = arg + return value.as_widget(attrs={"class": c}) @register.filter(name='hidemail') @stringfilter def hidemail(value): - return value.replace('@', ' at ') + return value.replace('@', ' at ') @register.filter(is_safe=True) def ischeckbox(obj): - return obj.field.widget.__class__.__name__ in ["CheckboxInput", "CheckboxSelectMultiple"] and not getattr(obj.field, 'regular_field', False) + return obj.field.widget.__class__.__name__ in ["CheckboxInput", "CheckboxSelectMultiple"] and not getattr(obj.field, 'regular_field', False) @register.filter(is_safe=True) def ismultiplecheckboxes(obj): - return obj.field.widget.__class__.__name__ == 
"CheckboxSelectMultiple" and not getattr(obj.field, 'regular_field', False) + return obj.field.widget.__class__.__name__ == "CheckboxSelectMultiple" and not getattr(obj.field, 'regular_field', False) @register.filter(is_safe=True) def isrequired_error(obj): - if obj.errors and obj.errors[0] == u"This field is required.": - return True - return False + if obj.errors and obj.errors[0] == u"This field is required.": + return True + return False @register.filter(is_safe=True) def label_class(value, arg): - return value.label_tag(attrs={'class': arg}) + return value.label_tag(attrs={'class': arg}) @register.filter() def planet_author(obj): - # takes a ImportedRSSItem object from a Planet feed and extracts the author - # information from the title - return obj.title.split(':')[0] + # takes a ImportedRSSItem object from a Planet feed and extracts the author + # information from the title + return obj.title.split(':')[0] @register.filter() def planet_title(obj): - # takes a ImportedRSSItem object from a Planet feed and extracts the info - # specific to the title of the Planet entry - return ":".join(obj.title.split(':')[1:]) + # takes a ImportedRSSItem object from a Planet feed and extracts the info + # specific to the title of the Planet entry + return ":".join(obj.title.split(':')[1:]) @register.filter(name='dictlookup') def dictlookup(value, key): @@ -58,4 +58,4 @@ def dictlookup(value, key): @register.filter(name='json') def tojson(value): - return json.dumps(value) + return json.dumps(value) diff --git a/pgweb/core/views.py b/pgweb/core/views.py index efb07ce3..509a52bc 100644 --- a/pgweb/core/views.py +++ b/pgweb/core/views.py @@ -40,101 +40,101 @@ from forms import OrganisationForm, MergeOrgsForm # Front page view @cache(minutes=10) def home(request): - news = NewsArticle.objects.filter(approved=True)[:5] - today = date.today() - # get up to seven events to display on the homepage - event_base_queryset = Event.objects.select_related('country').filter( - approved=True, - enddate__gte=today, - ) - # first, see if there are up to two non-badged events within 90 days - other_events = event_base_queryset.filter( - badged=False, - startdate__lte=today + timedelta(days=90), - ).order_by('enddate', 'startdate')[:2] - # based on that, get 7 - |other_events| community events to display - community_event_queryset = event_base_queryset.filter(badged=True).order_by('enddate', 'startdate')[:(7 - other_events.count())] - # now, return all the events in one unioned array! - events = community_event_queryset.union(other_events).order_by('enddate', 'startdate').all() - versions = Version.objects.filter(supported=True) - planet = ImportedRSSItem.objects.filter(feed__internalname="planet").order_by("-posttime")[:9] + news = NewsArticle.objects.filter(approved=True)[:5] + today = date.today() + # get up to seven events to display on the homepage + event_base_queryset = Event.objects.select_related('country').filter( + approved=True, + enddate__gte=today, + ) + # first, see if there are up to two non-badged events within 90 days + other_events = event_base_queryset.filter( + badged=False, + startdate__lte=today + timedelta(days=90), + ).order_by('enddate', 'startdate')[:2] + # based on that, get 7 - |other_events| community events to display + community_event_queryset = event_base_queryset.filter(badged=True).order_by('enddate', 'startdate')[:(7 - other_events.count())] + # now, return all the events in one unioned array! 
+ events = community_event_queryset.union(other_events).order_by('enddate', 'startdate').all() + versions = Version.objects.filter(supported=True) + planet = ImportedRSSItem.objects.filter(feed__internalname="planet").order_by("-posttime")[:9] - return render(request, 'index.html', { - 'title': 'The world\'s most advanced open source database', - 'news': news, - 'newstags': NewsTag.objects.all(), - 'events': events, - 'versions': versions, - 'planet': planet, - }) + return render(request, 'index.html', { + 'title': 'The world\'s most advanced open source database', + 'news': news, + 'newstags': NewsTag.objects.all(), + 'events': events, + 'versions': versions, + 'planet': planet, + }) # About page view (contains information about PostgreSQL + random quotes) @cache(minutes=10) def about(request): - # get 5 random quotes - quotes = Quote.objects.filter(approved=True).order_by('?').all()[:5] - return render_pgweb(request, 'about', 'core/about.html', { - 'quotes': quotes, - }) + # get 5 random quotes + quotes = Quote.objects.filter(approved=True).order_by('?').all()[:5] + return render_pgweb(request, 'about', 'core/about.html', { + 'quotes': quotes, + }) # Community main page (contains surveys and potentially more) def community(request): - s = Survey.objects.filter(current=True) - try: - s = s[0] - except: - s = None - planet = ImportedRSSItem.objects.filter(feed__internalname="planet").order_by("-posttime")[:7] - return render_pgweb(request, 'community', 'core/community.html', { - 'survey': s, - 'planet': planet, - }) + s = Survey.objects.filter(current=True) + try: + s = s[0] + except: + s = None + planet = ImportedRSSItem.objects.filter(feed__internalname="planet").order_by("-posttime")[:7] + return render_pgweb(request, 'community', 'core/community.html', { + 'survey': s, + 'planet': planet, + }) # List of supported versions def versions(request): - return render_pgweb(request, 'support', 'support/versioning.html', { - 'versions': Version.objects.filter(tree__gt=0).filter(testing=0), - }) + return render_pgweb(request, 'support', 'support/versioning.html', { + 'versions': Version.objects.filter(tree__gt=0).filter(testing=0), + }) re_staticfilenames = re.compile("^[0-9A-Z/_-]+$", re.IGNORECASE) # Generic fallback view for static pages def fallback(request, url): - if url.find('..') > -1: - raise Http404('Page not found.') + if url.find('..') > -1: + raise Http404('Page not found.') - if not re_staticfilenames.match(url): - raise Http404('Page not found.') + if not re_staticfilenames.match(url): + raise Http404('Page not found.') - try: - t = loader.get_template('pages/%s.html' % url) - except TemplateDoesNotExist: - try: - t = loader.get_template('pages/%s/en.html' % url) - except TemplateDoesNotExist: - raise Http404('Page not found.') + try: + t = loader.get_template('pages/%s.html' % url) + except TemplateDoesNotExist: + try: + t = loader.get_template('pages/%s/en.html' % url) + except TemplateDoesNotExist: + raise Http404('Page not found.') - # Guestimate the nav section by looking at the URL and taking the first - # piece of it. - try: - navsect = url.split('/',2)[0] - except: - navsect = '' - c = PGWebContextProcessor(request) - c.update({'navmenu': get_nav_menu(navsect)}) - return HttpResponse(t.render(c)) + # Guesstimate the nav section by looking at the URL and taking the first + # piece of it. 
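+ # (For example, url='about/press/faq' gives navsect='about'.)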
+ try: + navsect = url.split('/',2)[0] + except: + navsect = '' + c = PGWebContextProcessor(request) + c.update({'navmenu': get_nav_menu(navsect)}) + return HttpResponse(t.render(c)) # Edit-forms for core objects @login_required def organisationform(request, itemid): - if itemid != 'new': - get_object_or_404(Organisation, pk=itemid, managers=request.user) + if itemid != 'new': + get_object_or_404(Organisation, pk=itemid, managers=request.user) - return simple_form(Organisation, itemid, request, OrganisationForm, - redirect='/account/edit/organisations/') + return simple_form(Organisation, itemid, request, OrganisationForm, + redirect='/account/edit/organisations/') # robots.txt def robots(request): - return HttpResponse("""User-agent: * + return HttpResponse("""User-agent: * Disallow: /admin/ Disallow: /account/ Disallow: /docs/devel/ @@ -148,203 +148,203 @@ Sitemap: https://www.postgresql.org/sitemap.xml def _make_sitemap(pagelist): - resp = HttpResponse(content_type='text/xml') - x = PgXmlHelper(resp) - x.startDocument() - x.startElement('urlset', {'xmlns': 'http://www.sitemaps.org/schemas/sitemap/0.9'}) - pages = 0 - for p in pagelist: - pages+=1 - x.startElement('url', {}) - x.add_xml_element('loc', 'https://www.postgresql.org/%s' % urllib.quote(p[0])) - if len(p) > 1 and p[1]: - x.add_xml_element('priority', unicode(p[1])) - if len(p) > 2 and p[2]: - x.add_xml_element('lastmod', p[2].isoformat() + "Z") - x.endElement('url') - x.endElement('urlset') - x.endDocument() - return resp + resp = HttpResponse(content_type='text/xml') + x = PgXmlHelper(resp) + x.startDocument() + x.startElement('urlset', {'xmlns': 'http://www.sitemaps.org/schemas/sitemap/0.9'}) + pages = 0 + for p in pagelist: + pages+=1 + x.startElement('url', {}) + x.add_xml_element('loc', 'https://www.postgresql.org/%s' % urllib.quote(p[0])) + if len(p) > 1 and p[1]: + x.add_xml_element('priority', unicode(p[1])) + if len(p) > 2 and p[2]: + x.add_xml_element('lastmod', p[2].isoformat() + "Z") + x.endElement('url') + x.endElement('urlset') + x.endDocument() + return resp # Sitemap (XML format) @cache(hours=6) def sitemap(request): - return _make_sitemap(get_all_pages_struct()) + return _make_sitemap(get_all_pages_struct()) # Internal sitemap (only for our own search engine) # Note! Still served up to anybody who wants it, so don't # put anything secret in it... @cache(hours=6) def sitemap_internal(request): - return _make_sitemap(get_all_pages_struct(method='get_internal_struct')) + return _make_sitemap(get_all_pages_struct(method='get_internal_struct')) # dynamic CSS serving, meaning we merge a number of different CSS into a # single one, making sure it turns into a single http response. We do this # dynamically, since the output will be cached. 
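# (For example, css='base' merges media/css/main.css and media/css/normalize.css,
# in that order, into a single cached text/css response.)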
_dynamic_cssmap = { - 'base': ['media/css/main.css', - 'media/css/normalize.css',], - 'docs': ['media/css/global.css', - 'media/css/table.css', - 'media/css/text.css', - 'media/css/docs.css'], - } + 'base': ['media/css/main.css', + 'media/css/normalize.css',], + 'docs': ['media/css/global.css', + 'media/css/table.css', + 'media/css/text.css', + 'media/css/docs.css'], + } @cache(hours=6) def dynamic_css(request, css): - if not _dynamic_cssmap.has_key(css): - raise Http404('CSS not found') - files = _dynamic_cssmap[css] - resp = HttpResponse(content_type='text/css') + if not _dynamic_cssmap.has_key(css): + raise Http404('CSS not found') + files = _dynamic_cssmap[css] + resp = HttpResponse(content_type='text/css') - # We honor if-modified-since headers by looking at the most recently - # touched CSS file. - latestmod = 0 - for fn in files: - try: - stime = os.stat(fn).st_mtime - if latestmod < stime: - latestmod = stime - except OSError: - # If we somehow referred to a file that didn't exist, or - # one that we couldn't access. - raise Http404('CSS (sub) not found') - if request.META.has_key('HTTP_IF_MODIFIED_SINCE'): - # This code is mostly stolen from django :) - matches = re.match(r"^([^;]+)(; length=([0-9]+))?$", - request.META.get('HTTP_IF_MODIFIED_SINCE'), - re.IGNORECASE) - header_mtime = parse_http_date(matches.group(1)) - # We don't do length checking, just the date - if int(latestmod) <= header_mtime: - return HttpResponseNotModified(content_type='text/css') - resp['Last-Modified'] = http_date(latestmod) + # We honor if-modified-since headers by looking at the most recently + # touched CSS file. + latestmod = 0 + for fn in files: + try: + stime = os.stat(fn).st_mtime + if latestmod < stime: + latestmod = stime + except OSError: + # If we somehow referred to a file that didn't exist, or + # one that we couldn't access. + raise Http404('CSS (sub) not found') + if request.META.has_key('HTTP_IF_MODIFIED_SINCE'): + # This code is mostly stolen from django :) + matches = re.match(r"^([^;]+)(; length=([0-9]+))?$", + request.META.get('HTTP_IF_MODIFIED_SINCE'), + re.IGNORECASE) + header_mtime = parse_http_date(matches.group(1)) + # We don't do length checking, just the date + if int(latestmod) <= header_mtime: + return HttpResponseNotModified(content_type='text/css') + resp['Last-Modified'] = http_date(latestmod) - for fn in files: - with open(fn) as f: - resp.write("/* %s */\n" % fn) - resp.write(f.read()) - resp.write("\n") + for fn in files: + with open(fn) as f: + resp.write("/* %s */\n" % fn) + resp.write(f.read()) + resp.write("\n") - return resp + return resp @nocache def csrf_failure(request, reason=''): - resp = render(request, 'errors/csrf_failure.html', { - 'reason': reason, - }) - resp.status_code = 403 # Forbidden - return resp + resp = render(request, 'errors/csrf_failure.html', { + 'reason': reason, + }) + resp.status_code = 403 # Forbidden + return resp # Basic information about the connection @cache(seconds=30) def system_information(request): - return render(request,'core/system_information.html', { - 'server': os.uname()[1], - 'cache_server': request.META['REMOTE_ADDR'] or None, - 'client_ip': get_client_ip(request), - 'django_version': django.get_version(), - }) + return render(request,'core/system_information.html', { + 'server': os.uname()[1], + 'cache_server': request.META['REMOTE_ADDR'] or None, + 'client_ip': get_client_ip(request), + 'django_version': django.get_version(), + }) # Sync timestamp for automirror. 
Keep it around for 30 seconds # Basically just a check that we can access the backend still... @cache(seconds=30) def sync_timestamp(request): - s = datetime.now().strftime("%Y-%m-%d %H:%M:%S\n") - r = HttpResponse(s, content_type='text/plain') - r['Content-Length'] = len(s) - return r + s = datetime.now().strftime("%Y-%m-%d %H:%M:%S\n") + r = HttpResponse(s, content_type='text/plain') + r['Content-Length'] = len(s) + return r # List of all unapproved objects, for the special admin page @login_required @user_passes_test(lambda u: u.is_staff) @user_passes_test(lambda u: u.groups.filter(name='pgweb moderators').exists()) def admin_pending(request): - return render(request, 'core/admin_pending.html', { - 'app_list': get_all_pending_moderations(), - }) + return render(request, 'core/admin_pending.html', { + 'app_list': get_all_pending_moderations(), + }) # Purge objects from varnish, for the admin pages @login_required @user_passes_test(lambda u: u.is_staff) @user_passes_test(lambda u: u.groups.filter(name='varnish purgers').exists()) def admin_purge(request): - if request.method == 'POST': - url = request.POST['url'] - expr = request.POST['expr'] - xkey = request.POST['xkey'] - l = len(filter(None, [url, expr, xkey])) - if l == 0: - # Nothing specified - return HttpResponseRedirect('.') - elif l > 1: - messages.error(request, "Can only specify one of url, expression and xkey!") - return HttpResponseRedirect('.') + if request.method == 'POST': + url = request.POST['url'] + expr = request.POST['expr'] + xkey = request.POST['xkey'] + l = len(filter(None, [url, expr, xkey])) + if l == 0: + # Nothing specified + return HttpResponseRedirect('.') + elif l > 1: + messages.error(request, "Can only specify one of url, expression and xkey!") + return HttpResponseRedirect('.') - if url: - varnish_purge(url) - elif expr: - varnish_purge_expr(expr) - else: - varnish_purge_xkey(xkey) + if url: + varnish_purge(url) + elif expr: + varnish_purge_expr(expr) + else: + varnish_purge_xkey(xkey) - messages.info(request, "Purge added.") - return HttpResponseRedirect('.') + messages.info(request, "Purge added.") + return HttpResponseRedirect('.') - # Fetch list of latest purges - curs = connection.cursor() - curs.execute("SELECT added, completed, consumer, CASE WHEN mode = 'K' THEN 'XKey' WHEN mode='P' THEN 'URL' ELSE 'Expression' END, expr FROM varnishqueue.queue q LEFT JOIN varnishqueue.consumers c ON c.consumerid=q.consumerid ORDER BY added DESC") - latest = curs.fetchall() + # Fetch list of latest purges + curs = connection.cursor() + curs.execute("SELECT added, completed, consumer, CASE WHEN mode = 'K' THEN 'XKey' WHEN mode='P' THEN 'URL' ELSE 'Expression' END, expr FROM varnishqueue.queue q LEFT JOIN varnishqueue.consumers c ON c.consumerid=q.consumerid ORDER BY added DESC") + latest = curs.fetchall() - return render(request, 'core/admin_purge.html', { - 'latest_purges': latest, - }) + return render(request, 'core/admin_purge.html', { + 'latest_purges': latest, + }) @csrf_exempt def api_varnish_purge(request): - if not request.META['REMOTE_ADDR'] in settings.VARNISH_PURGERS: - return HttpServerError(request, "Invalid client address") - if request.method != 'POST': - return HttpServerError(request, "Can't use this way") - n = int(request.POST['n']) - curs = connection.cursor() - for i in range(0, n): - expr = request.POST['p%s' % i] - curs.execute("SELECT varnish_purge_expr(%s)", (expr, )) - return HttpResponse("Purged %s entries\n" % n) + if not request.META['REMOTE_ADDR'] in settings.VARNISH_PURGERS: + return 
HttpServerError(request, "Invalid client address") + if request.method != 'POST': + return HttpServerError(request, "Can't use this way") + n = int(request.POST['n']) + curs = connection.cursor() + for i in range(0, n): + expr = request.POST['p%s' % i] + curs.execute("SELECT varnish_purge_expr(%s)", (expr, )) + return HttpResponse("Purged %s entries\n" % n) # Merge two organisations @login_required @user_passes_test(lambda u: u.is_superuser) @transaction.atomic def admin_mergeorg(request): - if request.method == 'POST': - form = MergeOrgsForm(data=request.POST) - if form.is_valid(): - # Ok, try to actually merge organisations, by moving all objects - # attached - f = form.cleaned_data['merge_from'] - t = form.cleaned_data['merge_into'] - for e in f.event_set.all(): - e.org = t - e.save() - for n in f.newsarticle_set.all(): - n.org = t - n.save() - for p in f.product_set.all(): - p.org = t - p.save() - for p in f.professionalservice_set.all(): - p.organisation = t - p.save() - # Now that everything is moved, we can delete the organisation - f.delete() + if request.method == 'POST': + form = MergeOrgsForm(data=request.POST) + if form.is_valid(): + # Ok, try to actually merge organisations, by moving all objects + # attached + f = form.cleaned_data['merge_from'] + t = form.cleaned_data['merge_into'] + for e in f.event_set.all(): + e.org = t + e.save() + for n in f.newsarticle_set.all(): + n.org = t + n.save() + for p in f.product_set.all(): + p.org = t + p.save() + for p in f.professionalservice_set.all(): + p.organisation = t + p.save() + # Now that everything is moved, we can delete the organisation + f.delete() - return HttpResponseRedirect("/admin/core/organisation/") - # Else fall through to re-render form with errors - else: - form = MergeOrgsForm() + return HttpResponseRedirect("/admin/core/organisation/") + # Else fall through to re-render form with errors + else: + form = MergeOrgsForm() - return render(request, 'core/admin_mergeorg.html', { - 'form': form, + return render(request, 'core/admin_mergeorg.html', { + 'form': form, }) diff --git a/pgweb/docs/forms.py b/pgweb/docs/forms.py index 8a9fae6c..c26695ff 100644 --- a/pgweb/docs/forms.py +++ b/pgweb/docs/forms.py @@ -1,8 +1,8 @@ from django import forms class DocCommentForm(forms.Form): - name = forms.CharField(max_length=100, required=True, label='Your Name') - email = forms.EmailField(max_length=100, required=True, label='Your Email') - shortdesc = forms.CharField(max_length=100, required=True, label="Subject") - details = forms.CharField(required=True, widget=forms.Textarea, - label="What is your comment?") + name = forms.CharField(max_length=100, required=True, label='Your Name') + email = forms.EmailField(max_length=100, required=True, label='Your Email') + shortdesc = forms.CharField(max_length=100, required=True, label="Subject") + details = forms.CharField(required=True, widget=forms.Textarea, + label="What is your comment?") diff --git a/pgweb/docs/migrations/0003_docs_alias.py b/pgweb/docs/migrations/0003_docs_alias.py index a6c72a1b..745042e3 100644 --- a/pgweb/docs/migrations/0003_docs_alias.py +++ b/pgweb/docs/migrations/0003_docs_alias.py @@ -23,5 +23,5 @@ class Migration(migrations.Migration): 'verbose_name_plural': 'Doc page aliases', }, ), - migrations.RunSQL("CREATE UNIQUE INDEX docsalias_unique ON docsalias (LEAST(file1, file2), GREATEST(file1, file2))"), + migrations.RunSQL("CREATE UNIQUE INDEX docsalias_unique ON docsalias (LEAST(file1, file2), GREATEST(file1, file2))"), ] diff --git a/pgweb/docs/models.py 
b/pgweb/docs/models.py index a2754b60..7a522147 100644 --- a/pgweb/docs/models.py +++ b/pgweb/docs/models.py @@ -2,32 +2,32 @@ from django.db import models from pgweb.core.models import Version class DocPage(models.Model): - id = models.AutoField(null=False, primary_key=True) - file = models.CharField(max_length=64, null=False, blank=False) - version = models.ForeignKey(Version, null=False, blank=False, db_column='version', to_field='tree') - title = models.CharField(max_length=256, null=True, blank=True) - content = models.TextField(null=True, blank=True) + id = models.AutoField(null=False, primary_key=True) + file = models.CharField(max_length=64, null=False, blank=False) + version = models.ForeignKey(Version, null=False, blank=False, db_column='version', to_field='tree') + title = models.CharField(max_length=256, null=True, blank=True) + content = models.TextField(null=True, blank=True) - def display_version(self): - """Version as used for displaying and in URLs""" - if self.version.tree == 0: - return 'devel' - else: - return str(self.version.numtree) + def display_version(self): + """Version as used for displaying and in URLs""" + if self.version.tree == 0: + return 'devel' + else: + return str(self.version.numtree) - class Meta: - db_table = 'docs' - # Index file first, because we want to list versions by file - unique_together = [('file', 'version')] + class Meta: + db_table = 'docs' + # Index file first, because we want to list versions by file + unique_together = [('file', 'version')] class DocPageAlias(models.Model): - file1 = models.CharField(max_length=64, null=False, blank=False, unique=True) - file2 = models.CharField(max_length=64, null=False, blank=False, unique=True) + file1 = models.CharField(max_length=64, null=False, blank=False, unique=True) + file2 = models.CharField(max_length=64, null=False, blank=False, unique=True) - def __unicode__(self): - return u"%s <-> %s" % (self.file1, self.file2) + def __unicode__(self): + return u"%s <-> %s" % (self.file1, self.file2) - # XXX: needs a unique functional index as well, see the migration! - class Meta: - db_table = 'docsalias' - verbose_name_plural='Doc page aliases' + # XXX: needs a unique functional index as well, see the migration! + class Meta: + db_table = 'docsalias' + verbose_name_plural='Doc page aliases' diff --git a/pgweb/docs/struct.py b/pgweb/docs/struct.py index 87e5635e..942d0573 100644 --- a/pgweb/docs/struct.py +++ b/pgweb/docs/struct.py @@ -2,52 +2,52 @@ from django.db import connection from pgweb.core.models import Version def get_struct(): - currentversion = Version.objects.get(current=True) + currentversion = Version.objects.get(current=True) - # Can't use a model here, because we don't (for some reason) have a - # hard link to the versions table here - # Make sure we exclude the /devel/ docs because they are blocked by - # robots.txt, and thus will cause tohusands of warnings in search - # engines. - curs = connection.cursor() - curs.execute("SELECT d.version, d.file, v.docsloaded, v.testing FROM docs d INNER JOIN core_version v ON v.tree=d.version WHERE version > 0 ORDER BY d.version DESC") + # Can't use a model here, because we don't (for some reason) have a + # hard link to the versions table here + # Make sure we exclude the /devel/ docs because they are blocked by + # robots.txt, and thus will cause thousands of warnings in search + # engines. 
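+ # (The query below yields one row per (version, file) pair for every
+ # non-devel version, newest version first.)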
+ curs = connection.cursor() + curs.execute("SELECT d.version, d.file, v.docsloaded, v.testing FROM docs d INNER JOIN core_version v ON v.tree=d.version WHERE version > 0 ORDER BY d.version DESC") - # Start priority is higher than average but lower than what we assign - # to the current version of the docs. - docprio = 0.8 - lastversion = None + # Start priority is higher than average but lower than what we assign + # to the current version of the docs. + docprio = 0.8 + lastversion = None - for version, filename, loaded, testing in curs.fetchall(): - # Decrease the priority with 0.1 for every version of the docs - # we move back in time, until we reach 0.1. At 0.1 it's unlikely - # to show up in a general search, but still possible to reach - # through version specific searching for example. - if lastversion != version: - if docprio > 0.2: - docprio -= 0.1 - lastversion = version + for version, filename, loaded, testing in curs.fetchall(): + # Decrease the priority by 0.1 for every version of the docs + # we move back in time, until we reach 0.1. At 0.1 it's unlikely + # to show up in a general search, but still possible to reach + # through version-specific searching, for example. + if lastversion != version: + if docprio > 0.2: + docprio -= 0.1 + lastversion = version - if version >= 10: - version = int(version) + if version >= 10: + version = int(version) - yield ('docs/%s/%s' % (version, filename), - testing and 0.1 or docprio, # beta/rc versions always get 0.1 in prio - loaded) + yield ('docs/%s/%s' % (version, filename), + testing and 0.1 or docprio, # beta/rc versions always get 0.1 in prio + loaded) - # Also yield the current version urls, with the highest - # possible priority - if version == currentversion.tree: - yield ('docs/current/%s' % filename, - 1.0, loaded) + # Also yield the current version urls, with the highest + # possible priority + if version == currentversion.tree: + yield ('docs/current/%s' % filename, + 1.0, loaded) # For our internal sitemap (used only by our own search engine), # include the devel version of the docs (and only those, since the # other versions are already included) def get_internal_struct(): - curs = connection.cursor() - curs.execute("SELECT d.file, v.docsloaded FROM docs d INNER JOIN core_version v ON v.tree=d.version WHERE version = 0") + curs = connection.cursor() + curs.execute("SELECT d.file, v.docsloaded FROM docs d INNER JOIN core_version v ON v.tree=d.version WHERE version = 0") - for filename, loaded in curs.fetchall(): - yield ('docs/devel/%s' % (filename, ), - 0.1, - loaded) + for filename, loaded in curs.fetchall(): + yield ('docs/devel/%s' % (filename, ), - 0.1, - loaded) diff --git a/pgweb/docs/views.py b/pgweb/docs/views.py index 6cfb1860..d0936272 100644 --- a/pgweb/docs/views.py +++ b/pgweb/docs/views.py @@ -20,156 +20,156 @@ from forms import DocCommentForm @allow_frames @content_sources('style', "'unsafe-inline'") def docpage(request, version, filename): - loaddate = None - # Get the current version both to map the /current/ url, and to later - # determine if we allow comments on this page. 
- currver = Version.objects.filter(current=True)[0].tree - if version == 'current': - ver = currver - elif version == 'devel': - ver = Decimal(0) - loaddate = Version.objects.get(tree=Decimal(0)).docsloaded - else: - ver = Decimal(version) - if ver == Decimal(0): - raise Http404("Version not found") + loaddate = None + # Get the current version both to map the /current/ url, and to later + # determine if we allow comments on this page. 
+ currver = Version.objects.filter(current=True)[0].tree + if version == 'current': + ver = currver + elif version == 'devel': + ver = Decimal(0) + loaddate = Version.objects.get(tree=Decimal(0)).docsloaded + else: + ver = Decimal(version) + if ver == Decimal(0): + raise Http404("Version not found") - if ver < Decimal("7.1") and ver > Decimal(0): - extension = "htm" - else: - extension = "html" + if ver < Decimal("7.1") and ver > Decimal(0): + extension = "htm" + else: + extension = "html" - if ver < Decimal("7.1") and ver > Decimal(0): - indexname = "postgres.htm" - elif ver == Decimal("7.1"): - indexname = "postgres.html" - else: - indexname = "index.html" + if ver < Decimal("7.1") and ver > Decimal(0): + indexname = "postgres.htm" + elif ver == Decimal("7.1"): + indexname = "postgres.html" + else: + indexname = "index.html" - if ver >= 10 and version.find('.') > -1: - # Version 10 and up, but specified as 10.0 / 11.0 etc, so redirect back without the - # decimal. - return HttpResponsePermanentRedirect("/docs/{0}/{1}.html".format(int(ver), filename)) + if ver >= 10 and version.find('.') > -1: + # Version 10 and up, but specified as 10.0 / 11.0 etc, so redirect back without the + # decimal. + return HttpResponsePermanentRedirect("/docs/{0}/{1}.html".format(int(ver), filename)) - fullname = "%s.%s" % (filename, extension) - page = get_object_or_404(DocPage, version=ver, file=fullname) - versions = DocPage.objects.extra( - where=["file=%s OR file IN (SELECT file2 FROM docsalias WHERE file1=%s) OR file IN (SELECT file1 FROM docsalias WHERE file2=%s)"], - params=[fullname, fullname, fullname], - select={ - 'supported':"COALESCE((SELECT supported FROM core_version v WHERE v.tree=version), 'f')", - 'testing':"COALESCE((SELECT testing FROM core_version v WHERE v.tree=version),0)", - }).order_by('-supported', 'version').only('version', 'file') + fullname = "%s.%s" % (filename, extension) + page = get_object_or_404(DocPage, version=ver, file=fullname) + versions = DocPage.objects.extra( + where=["file=%s OR file IN (SELECT file2 FROM docsalias WHERE file1=%s) OR file IN (SELECT file1 FROM docsalias WHERE file2=%s)"], + params=[fullname, fullname, fullname], + select={ + 'supported':"COALESCE((SELECT supported FROM core_version v WHERE v.tree=version), 'f')", + 'testing':"COALESCE((SELECT testing FROM core_version v WHERE v.tree=version),0)", + }).order_by('-supported', 'version').only('version', 'file') - return render(request, 'docs/docspage.html', { - 'page': page, - 'supported_versions': [v for v in versions if v.supported], - 'devel_versions': [v for v in versions if not v.supported and v.testing], - 'unsupported_versions': [v for v in versions if not v.supported and not v.testing], - 'title': page.title, - 'doc_index_filename': indexname, - 'loaddate': loaddate, - }) + return render(request, 'docs/docspage.html', { + 'page': page, + 'supported_versions': [v for v in versions if v.supported], + 'devel_versions': [v for v in versions if not v.supported and v.testing], + 'unsupported_versions': [v for v in versions if not v.supported and not v.testing], + 'title': page.title, + 'doc_index_filename': indexname, + 'loaddate': loaddate, + }) def docspermanentredirect(request, version, typ, page, *args): - """Provides a permanent redirect from the old static/interactive pages to - the modern pages that do not have said keywords. 
- """ - url = "/docs/%s/" % version - if page: - url += page - return HttpResponsePermanentRedirect(url) + """Provides a permanent redirect from the old static/interactive pages to + the modern pages that do not have said keywords. + """ + url = "/docs/%s/" % version + if page: + url += page + return HttpResponsePermanentRedirect(url) def docsrootpage(request, version): - return docpage(request, version, 'index') + return docpage(request, version, 'index') def redirect_root(request, version): - return HttpResponsePermanentRedirect("/docs/%s/" % version) + return HttpResponsePermanentRedirect("/docs/%s/" % version) def root(request): - versions = Version.objects.filter(Q(supported=True) | Q(testing__gt=0,tree__gt=0)).order_by('-tree') - return render_pgweb(request, 'docs', 'docs/index.html', { - 'versions': versions, - }) + versions = Version.objects.filter(Q(supported=True) | Q(testing__gt=0,tree__gt=0)).order_by('-tree') + return render_pgweb(request, 'docs', 'docs/index.html', { + 'versions': versions, + }) class _VersionPdfWrapper(object): - """ - A wrapper around a version that knows to look for PDF files, and - return their sizes. - """ - def __init__(self, version): - self.__version = version - self.a4pdf = self._find_pdf('A4') - self.uspdf = self._find_pdf('US') - # Some versions have, ahem, strange index filenames - if self.__version.tree < Decimal('6.4'): - self.indexname = 'book01.htm' - elif self.__version.tree < Decimal('7.0'): - self.indexname = 'postgres.htm' - elif self.__version.tree < Decimal('7.2'): - self.indexname = 'postgres.html' - else: - self.indexname = 'index.html' - def __getattr__(self, name): - return getattr(self.__version, name) - def _find_pdf(self, pagetype): - try: - return os.stat('%s/documentation/pdf/%s/postgresql-%s-%s.pdf' % (settings.STATIC_CHECKOUT, self.__version.numtree, self.__version.numtree, pagetype)).st_size - except: - return 0 + """ + A wrapper around a version that knows to look for PDF files, and + return their sizes. 
+ """ + def __init__(self, version): + self.__version = version + self.a4pdf = self._find_pdf('A4') + self.uspdf = self._find_pdf('US') + # Some versions have, ahem, strange index filenames + if self.__version.tree < Decimal('6.4'): + self.indexname = 'book01.htm' + elif self.__version.tree < Decimal('7.0'): + self.indexname = 'postgres.htm' + elif self.__version.tree < Decimal('7.2'): + self.indexname = 'postgres.html' + else: + self.indexname = 'index.html' + def __getattr__(self, name): + return getattr(self.__version, name) + def _find_pdf(self, pagetype): + try: + return os.stat('%s/documentation/pdf/%s/postgresql-%s-%s.pdf' % (settings.STATIC_CHECKOUT, self.__version.numtree, self.__version.numtree, pagetype)).st_size + except: + return 0 def manuals(request): - versions = Version.objects.filter(Q(supported=True) | Q(testing__gt=0,tree__gt=0)).order_by('-tree') - return render_pgweb(request, 'docs', 'docs/manuals.html', { - 'versions': [_VersionPdfWrapper(v) for v in versions], - }) + versions = Version.objects.filter(Q(supported=True) | Q(testing__gt=0,tree__gt=0)).order_by('-tree') + return render_pgweb(request, 'docs', 'docs/manuals.html', { + 'versions': [_VersionPdfWrapper(v) for v in versions], + }) def manualarchive(request): - versions = Version.objects.filter(testing=0,supported=False,tree__gt=0).order_by('-tree') - return render_pgweb(request, 'docs', 'docs/archive.html', { - 'versions': [_VersionPdfWrapper(v) for v in versions], - }) + versions = Version.objects.filter(testing=0,supported=False,tree__gt=0).order_by('-tree') + return render_pgweb(request, 'docs', 'docs/archive.html', { + 'versions': [_VersionPdfWrapper(v) for v in versions], + }) @login_required def commentform(request, itemid, version, filename): - v = get_object_or_404(Version, tree=version) - if not v.supported: - # No docs comments on unsupported versions - return HttpResponseRedirect("/docs/{0}/{1}".format(version, filename)) + v = get_object_or_404(Version, tree=version) + if not v.supported: + # No docs comments on unsupported versions + return HttpResponseRedirect("/docs/{0}/{1}".format(version, filename)) - if request.method == 'POST': - form = DocCommentForm(request.POST) - if form.is_valid(): - if version == '0.0': - version = 'devel' + if request.method == 'POST': + form = DocCommentForm(request.POST) + if form.is_valid(): + if version == '0.0': + version = 'devel' - send_template_mail( - settings.DOCSREPORT_NOREPLY_EMAIL, - settings.DOCSREPORT_EMAIL, - '%s' % form.cleaned_data['shortdesc'], - 'docs/docsbugmail.txt', { - 'version': version, - 'filename': filename, - 'details': form.cleaned_data['details'], - }, - usergenerated=True, - cc=form.cleaned_data['email'], - replyto='%s, %s' % (form.cleaned_data['email'], settings.DOCSREPORT_EMAIL), - sendername='PG Doc comments form' - ) - return render_pgweb(request, 'docs', 'docs/docsbug_completed.html', {}) - else: - form = DocCommentForm(initial={ - 'name': '%s %s' % (request.user.first_name, request.user.last_name), - 'email': request.user.email, - }) + send_template_mail( + settings.DOCSREPORT_NOREPLY_EMAIL, + settings.DOCSREPORT_EMAIL, + '%s' % form.cleaned_data['shortdesc'], + 'docs/docsbugmail.txt', { + 'version': version, + 'filename': filename, + 'details': form.cleaned_data['details'], + }, + usergenerated=True, + cc=form.cleaned_data['email'], + replyto='%s, %s' % (form.cleaned_data['email'], settings.DOCSREPORT_EMAIL), + sendername='PG Doc comments form' + ) + return render_pgweb(request, 'docs', 'docs/docsbug_completed.html', {}) + else: 
+ form = DocCommentForm(initial={ + 'name': '%s %s' % (request.user.first_name, request.user.last_name), + 'email': request.user.email, + }) - return render_pgweb(request, 'docs', 'base/form.html', { - 'form': form, - 'formitemtype': 'documentation comment', - 'operation': 'Submit', - 'form_intro': template_to_string('docs/docsbug.html', { - 'user': request.user, - }), - 'savebutton': 'Send Email', - }) + return render_pgweb(request, 'docs', 'base/form.html', { + 'form': form, + 'formitemtype': 'documentation comment', + 'operation': 'Submit', + 'form_intro': template_to_string('docs/docsbug.html', { + 'user': request.user, + }), + 'savebutton': 'Send Email', + }) diff --git a/pgweb/downloads/admin.py b/pgweb/downloads/admin.py index 3f417b4d..2449acbd 100644 --- a/pgweb/downloads/admin.py +++ b/pgweb/downloads/admin.py @@ -8,49 +8,49 @@ from pgweb.util.admin import PgwebAdmin from models import StackBuilderApp, Category, Product, LicenceType class ProductAdmin(PgwebAdmin): - list_display = ('name', 'org', 'approved', 'lastconfirmed',) - list_filter = ('approved',) - search_fields = ('name', 'description', ) - ordering = ('name', ) + list_display = ('name', 'org', 'approved', 'lastconfirmed',) + list_filter = ('approved',) + search_fields = ('name', 'description', ) + ordering = ('name', ) def duplicate_stackbuilderapp(modeladmin, request, queryset): - # Duplicate each individual selected object, but turn off - # the active flag if it's on. - for o in queryset: - o.id = None # Triggers creation of a new object - o.active = False - o.textid = o.textid + "_new" - o.save() + # Duplicate each individual selected object, but turn off + # the active flag if it's on. + for o in queryset: + o.id = None # Triggers creation of a new object + o.active = False + o.textid = o.textid + "_new" + o.save() duplicate_stackbuilderapp.short_description = "Duplicate application" class StackBuilderAppAdminForm(forms.ModelForm): - class Meta: - model = StackBuilderApp - exclude = () + class Meta: + model = StackBuilderApp + exclude = () - def clean_textid(self): - if not re.match('^[a-z0-9_]*$', self.cleaned_data['textid']): - raise ValidationError('Only lowerchase characters, numbers and underscore allowed!') - return self.cleaned_data['textid'] + def clean_textid(self): + if not re.match('^[a-z0-9_]*$', self.cleaned_data['textid']): + raise ValidationError('Only lowercase characters, numbers and underscore allowed!') + return self.cleaned_data['textid'] - def clean_txtdependencies(self): - if len(self.cleaned_data['txtdependencies']) == 0: - return '' + def clean_txtdependencies(self): + if len(self.cleaned_data['txtdependencies']) == 0: + return '' - deplist = self.cleaned_data['txtdependencies'].split(',') - if len(deplist) != len(set(deplist)): - raise ValidationError('Duplicate dependencies not allowed!') + deplist = self.cleaned_data['txtdependencies'].split(',') + if len(deplist) != len(set(deplist)): + raise ValidationError('Duplicate dependencies not allowed!') - for d in deplist: - if not StackBuilderApp.objects.filter(textid=d).exists(): - raise ValidationError("Dependency '%s' does not exist!" 
% d) + return self.cleaned_data['txtdependencies'] class StackBuilderAppAdmin(admin.ModelAdmin): - list_display = ('textid', 'active', 'name', 'platform', 'version', ) - actions = [duplicate_stackbuilderapp, ] - form = StackBuilderAppAdminForm + list_display = ('textid', 'active', 'name', 'platform', 'version', ) + actions = [duplicate_stackbuilderapp, ] + form = StackBuilderAppAdminForm admin.site.register(Category) admin.site.register(LicenceType) diff --git a/pgweb/downloads/forms.py b/pgweb/downloads/forms.py index 93a952dd..150e0b70 100644 --- a/pgweb/downloads/forms.py +++ b/pgweb/downloads/forms.py @@ -4,12 +4,12 @@ from pgweb.core.models import Organisation from models import Product class ProductForm(forms.ModelForm): - form_intro = """Note that in order to register a new product, you must first register an organisation. + form_intro = """Note that in order to register a new product, you must first register an organisation. If you have not done so, use this form.""" - def __init__(self, *args, **kwargs): - super(ProductForm, self).__init__(*args, **kwargs) - def filter_by_user(self, user): - self.fields['org'].queryset = Organisation.objects.filter(managers=user, approved=True) - class Meta: - model = Product - exclude = ('lastconfirmed', 'approved', ) + def __init__(self, *args, **kwargs): + super(ProductForm, self).__init__(*args, **kwargs) + def filter_by_user(self, user): + self.fields['org'].queryset = Organisation.objects.filter(managers=user, approved=True) + class Meta: + model = Product + exclude = ('lastconfirmed', 'approved', ) diff --git a/pgweb/downloads/models.py b/pgweb/downloads/models.py index 61849e77..682fdd92 100644 --- a/pgweb/downloads/models.py +++ b/pgweb/downloads/models.py @@ -4,85 +4,85 @@ from pgweb.core.models import Organisation class Category(models.Model): - catname = models.CharField(max_length=100, null=False, blank=False) - blurb = models.TextField(null=False, blank=True) + catname = models.CharField(max_length=100, null=False, blank=False) + blurb = models.TextField(null=False, blank=True) - def __unicode__(self): - return self.catname + def __unicode__(self): + return self.catname - class Meta: - ordering = ('catname',) + class Meta: + ordering = ('catname',) class LicenceType(models.Model): - typename = models.CharField(max_length=100, null=False, blank=False) + typename = models.CharField(max_length=100, null=False, blank=False) - def __unicode__(self): - return self.typename + def __unicode__(self): + return self.typename - class Meta: - ordering = ('typename',) + class Meta: + ordering = ('typename',) class Product(models.Model): - name = models.CharField(max_length=100, null=False, blank=False, unique=True) - approved = models.BooleanField(null=False, default=False) - org = models.ForeignKey(Organisation, db_column="publisher_id", null=False, verbose_name="Organisation") - url = models.URLField(null=False, blank=False) - category = models.ForeignKey(Category, null=False) - licencetype = models.ForeignKey(LicenceType, null=False, verbose_name="Licence type") - description = models.TextField(null=False, blank=False) - price = models.CharField(max_length=200, null=False, blank=True) - lastconfirmed = models.DateTimeField(null=False, blank=False, auto_now_add=True) + name = models.CharField(max_length=100, null=False, blank=False, unique=True) + approved = models.BooleanField(null=False, default=False) + org = models.ForeignKey(Organisation, db_column="publisher_id", null=False, verbose_name="Organisation") + url = models.URLField(null=False, 
blank=False) + category = models.ForeignKey(Category, null=False) + licencetype = models.ForeignKey(LicenceType, null=False, verbose_name="Licence type") + description = models.TextField(null=False, blank=False) + price = models.CharField(max_length=200, null=False, blank=True) + lastconfirmed = models.DateTimeField(null=False, blank=False, auto_now_add=True) - send_notification = True - markdown_fields = ('description', ) + send_notification = True + markdown_fields = ('description', ) - def __unicode__(self): - return self.name + def __unicode__(self): + return self.name - def verify_submitter(self, user): - return (len(self.org.managers.filter(pk=user.pk)) == 1) + def verify_submitter(self, user): + return (len(self.org.managers.filter(pk=user.pk)) == 1) - class Meta: - ordering = ('name',) + class Meta: + ordering = ('name',) class StackBuilderApp(models.Model): - textid = models.CharField(max_length=100, null=False, blank=False) - version = models.CharField(max_length=20, null=False, blank=False) - platform = models.CharField(max_length=20, null=False, blank=False, - choices= (('windows', 'Windows (32-bit)'), ('windows-x64', 'Windows (64-bit)'), ('osx', 'Mac OS X'), - ('linux', 'Linux (32-bit)'), ('linux-x64', 'Linux (64-bit)')) - ) - secondaryplatform = models.CharField(max_length=20, null=False, blank=True, - choices= (('', 'None'), ('windows', 'Windows (32-bit)'), ('windows-x64', 'Windows (64-bit)'), - ('osx', 'Mac OS X'), ('linux', 'Linux (32-bit)'), ('linux-x64', 'Linux (64-bit)')) - ) - name = models.CharField(max_length=500, null=False, blank=False) - active = models.BooleanField(null=False, blank=False, default=True) - description = models.TextField(null=False, blank=False) - category = models.CharField(max_length=100, null=False, blank=False) - pgversion = models.CharField(max_length=5, null=False, blank=True) - edbversion = models.CharField(max_length=5, null=False, blank=True) - format = models.CharField(max_length=5, null=False, blank=False, - choices = (('bin', 'Linux .bin'), ('app', 'Mac .app'), - ('pkg', 'Mac .pkg'), ('mpkg', 'Mac .mpkg'), - ('exe', 'Windows .exe'), ('msi', 'Windows .msi')) - ) - installoptions = models.CharField(max_length=500, null=False, blank=True) - upgradeoptions = models.CharField(max_length=500, null=False, blank=True) - checksum = models.CharField(max_length=32, null=False, blank=False) - mirrorpath = models.CharField(max_length=500, null=False, blank=True) - alturl = models.URLField(max_length=500, null=False, blank=True) - txtdependencies = models.CharField(max_length=1000, null=False, blank=True, - verbose_name='Dependencies', - help_text='Comma separated list of text dependencies, no spaces!') - versionkey = models.CharField(max_length=500, null=False, blank=False) - manifesturl = models.URLField(max_length=500, null=False, blank=True) + textid = models.CharField(max_length=100, null=False, blank=False) + version = models.CharField(max_length=20, null=False, blank=False) + platform = models.CharField(max_length=20, null=False, blank=False, + choices= (('windows', 'Windows (32-bit)'), ('windows-x64', 'Windows (64-bit)'), ('osx', 'Mac OS X'), + ('linux', 'Linux (32-bit)'), ('linux-x64', 'Linux (64-bit)')) + ) + secondaryplatform = models.CharField(max_length=20, null=False, blank=True, + choices= (('', 'None'), ('windows', 'Windows (32-bit)'), ('windows-x64', 'Windows (64-bit)'), + ('osx', 'Mac OS X'), ('linux', 'Linux (32-bit)'), ('linux-x64', 'Linux (64-bit)')) + ) + name = models.CharField(max_length=500, null=False, blank=False) + active 
= models.BooleanField(null=False, blank=False, default=True) + description = models.TextField(null=False, blank=False) + category = models.CharField(max_length=100, null=False, blank=False) + pgversion = models.CharField(max_length=5, null=False, blank=True) + edbversion = models.CharField(max_length=5, null=False, blank=True) + format = models.CharField(max_length=5, null=False, blank=False, + choices = (('bin', 'Linux .bin'), ('app', 'Mac .app'), + ('pkg', 'Mac .pkg'), ('mpkg', 'Mac .mpkg'), + ('exe', 'Windows .exe'), ('msi', 'Windows .msi')) + ) + installoptions = models.CharField(max_length=500, null=False, blank=True) + upgradeoptions = models.CharField(max_length=500, null=False, blank=True) + checksum = models.CharField(max_length=32, null=False, blank=False) + mirrorpath = models.CharField(max_length=500, null=False, blank=True) + alturl = models.URLField(max_length=500, null=False, blank=True) + txtdependencies = models.CharField(max_length=1000, null=False, blank=True, + verbose_name='Dependencies', + help_text='Comma separated list of text dependencies, no spaces!') + versionkey = models.CharField(max_length=500, null=False, blank=False) + manifesturl = models.URLField(max_length=500, null=False, blank=True) - purge_urls = ('/applications-v2.xml', ) + purge_urls = ('/applications-v2.xml', ) - def __unicode__(self): - return "%s %s %s" % (self.textid, self.version, self.platform) + def __unicode__(self): + return "%s %s %s" % (self.textid, self.version, self.platform) - class Meta: - unique_together = ('textid', 'version', 'platform', ) - ordering = ('textid', 'name', 'platform', ) + class Meta: + unique_together = ('textid', 'version', 'platform', ) + ordering = ('textid', 'name', 'platform', ) diff --git a/pgweb/downloads/struct.py b/pgweb/downloads/struct.py index 0f717f2e..27a92072 100644 --- a/pgweb/downloads/struct.py +++ b/pgweb/downloads/struct.py @@ -1,10 +1,10 @@ from models import Category def get_struct(): - # Products - for c in Category.objects.all(): - yield ('download/products/%s/' % c.id, - 0.3) + # Products + for c in Category.objects.all(): + yield ('download/products/%s/' % c.id, + 0.3) - # Don't index the ftp browser for now - it doesn't really contain - # anything useful to search + # Don't index the ftp browser for now - it doesn't really contain + # anything useful to search diff --git a/pgweb/downloads/views.py b/pgweb/downloads/views.py index 051afc82..b51f829e 100644 --- a/pgweb/downloads/views.py +++ b/pgweb/downloads/views.py @@ -21,101 +21,101 @@ from forms import ProductForm # FTP browser ####### def ftpbrowser(request, subpath): - if subpath: - # An actual path has been selected. Fancy! + if subpath: + # An actual path has been selected. Fancy! 
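+ # (An empty subpath, by contrast, would mean the FTP root itself.)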
- if subpath.find('..') > -1: - # Just claim it doesn't exist if the user tries to do this - # type of bad thing - raise Http404 - subpath = subpath.strip('/') - else: - subpath="" + if subpath.find('..') > -1: + # Just claim it doesn't exist if the user tries to do this + # type of bad thing + raise Http404 + subpath = subpath.strip('/') + else: + subpath="" - # Pickle up the list of things we need - try: - f = open(settings.FTP_PICKLE, "rb") - allnodes = pickle.load(f) - f.close() - except Exception, e: - return HttpServerError(request, "Failed to load ftp site information: %s" % e) + # Pickle up the list of things we need + try: + f = open(settings.FTP_PICKLE, "rb") + allnodes = pickle.load(f) + f.close() + except Exception, e: + return HttpServerError(request, "Failed to load ftp site information: %s" % e) - # An incoming subpath may either be canonical, or have one or more elements - # present that are actually symlinks. For each element of the path, test to - # see if it is present in the pickle. If not, look for a symlink entry with - # and if present, replace the original entry with the symlink target. - canonpath = '' - if subpath != '': - parent = '' - for d in subpath.split('/'): - # Check if allnodes contains a node matching the path - if allnodes[parent].has_key(d): - if allnodes[parent][d]['t'] == 'd': - canonpath = os.path.join(canonpath, d) - elif allnodes[parent][d]['t'] == 'l': - canonpath = os.path.join(canonpath, allnodes[parent][d]['d']).strip('/') - else: - # There's a matching node, but it's not a link or a directory - raise Http404 + # An incoming subpath may either be canonical, or have one or more elements + # present that are actually symlinks. For each element of the path, test to + # see if it is present in the pickle. If not, look for a symlink entry, + # and if present, replace the original entry with the symlink target. 
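+ # (Illustration with made-up entries: if 'latest' is recorded as a symlink
+ # to 'source/v11', a request for 'latest/README' canonicalises to
+ # 'source/v11/README' and is then redirected to the canonical URL below.)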
+    canonpath = ''
+    if subpath != '':
+        parent = ''
+        for d in subpath.split('/'):
+            # Check if allnodes contains a node matching the path
+            if allnodes[parent].has_key(d):
+                if allnodes[parent][d]['t'] == 'd':
+                    canonpath = os.path.join(canonpath, d)
+                elif allnodes[parent][d]['t'] == 'l':
+                    canonpath = os.path.join(canonpath, allnodes[parent][d]['d']).strip('/')
+                else:
+                    # There's a matching node, but it's not a link or a directory
+                    raise Http404
 
-				parent = canonpath
-			else:
-				# There's no matching node
-				raise Http404
+                parent = canonpath
+            else:
+                # There's no matching node
+                raise Http404
 
-	# If we wound up with a canonical path that doesn't match the original request,
-	# redirect the user
-	canonpath = canonpath.strip('/')
-	if subpath != canonpath:
-		return HttpResponseRedirect('/ftp/' + canonpath)
+    # If we wound up with a canonical path that doesn't match the original request,
+    # redirect the user
+    canonpath = canonpath.strip('/')
+    if subpath != canonpath:
+        return HttpResponseRedirect('/ftp/' + canonpath)
 
-	node = allnodes[subpath]
-	del allnodes
+    node = allnodes[subpath]
+    del allnodes
 
-	# Add all directories
-	directories = [{'link': k, 'url': k, 'type': 'd'} for k,v in node.items() if v['t'] == 'd']
-	# Add all symlinks (only directories supported)
-	directories.extend([{'link': k, 'url': v['d'], 'type': 'l'} for k,v in node.items() if v['t'] == 'l'])
+    # Add all directories
+    directories = [{'link': k, 'url': k, 'type': 'd'} for k,v in node.items() if v['t'] == 'd']
+    # Add all symlinks (only directories supported)
+    directories.extend([{'link': k, 'url': v['d'], 'type': 'l'} for k,v in node.items() if v['t'] == 'l'])
 
-	# A ittle early sorting wouldn't go amiss, so .. ends up at the top
-	directories.sort(key = version_sort, reverse=True)
+    # A little early sorting wouldn't go amiss, so .. ends up at the top
+    directories.sort(key = version_sort, reverse=True)
 
-	# Add a link to the parent directory
-	if subpath:
-		directories.insert(0, {'link':'[Parent Directory]', 'url':'..'})
+    # Add a link to the parent directory
+    if subpath:
+        directories.insert(0, {'link':'[Parent Directory]', 'url':'..'})
 
-	# Fetch files
-	files = [{'name': k, 'mtime': v['d'], 'size': v['s']} for k,v in node.items() if v['t'] == 'f']
+    # Fetch files
+    files = [{'name': k, 'mtime': v['d'], 'size': v['s']} for k,v in node.items() if v['t'] == 'f']
 
-	breadcrumbs = []
-	if subpath:
-		breadroot = ""
-		for pathpiece in subpath.split('/'):
-			if not pathpiece:
-				# Trailing slash will give out an empty pathpiece
-				continue
-			if breadroot:
-				breadroot = "%s/%s" % (breadroot, pathpiece)
-			else:
-				breadroot = pathpiece
-			breadcrumbs.append({'name': pathpiece, 'path': breadroot});
+    breadcrumbs = []
+    if subpath:
+        breadroot = ""
+        for pathpiece in subpath.split('/'):
+            if not pathpiece:
+                # Trailing slash will give out an empty pathpiece
+                continue
+            if breadroot:
+                breadroot = "%s/%s" % (breadroot, pathpiece)
+            else:
+                breadroot = pathpiece
+            breadcrumbs.append({'name': pathpiece, 'path': breadroot})
 
-	# Check if there are any "content files" we should render directly on the webpage
-	file_readme = (node.has_key('README') and node['README']['t']=='f') and node['README']['c'] or None;
-	file_message = (node.has_key('.message') and node['.message']['t']=='f') and node['.message']['c'] or None;
-	file_maintainer = (node.has_key('CURRENT_MAINTAINER') and node['CURRENT_MAINTAINER']['t'] == 'f') and node['CURRENT_MAINTAINER']['c'] or None;
+    # Check if there are any "content files" we should render directly on the webpage
+    file_readme = (node.has_key('README') and node['README']['t']=='f') and node['README']['c'] or None
+    file_message = (node.has_key('.message') and node['.message']['t']=='f') and node['.message']['c'] or None
+    file_maintainer = (node.has_key('CURRENT_MAINTAINER') and node['CURRENT_MAINTAINER']['t'] == 'f') and node['CURRENT_MAINTAINER']['c'] or None
 
-	del node
+    del node
 
-	return render_pgweb(request, 'download', 'downloads/ftpbrowser.html', {
-		'basepath': subpath.rstrip('/'),
-		'directories': directories,
-		'files': sorted(files),
-		'breadcrumbs': breadcrumbs,
-		'readme': file_readme,
-		'messagefile': file_message,
-		'maintainer': file_maintainer,
-	})
+    return render_pgweb(request, 'download', 'downloads/ftpbrowser.html', {
+        'basepath': subpath.rstrip('/'),
+        'directories': directories,
+        'files': sorted(files),
+        'breadcrumbs': breadcrumbs,
+        'readme': file_readme,
+        'messagefile': file_message,
+        'maintainer': file_maintainer,
+    })
 
 
 # Accept an upload of the ftpsite pickle. This is fairly resource consuming,
@@ -125,135 +125,135 @@ def ftpbrowser(request, subpath):
 # file in parallel.
 @csrf_exempt
 def uploadftp(request):
-	if request.method != 'PUT':
-		return HttpServerError(request, "Invalid method")
-	if not request.META['REMOTE_ADDR'] in settings.FTP_MASTERS:
-		return HttpServerError(request, "Invalid client address")
-	# We have the data in request.body. Attempt to load it as
-	# a pickle to make sure it's properly formatted
-	pickle.loads(request.body)
+    if request.method != 'PUT':
+        return HttpServerError(request, "Invalid method")
+    if not request.META['REMOTE_ADDR'] in settings.FTP_MASTERS:
+        return HttpServerError(request, "Invalid client address")
+    # We have the data in request.body. Attempt to load it as
+    # a pickle to make sure it's properly formatted
+    pickle.loads(request.body)
 
-	# Next, check if it's the same as the current file
-	f = open(settings.FTP_PICKLE, "rb")
-	x = f.read()
-	f.close()
-	if x == request.body:
-		# Don't rewrite the file or purge any data if nothing changed
-		return HttpResponse("NOT CHANGED", content_type="text/plain")
+    # Next, check if it's the same as the current file
+    f = open(settings.FTP_PICKLE, "rb")
+    x = f.read()
+    f.close()
+    if x == request.body:
+        # Don't rewrite the file or purge any data if nothing changed
+        return HttpResponse("NOT CHANGED", content_type="text/plain")
 
-	# File has changed - let's write it!
-	f = open("%s.new" % settings.FTP_PICKLE, "wb")
-	f.write(request.body)
-	f.close()
-	os.rename("%s.new" % settings.FTP_PICKLE, settings.FTP_PICKLE)
+    # File has changed - let's write it!
+    f = open("%s.new" % settings.FTP_PICKLE, "wb")
+    f.write(request.body)
+    f.close()
+    os.rename("%s.new" % settings.FTP_PICKLE, settings.FTP_PICKLE)
 
-	# Purge it out of varnish so we start responding right away
-	varnish_purge("/ftp")
+    # Purge it out of varnish so we start responding right away
+    varnish_purge("/ftp")
 
-	# Finally, indicate to the client that we're happy
-	return HttpResponse("OK", content_type="text/plain")
+    # Finally, indicate to the client that we're happy
+    return HttpResponse("OK", content_type="text/plain")
 
 
 @csrf_exempt
 def uploadyum(request):
-	if request.method != 'PUT':
-		return HttpServerError(request, "Invalid method")
-	if not request.META['REMOTE_ADDR'] in settings.FTP_MASTERS:
-		return HttpServerError(request, "Invalid client address")
-	# We have the data in request.body. Attempt to load it as
-	# json to ensure correct format.
-	json.loads(request.body)
+    if request.method != 'PUT':
+        return HttpServerError(request, "Invalid method")
+    if not request.META['REMOTE_ADDR'] in settings.FTP_MASTERS:
+        return HttpServerError(request, "Invalid client address")
+    # We have the data in request.body. Attempt to load it as
+    # json to ensure correct format.
+    json.loads(request.body)
 
-	# Next, check if it's the same as the current file
-	if os.path.isfile(settings.YUM_JSON):
-		with open(settings.YUM_JSON, "r") as f:
-			if f.read() == request.body:
-				# Don't rewrite the file or purge any data if nothing changed
-				return HttpResponse("NOT CHANGED", content_type="text/plain")
+    # Next, check if it's the same as the current file
+    if os.path.isfile(settings.YUM_JSON):
+        with open(settings.YUM_JSON, "r") as f:
+            if f.read() == request.body:
+                # Don't rewrite the file or purge any data if nothing changed
+                return HttpResponse("NOT CHANGED", content_type="text/plain")
 
-	# File has changed - let's write it!
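Both upload views rely on the same write-sidecar-then-rename idiom, shown here in isolation (the fsync is an extra precaution of ours, not something the views above do): os.rename() is atomic when source and target live on the same filesystem, so a concurrent reader sees either the old file or the complete new one, never a half-written file.

    import os

    def replace_atomically(path, data):
        tmp = "%s.new" % path
        f = open(tmp, "wb")
        f.write(data)
        f.flush()
        os.fsync(f.fileno())  # extra safety on crash; not done in the views above
        f.close()
        os.rename(tmp, path)  # atomic swap into place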
+    with open("%s.new" % settings.YUM_JSON, "w") as f:
+        f.write(request.body)
 
-	os.rename("%s.new" % settings.YUM_JSON, settings.YUM_JSON)
+    os.rename("%s.new" % settings.YUM_JSON, settings.YUM_JSON)
 
-	# Purge it out of varnish so we start responding right away
-	varnish_purge("/download/js/yum.js")
+    # Purge it out of varnish so we start responding right away
+    varnish_purge("/download/js/yum.js")
 
-	# Finally, indicate to the client that we're happy
-	return HttpResponse("OK", content_type="text/plain")
+    # Finally, indicate to the client that we're happy
+    return HttpResponse("OK", content_type="text/plain")
 
 
 @nocache
 def mirrorselect(request, path):
-	# Old access to mirrors will just redirect to the main ftp site.
-	# We don't really need it anymore, but the cost of keeping it is
-	# very low...
-	return HttpResponseRedirect("https://ftp.postgresql.org/pub/%s" % path)
+    # Old access to mirrors will just redirect to the main ftp site.
+    # We don't really need it anymore, but the cost of keeping it is
+    # very low...
+    return HttpResponseRedirect("https://ftp.postgresql.org/pub/%s" % path)
 
 
 # Render javascript for yum downloads
 def yum_js(request):
-	with open(settings.YUM_JSON) as f:
-		jsonstr = f.read()
-	return render(request, 'downloads/js/yum.js', {
-		'json': jsonstr,
-		'supported_versions': ','.join([str(v.numtree) for v in Version.objects.filter(supported=True)]),
-	}, content_type='application/json')
+    with open(settings.YUM_JSON) as f:
+        jsonstr = f.read()
+    return render(request, 'downloads/js/yum.js', {
+        'json': jsonstr,
+        'supported_versions': ','.join([str(v.numtree) for v in Version.objects.filter(supported=True)]),
+    }, content_type='application/json')
 
 
 #######
 # Product catalogue
 #######
 
 def categorylist(request):
-	categories = Category.objects.all()
-	return render_pgweb(request, 'download', 'downloads/categorylist.html', {
-		'categories': categories,
-	})
+    categories = Category.objects.all()
+    return render_pgweb(request, 'download', 'downloads/categorylist.html', {
+        'categories': categories,
+    })
 
 
 def productlist(request, catid, junk=None):
-	category = get_object_or_404(Category, pk=catid)
-	products = Product.objects.select_related('org','licencetype').filter(category=category, approved=True)
-	return render_pgweb(request, 'download', 'downloads/productlist.html', {
-		'category': category,
-		'products': products,
-		'productcount': len(products),
-	})
+    category = get_object_or_404(Category, pk=catid)
+    products = Product.objects.select_related('org','licencetype').filter(category=category, approved=True)
+    return render_pgweb(request, 'download', 'downloads/productlist.html', {
+        'category': category,
+        'products': products,
+        'productcount': len(products),
+    })
 
 
 @login_required
 def productform(request, itemid):
-	return simple_form(Product, itemid, request, ProductForm,
-					   redirect='/account/edit/products/')
+    return simple_form(Product, itemid, request, ProductForm,
+                       redirect='/account/edit/products/')
 
 
 #######
 # Stackbuilder
 #######
 
 def applications_v2_xml(request):
-	all_apps = StackBuilderApp.objects.select_related().filter(active=True)
+    all_apps = StackBuilderApp.objects.select_related().filter(active=True)
 
-	resp = HttpResponse(content_type='text/xml')
-	x = PgXmlHelper(resp, skipempty=True)
-	x.startDocument()
-	x.startElement('applications', {})
-	for a in all_apps:
-		x.startElement('application', {})
-		x.add_xml_element('id', a.textid)
-		x.add_xml_element('platform', a.platform)
-		x.add_xml_element('secondaryplatform', a.secondaryplatform)
-		x.add_xml_element('version', a.version)
-		x.add_xml_element('name', a.name)
-		x.add_xml_element('description', a.description)
-		x.add_xml_element('category', a.category)
-		x.add_xml_element('pgversion', a.pgversion)
-		x.add_xml_element('edbversion', a.edbversion)
-		x.add_xml_element('format', a.format)
-		x.add_xml_element('installoptions', a.installoptions)
-		x.add_xml_element('upgradeoptions', a.upgradeoptions)
-		x.add_xml_element('checksum', a.checksum)
-		x.add_xml_element('mirrorpath', a.mirrorpath)
-		x.add_xml_element('alturl', a.alturl)
-		x.add_xml_element('versionkey', a.versionkey)
-		x.add_xml_element('manifesturl', a.manifesturl)
-		for dep in a.txtdependencies.split(','):
-			x.add_xml_element('dependency', dep)
-		x.endElement('application')
-	x.endElement('applications')
-	x.endDocument()
-	return resp
+    resp = HttpResponse(content_type='text/xml')
+    x = PgXmlHelper(resp, skipempty=True)
+    x.startDocument()
+    x.startElement('applications', {})
+    for a in all_apps:
+        x.startElement('application', {})
+        x.add_xml_element('id', a.textid)
+        x.add_xml_element('platform', a.platform)
+        x.add_xml_element('secondaryplatform', a.secondaryplatform)
+        x.add_xml_element('version', a.version)
+        x.add_xml_element('name', a.name)
+        x.add_xml_element('description', a.description)
+        x.add_xml_element('category', a.category)
+        x.add_xml_element('pgversion', a.pgversion)
+        x.add_xml_element('edbversion', a.edbversion)
+        x.add_xml_element('format', a.format)
+        x.add_xml_element('installoptions', a.installoptions)
+        x.add_xml_element('upgradeoptions', a.upgradeoptions)
+        x.add_xml_element('checksum', a.checksum)
+        x.add_xml_element('mirrorpath', a.mirrorpath)
+        x.add_xml_element('alturl', a.alturl)
+        x.add_xml_element('versionkey', a.versionkey)
+        x.add_xml_element('manifesturl', a.manifesturl)
+        for dep in a.txtdependencies.split(','):
+            x.add_xml_element('dependency', dep)
+        x.endElement('application')
+    x.endElement('applications')
+    x.endDocument()
+    return resp
diff --git a/pgweb/events/admin.py b/pgweb/events/admin.py
index dd2084fa..2ac51df1 100644
--- a/pgweb/events/admin.py
+++ b/pgweb/events/admin.py
@@ -5,35 +5,35 @@ from pgweb.util.admin import PgwebAdmin
 from models import Event
 
 def approve_event(modeladmin, request, queryset):
-	# We need to do this in a loop even though it's less efficient,
-	# since using queryset.update() will not send the moderation messages.
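The loop matters because queryset.update() issues one UPDATE statement and bypasses Model.save(), so save-time signal handlers never fire; pgweb's moderation mail hangs off those signals (see pgweb/util/signals.py). A minimal hypothetical handler of that shape, not the project's actual wiring:

    from django.db.models.signals import pre_save
    from django.dispatch import receiver

    from models import Event  # as imported at the top of this module

    @receiver(pre_save, sender=Event)
    def on_event_save(sender, instance, **kwargs):
        # Runs once per e.save() in the loop below; a bulk
        # queryset.update() would never reach this point.
        pass  # inspect the change and queue the moderation notification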
+    for e in queryset:
+        e.approved = True
+        e.save()
 
 approve_event.short_description = 'Approve event'
 
 class EventAdminForm(forms.ModelForm):
-	class Meta:
-		model = Event
-		exclude = ()
+    class Meta:
+        model = Event
+        exclude = ()
 
-	def clean(self):
-		cleaned_data = super(EventAdminForm, self).clean()
-		if not cleaned_data.get('isonline'):
-			if not cleaned_data.get('city'):
-				self._errors['city'] = self.error_class(['City must be specified for non-online events'])
-				del cleaned_data['city']
-			if not cleaned_data.get('country'):
-				self._errors['country'] = self.error_class(['Country must be specified for non-online events'])
-				del cleaned_data['country']
-		return cleaned_data
+    def clean(self):
+        cleaned_data = super(EventAdminForm, self).clean()
+        if not cleaned_data.get('isonline'):
+            if not cleaned_data.get('city'):
+                self._errors['city'] = self.error_class(['City must be specified for non-online events'])
+                del cleaned_data['city']
+            if not cleaned_data.get('country'):
+                self._errors['country'] = self.error_class(['Country must be specified for non-online events'])
+                del cleaned_data['country']
+        return cleaned_data
 
 class EventAdmin(PgwebAdmin):
-	list_display = ('title', 'org', 'startdate', 'enddate', 'approved',)
-	list_filter = ('approved',)
-	search_fields = ('summary', 'details', 'title', )
-	actions = [approve_event, ]
-	form = EventAdminForm
+    list_display = ('title', 'org', 'startdate', 'enddate', 'approved',)
+    list_filter = ('approved',)
+    search_fields = ('summary', 'details', 'title', )
+    actions = [approve_event, ]
+    form = EventAdminForm
 
 admin.site.register(Event, EventAdmin)
diff --git a/pgweb/events/feeds.py b/pgweb/events/feeds.py
index 6273975e..21aa0d4b 100644
--- a/pgweb/events/feeds.py
+++ b/pgweb/events/feeds.py
@@ -5,17 +5,17 @@ from models import Event
 from datetime import datetime, time
 
 class EventFeed(Feed):
-	title = description = "PostgreSQL events"
-	link = "https://www.postgresql.org/"
+    title = description = "PostgreSQL events"
+    link = "https://www.postgresql.org/"
 
-	description_template = 'events/rss_description.html'
-	title_template = 'events/rss_title.html'
+    description_template = 'events/rss_description.html'
+    title_template = 'events/rss_title.html'
 
-	def items(self):
-		return Event.objects.filter(approved=True)[:10]
+    def items(self):
+        return Event.objects.filter(approved=True)[:10]
 
-	def item_link(self, obj):
-		return "https://www.postgresql.org/about/event/%s/" % obj.id
+    def item_link(self, obj):
+        return "https://www.postgresql.org/about/event/%s/" % obj.id
 
-	def item_pubdate(self, obj):
-		return datetime.combine(obj.startdate,time.min)
+    def item_pubdate(self, obj):
+        return datetime.combine(obj.startdate,time.min)
diff --git a/pgweb/events/forms.py b/pgweb/events/forms.py
index 9dfce0c8..460c5a76 100644
--- a/pgweb/events/forms.py
+++ b/pgweb/events/forms.py
@@ -5,45 +5,45 @@ from pgweb.core.models import Organisation
 from models import Event
 
 class EventForm(forms.ModelForm):
-	toggle_fields = [
-		{
-			'name': 'isonline',
-			'invert': True,
-			'fields': ['city', 'state', 'country',]
-		},
-	]
-	def __init__(self, *args, **kwargs):
-		super(EventForm, self).__init__(*args, **kwargs)
-	def filter_by_user(self, user):
-		self.fields['org'].queryset = Organisation.objects.filter(managers=user, approved=True)
+    toggle_fields = [
+        {
+            'name': 'isonline',
+            'invert': True,
+            'fields': ['city', 'state', 'country',]
+        },
+    ]
+    def __init__(self, *args, **kwargs):
+        super(EventForm, self).__init__(*args, **kwargs)
+    def filter_by_user(self, user):
+        self.fields['org'].queryset = Organisation.objects.filter(managers=user, approved=True)
 
-	def clean(self):
-		cleaned_data = super(EventForm, self).clean()
-		if not cleaned_data.get('isonline'):
-			# Non online events require city and country
-			# (we don't require state, since many countries have no such thing)
-			if not cleaned_data.get('city'):
-				self._errors['city'] = self.error_class(['City must be specified for non-online events'])
-				del cleaned_data['city']
-			if not cleaned_data.get('country'):
-				self._errors['country'] = self.error_class(['Country must be specified for non-online events'])
-				del cleaned_data['country']
-		return cleaned_data
+    def clean(self):
+        cleaned_data = super(EventForm, self).clean()
+        if not cleaned_data.get('isonline'):
+            # Non-online events require city and country
+            # (we don't require state, since many countries have no such thing)
+            if not cleaned_data.get('city'):
+                self._errors['city'] = self.error_class(['City must be specified for non-online events'])
+                del cleaned_data['city']
+            if not cleaned_data.get('country'):
+                self._errors['country'] = self.error_class(['Country must be specified for non-online events'])
+                del cleaned_data['country']
+        return cleaned_data
 
-	def clean_startdate(self):
-		if self.instance.pk and self.instance.approved:
-			if self.cleaned_data['startdate'] != self.instance.startdate:
-				raise ValidationError("You cannot change the dates on events that have been approved")
-		return self.cleaned_data['startdate']
+    def clean_startdate(self):
+        if self.instance.pk and self.instance.approved:
+            if self.cleaned_data['startdate'] != self.instance.startdate:
+                raise ValidationError("You cannot change the dates on events that have been approved")
+        return self.cleaned_data['startdate']
 
-	def clean_enddate(self):
-		if self.instance.pk and self.instance.approved:
-			if self.cleaned_data['enddate'] != self.instance.enddate:
-				raise ValidationError("You cannot change the dates on events that have been approved")
-		if self.cleaned_data.has_key('startdate') and self.cleaned_data['enddate'] < self.cleaned_data['startdate']:
-			raise ValidationError("End date cannot be before start date!")
-		return self.cleaned_data['enddate']
+    def clean_enddate(self):
+        if self.instance.pk and self.instance.approved:
+            if self.cleaned_data['enddate'] != self.instance.enddate:
+                raise ValidationError("You cannot change the dates on events that have been approved")
+        if self.cleaned_data.has_key('startdate') and self.cleaned_data['enddate'] < self.cleaned_data['startdate']:
+            raise ValidationError("End date cannot be before start date!")
+        return self.cleaned_data['enddate']
 
-	class Meta:
-		model = Event
-		exclude = ('submitter', 'approved', 'description_for_badged')
+    class Meta:
+        model = Event
+        exclude = ('submitter', 'approved', 'description_for_badged')
diff --git a/pgweb/events/models.py b/pgweb/events/models.py
index 8969afb5..f956d931 100644
--- a/pgweb/events/models.py
+++ b/pgweb/events/models.py
@@ -3,68 +3,68 @@ from django.db import models
 from pgweb.core.models import Country, Language, Organisation
 
 class Event(models.Model):
-	approved = models.BooleanField(null=False, blank=False, default=False)
+    approved = models.BooleanField(null=False, blank=False, default=False)
 
-	org = models.ForeignKey(Organisation, null=False, blank=False, verbose_name="Organisation", help_text="If no organisations are listed, please check the organisation list and contact the organisation manager or webmaster@postgresql.org if none are listed.")
-	title = models.CharField(max_length=100, null=False, blank=False)
-	isonline = models.BooleanField(null=False, default=False, verbose_name="Online event")
-	city = models.CharField(max_length=50, null=False, blank=True)
-	state = models.CharField(max_length=50, null=False, blank=True)
-	country = models.ForeignKey(Country, null=True, blank=True)
-	language = models.ForeignKey(Language, null=True, blank=True, default='eng', help_text="Primary language for event. When multiple languages, specify this in the event description")
+    org = models.ForeignKey(Organisation, null=False, blank=False, verbose_name="Organisation", help_text="If no organisations are listed, please check the organisation list and contact the organisation manager or webmaster@postgresql.org if none are listed.")
+    title = models.CharField(max_length=100, null=False, blank=False)
+    isonline = models.BooleanField(null=False, default=False, verbose_name="Online event")
+    city = models.CharField(max_length=50, null=False, blank=True)
+    state = models.CharField(max_length=50, null=False, blank=True)
+    country = models.ForeignKey(Country, null=True, blank=True)
+    language = models.ForeignKey(Language, null=True, blank=True, default='eng', help_text="Primary language for event. When multiple languages, specify this in the event description")
 
-	badged = models.BooleanField(null=False, blank=False, default=False, verbose_name='Community event', help_text='Choose "Community event" if this is a community recognized event following the community event guidelines.')
-	description_for_badged = models.TextField(blank=True, null=True, verbose_name='Description for community event', help_text='DEPRECRATED: This was used in the beginning of community events to collect additional information.')
-	startdate = models.DateField(null=False, blank=False, verbose_name="Start date")
-	enddate = models.DateField(null=False, blank=False, verbose_name="End date")
+    badged = models.BooleanField(null=False, blank=False, default=False, verbose_name='Community event', help_text='Choose "Community event" if this is a community recognized event following the community event guidelines.')
+    description_for_badged = models.TextField(blank=True, null=True, verbose_name='Description for community event', help_text='DEPRECATED: This was used in the beginning of community events to collect additional information.')
+    startdate = models.DateField(null=False, blank=False, verbose_name="Start date")
+    enddate = models.DateField(null=False, blank=False, verbose_name="End date")
 
-	summary = models.TextField(blank=False, null=False, help_text="A short introduction (shown on the events listing page)")
-	details = models.TextField(blank=False, null=False, help_text="Complete event description")
+    summary = models.TextField(blank=False, null=False, help_text="A short introduction (shown on the events listing page)")
+    details = models.TextField(blank=False, null=False, help_text="Complete event description")
 
-	send_notification = True
-	markdown_fields = ('details', 'summary', )
+    send_notification = True
+    markdown_fields = ('details', 'summary', )
 
-	def purge_urls(self):
-		yield '/about/event/%s/' % self.pk
-		yield '/about/events/'
-		yield '/events.rss'
-		# FIXME: when to expire the front page?
-		yield '/$'
+    def purge_urls(self):
+        yield '/about/event/%s/' % self.pk
+        yield '/about/events/'
+        yield '/events.rss'
+        # FIXME: when to expire the front page?
+        yield '/$'
 
-	def __unicode__(self):
-		return "%s: %s" % (self.startdate, self.title)
+    def __unicode__(self):
+        return "%s: %s" % (self.startdate, self.title)
 
-	def verify_submitter(self, user):
-		return (len(self.org.managers.filter(pk=user.pk)) == 1)
+    def verify_submitter(self, user):
+        return (len(self.org.managers.filter(pk=user.pk)) == 1)
 
-	@property
-	def has_organisation(self):
-		mgrs = self.org.managers.all()
-		if len(mgrs) == 1:
-			if mgrs[0].pk == 0:
-				return False # Migration organisation
-			else:
-				return True # Has an actual organisation
-		elif len(mgrs) > 1:
-			# More than one manager means it must be new
-			return True
-		return False # Has no organisastion at all
+    @property
+    def has_organisation(self):
+        mgrs = self.org.managers.all()
+        if len(mgrs) == 1:
+            if mgrs[0].pk == 0:
+                return False # Migration organisation
+            else:
+                return True # Has an actual organisation
+        elif len(mgrs) > 1:
+            # More than one manager means it must be new
+            return True
+        return False # Has no organisation at all
 
-	@property
-	def displaydate(self):
-		if self.startdate == self.enddate:
-			return self.startdate
-		else:
-			return "%s – %s" % (self.startdate, self.enddate)
+    @property
+    def displaydate(self):
+        if self.startdate == self.enddate:
+            return self.startdate
+        else:
+            return "%s – %s" % (self.startdate, self.enddate)
 
-	@property
-	def locationstring(self):
-		if self.isonline:
-			return "online"
-		elif self.state:
-			return "%s, %s, %s" % (self.city, self.state, self.country)
-		else:
-			return "%s, %s" % (self.city, self.country)
+    @property
+    def locationstring(self):
+        if self.isonline:
+            return "online"
+        elif self.state:
+            return "%s, %s, %s" % (self.city, self.state, self.country)
+        else:
+            return "%s, %s" % (self.city, self.country)
 
-	class Meta:
-		ordering = ('-startdate','-enddate',)
+    class Meta:
+        ordering = ('-startdate','-enddate',)
diff --git a/pgweb/events/struct.py b/pgweb/events/struct.py
index 488acb28..e60303dd 100644
--- a/pgweb/events/struct.py
+++ b/pgweb/events/struct.py
@@ -2,16 +2,16 @@ from datetime import date
 from models import Event
 
 def get_struct():
-	now = date.today()
+    now = date.today()
 
-	# We intentionally don't put /about/eventarchive/ in the sitemap,
-	# since we don't care about getting it indexed.
-	# We only show events in the future, so only index events in the
-	# future...
+    # We intentionally don't put /about/eventarchive/ in the sitemap,
+    # since we don't care about getting it indexed.
+    # We only show events in the future, so only index events in the
+    # future...
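The decay rule in the loop just below, spelled out: entries start at sitemap priority 0.5 and lose 0.1 per year of event age, with age capped at four years, so nothing drops below 0.1. A restatement of that arithmetic (the helper name is ours):

    def sitemap_priority(now, startdate):
        yearsold = (now - startdate).days / 365  # integer division, as in the Python 2 code below
        if yearsold > 4:
            yearsold = 4
        return 0.5 - (yearsold / 10.0)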
-	for n in Event.objects.filter(approved=True, enddate__gt=now):
-		yearsold = (now - n.startdate).days / 365
-		if yearsold > 4:
-			yearsold = 4
-		yield ('about/event/%s/' % n.id,
-			   0.5-(yearsold/10.0))
+    for n in Event.objects.filter(approved=True, enddate__gt=now):
+        yearsold = (now - n.startdate).days / 365
+        if yearsold > 4:
+            yearsold = 4
+        yield ('about/event/%s/' % n.id,
+               0.5-(yearsold/10.0))
diff --git a/pgweb/events/views.py b/pgweb/events/views.py
index ed00367d..d0fa7da8 100644
--- a/pgweb/events/views.py
+++ b/pgweb/events/views.py
@@ -11,39 +11,39 @@ from models import Event
 from forms import EventForm
 
 def main(request):
-	community_events = Event.objects.select_related('country').filter(approved=True, badged=True).filter(enddate__gt=date.today()).order_by('enddate', 'startdate',)
-	other_events = Event.objects.select_related('country').filter(approved=True, badged=False).filter(enddate__gt=date.today()).order_by('enddate', 'startdate',)
-	return render_pgweb(request, 'about', 'events/archive.html', {
-		'title': 'Upcoming Events',
-		'eventblocks': (
-			{ 'name': 'Community Events', 'events': community_events, 'link': '',},
-			{ 'name': 'Other Events', 'events': other_events, 'link': '',},
-		),
-	})
+    community_events = Event.objects.select_related('country').filter(approved=True, badged=True).filter(enddate__gt=date.today()).order_by('enddate', 'startdate',)
+    other_events = Event.objects.select_related('country').filter(approved=True, badged=False).filter(enddate__gt=date.today()).order_by('enddate', 'startdate',)
+    return render_pgweb(request, 'about', 'events/archive.html', {
+        'title': 'Upcoming Events',
+        'eventblocks': (
+            { 'name': 'Community Events', 'events': community_events, 'link': '',},
+            { 'name': 'Other Events', 'events': other_events, 'link': '',},
+        ),
+    })
 
 def _eventarchive(request, title):
-	# Hardcode to the latest 100 events. Do we need paging too?
+    # Hardcode to the latest 100 events. Do we need paging too?
+    events = Event.objects.select_related('country').filter(approved=True).filter(enddate__lte=date.today()).order_by('-enddate', '-startdate',)[:100]
+    return render_pgweb(request, 'about', 'events/archive.html', {
+        'title': '%s Archive' % title,
+        'archive': True,
+        'eventblocks': (
+            {'name': title, 'events': events, },
+        ),
+    })
 
 def archive(request):
-	return _eventarchive(request, 'Event')
+    return _eventarchive(request, 'Event')
 
 def item(request, itemid, throwaway=None):
-	event = get_object_or_404(Event, pk=itemid)
-	if not event.approved:
-		raise Http404
-	return render_pgweb(request, 'about', 'events/item.html', {
-		'obj': event,
-	})
+    event = get_object_or_404(Event, pk=itemid)
+    if not event.approved:
+        raise Http404
+    return render_pgweb(request, 'about', 'events/item.html', {
+        'obj': event,
+    })
 
 @login_required
 def form(request, itemid):
-	return simple_form(Event, itemid, request, EventForm,
-					   redirect='/account/edit/events/')
+    return simple_form(Event, itemid, request, EventForm,
+                       redirect='/account/edit/events/')
diff --git a/pgweb/featurematrix/admin.py b/pgweb/featurematrix/admin.py
index 2aa1b8ce..1581c830 100644
--- a/pgweb/featurematrix/admin.py
+++ b/pgweb/featurematrix/admin.py
@@ -3,17 +3,17 @@ from django.contrib import admin
 from models import Feature, FeatureGroup
 
 class FeatureInline(admin.TabularInline):
-	model = Feature
+    model = Feature
 
 class FeatureGroupAdmin(admin.ModelAdmin):
-	inlines = [FeatureInline, ]
-	list_display = ('groupname', 'groupsort')
-	ordering = ['groupsort']
+    inlines = [FeatureInline, ]
+    list_display = ('groupname', 'groupsort')
+    ordering = ['groupsort']
 
 class FeatureAdmin(admin.ModelAdmin):
-	list_display = ('featurename', 'group')
-	list_filter = ('group',)
-	search_fields = ('featurename',)
+    list_display = ('featurename', 'group')
+    list_filter = ('group',)
+    search_fields = ('featurename',)
 
 admin.site.register(FeatureGroup, FeatureGroupAdmin)
 admin.site.register(Feature, FeatureAdmin)
diff --git a/pgweb/featurematrix/migrations/0002_featurematrix_96.py b/pgweb/featurematrix/migrations/0002_featurematrix_96.py
index df798fc0..833932eb 100644
--- a/pgweb/featurematrix/migrations/0002_featurematrix_96.py
+++ b/pgweb/featurematrix/migrations/0002_featurematrix_96.py
@@ -16,5 +16,5 @@ class Migration(migrations.Migration):
             name='v96',
             field=models.IntegerField(default=0, verbose_name=b'9.6', choices=[(0, b'No'), (1, b'Yes'), (2, b'Obsolete'), (3, b'?')]),
         ),
-	migrations.RunSQL("UPDATE featurematrix_feature SET v96=v95 WHERE NOT v96=v95"),
+        migrations.RunSQL("UPDATE featurematrix_feature SET v96=v95 WHERE NOT v96=v95"),
     ]
diff --git a/pgweb/featurematrix/migrations/0003_feature_v10.py b/pgweb/featurematrix/migrations/0003_feature_v10.py
index 6935faf3..182e0770 100644
--- a/pgweb/featurematrix/migrations/0003_feature_v10.py
+++ b/pgweb/featurematrix/migrations/0003_feature_v10.py
@@ -16,5 +16,5 @@ class Migration(migrations.Migration):
             name='v10',
             field=models.IntegerField(default=0, verbose_name=b'10', choices=[(0, b'No'), (1, b'Yes'), (2, b'Obsolete'), (3, b'?')]),
         ),
-	migrations.RunSQL("UPDATE featurematrix_feature SET v10=v96 WHERE NOT v10=v96"),
+        migrations.RunSQL("UPDATE featurematrix_feature SET v10=v96 WHERE NOT v10=v96"),
     ]
diff --git a/pgweb/featurematrix/models.py b/pgweb/featurematrix/models.py
index 433e3c3f..9ed6cbd6 100644
--- a/pgweb/featurematrix/models.py
+++ b/pgweb/featurematrix/models.py
@@ -9,58 +9,58 @@ choices_map = {
 choices = [(k, v['str']) for k,v in choices_map.items()]
 
 class FeatureGroup(models.Model):
-	groupname = models.CharField(max_length=100, null=False, blank=False)
-	groupsort = models.IntegerField(null=False, blank=False)
+    groupname = models.CharField(max_length=100, null=False, blank=False)
+    groupsort = models.IntegerField(null=False, blank=False)
 
-	purge_urls = ('/about/featurematrix/', )
+    purge_urls = ('/about/featurematrix/', )
 
-	def __unicode__(self):
-		return self.groupname
+    def __unicode__(self):
+        return self.groupname
 
-	@property
-	def columns(self):
-		# Return a list of all the columns for the matrix
-		return [b for a,b in versions]
+    @property
+    def columns(self):
+        # Return a list of all the columns for the matrix
+        return [b for a,b in versions]
 
 class Feature(models.Model):
-	group = models.ForeignKey(FeatureGroup, null=False, blank=False)
-	featurename = models.CharField(max_length=100, null=False, blank=False)
-	featuredescription = models.TextField(null=False, blank=True)
-	#WARNING! All fields that start with "v" will be considered versions!
-	v74 = models.IntegerField(verbose_name="7.4", null=False, blank=False, default=0, choices=choices)
-	v74.visible_default = False
-	v80 = models.IntegerField(verbose_name="8.0", null=False, blank=False, default=0, choices=choices)
-	v80.visible_default = False
-	v81 = models.IntegerField(verbose_name="8.1", null=False, blank=False, default=0, choices=choices)
-	v82 = models.IntegerField(verbose_name="8.2", null=False, blank=False, default=0, choices=choices)
-	v83 = models.IntegerField(verbose_name="8.3", null=False, blank=False, default=0, choices=choices)
-	v84 = models.IntegerField(verbose_name="8.4", null=False, blank=False, default=0, choices=choices)
-	v90 = models.IntegerField(verbose_name="9.0", null=False, blank=False, default=0, choices=choices)
-	v91 = models.IntegerField(verbose_name="9.1", null=False, blank=False, default=0, choices=choices)
-	v92 = models.IntegerField(verbose_name="9.2", null=False, blank=False, default=0, choices=choices)
-	v93 = models.IntegerField(verbose_name="9.3", null=False, blank=False, default=0, choices=choices)
-	v94 = models.IntegerField(verbose_name="9.4", null=False, blank=False, default=0, choices=choices)
-	v95 = models.IntegerField(verbose_name="9.5", null=False, blank=False, default=0, choices=choices)
-	v96 = models.IntegerField(verbose_name="9.6", null=False, blank=False, default=0, choices=choices)
-	v10 = models.IntegerField(verbose_name="10", null=False, blank=False, default=0, choices=choices)
-	v11 = models.IntegerField(verbose_name="11", null=False, blank=False, default=0, choices=choices)
+    group = models.ForeignKey(FeatureGroup, null=False, blank=False)
+    featurename = models.CharField(max_length=100, null=False, blank=False)
+    featuredescription = models.TextField(null=False, blank=True)
+    #WARNING! All fields that start with "v" will be considered versions!
+    v74 = models.IntegerField(verbose_name="7.4", null=False, blank=False, default=0, choices=choices)
+    v74.visible_default = False
+    v80 = models.IntegerField(verbose_name="8.0", null=False, blank=False, default=0, choices=choices)
+    v80.visible_default = False
+    v81 = models.IntegerField(verbose_name="8.1", null=False, blank=False, default=0, choices=choices)
+    v82 = models.IntegerField(verbose_name="8.2", null=False, blank=False, default=0, choices=choices)
+    v83 = models.IntegerField(verbose_name="8.3", null=False, blank=False, default=0, choices=choices)
+    v84 = models.IntegerField(verbose_name="8.4", null=False, blank=False, default=0, choices=choices)
+    v90 = models.IntegerField(verbose_name="9.0", null=False, blank=False, default=0, choices=choices)
+    v91 = models.IntegerField(verbose_name="9.1", null=False, blank=False, default=0, choices=choices)
+    v92 = models.IntegerField(verbose_name="9.2", null=False, blank=False, default=0, choices=choices)
+    v93 = models.IntegerField(verbose_name="9.3", null=False, blank=False, default=0, choices=choices)
+    v94 = models.IntegerField(verbose_name="9.4", null=False, blank=False, default=0, choices=choices)
+    v95 = models.IntegerField(verbose_name="9.5", null=False, blank=False, default=0, choices=choices)
+    v96 = models.IntegerField(verbose_name="9.6", null=False, blank=False, default=0, choices=choices)
+    v10 = models.IntegerField(verbose_name="10", null=False, blank=False, default=0, choices=choices)
+    v11 = models.IntegerField(verbose_name="11", null=False, blank=False, default=0, choices=choices)
 
-	purge_urls = ('/about/featurematrix/.*', )
+    purge_urls = ('/about/featurematrix/.*', )
 
-	def __unicode__(self):
-		# To make it look good in the admin interface, just don't render it
-		return ''
+    def __unicode__(self):
+        # To make it look good in the admin interface, just don't render it
+        return ''
 
-	def columns(self):
-		# Get a list of column based on all versions that are visible_default
-		return [choices_map[getattr(self, a)] for a,b in versions]
+    def columns(self):
+        # Get a list of column based on all versions that are visible_default
+        return [choices_map[getattr(self, a)] for a,b in versions]
 
-	@property
-	def featurelink(self):
-		if self.featuredescription.startswith('https://') or self.featuredescription.startswith('http://'):
-			return self.featuredescription
-		else:
-			return 'detail/%s/' % self.id
+    @property
+    def featurelink(self):
+        if self.featuredescription.startswith('https://') or self.featuredescription.startswith('http://'):
+            return self.featuredescription
+        else:
+            return 'detail/%s/' % self.id
 
 versions = [(f.name,f.verbose_name) for f in Feature()._meta.fields if f.name.startswith('v') and getattr(f, 'visible_default', True)]
 versions = sorted(versions, key=lambda f: -float(f[1]))
diff --git a/pgweb/featurematrix/struct.py b/pgweb/featurematrix/struct.py
index c3eef315..3601f227 100644
--- a/pgweb/featurematrix/struct.py
+++ b/pgweb/featurematrix/struct.py
@@ -1,2 +1,2 @@
 def get_struct():
-	yield ('about/featurematrix/', None)
+    yield ('about/featurematrix/', None)
diff --git a/pgweb/featurematrix/views.py b/pgweb/featurematrix/views.py
index bd445390..a50c9fd0 100644
--- a/pgweb/featurematrix/views.py
+++ b/pgweb/featurematrix/views.py
@@ -6,31 +6,31 @@ from pgweb.core.models import Version
 from models import Feature
 
 def root(request):
-	features = Feature.objects.all().select_related().order_by('group__groupsort', 'group__groupname', 'featurename')
-	groups = []
-	lastgroup = -1
-	currentgroup = None
-	for f in features:
-		if f.group.id != lastgroup:
-			if currentgroup:
-				groups.append(currentgroup)
-			lastgroup = f.group.id
-			currentgroup = {
-				'group': f.group,
-				'features': [],
-			}
-		currentgroup['features'].append(f)
-	if currentgroup:
-		groups.append(currentgroup)
+    features = Feature.objects.all().select_related().order_by('group__groupsort', 'group__groupname', 'featurename')
+    groups = []
+    lastgroup = -1
+    currentgroup = None
+    for f in features:
+        if f.group.id != lastgroup:
+            if currentgroup:
+                groups.append(currentgroup)
+            lastgroup = f.group.id
+            currentgroup = {
+                'group': f.group,
+                'features': [],
+            }
+        currentgroup['features'].append(f)
+    if currentgroup:
+        groups.append(currentgroup)
 
-	versions = Version.objects.filter(tree__gte='8.1').order_by('-tree')
-	return render_pgweb(request, 'about', 'featurematrix/featurematrix.html', {
-		'groups': groups,
-		'versions': versions,
-	})
+    versions = Version.objects.filter(tree__gte='8.1').order_by('-tree')
+    return render_pgweb(request, 'about', 'featurematrix/featurematrix.html', {
+        'groups': groups,
+        'versions': versions,
+    })
 
 def detail(request, featureid):
-	feature = get_object_or_404(Feature, pk=featureid)
-	return render_pgweb(request, 'about', 'featurematrix/featuredetail.html', {
-		'feature': feature,
-	})
+    feature = get_object_or_404(Feature, pk=featureid)
+    return render_pgweb(request, 'about', 'featurematrix/featuredetail.html', {
+        'feature': feature,
+    })
diff --git a/pgweb/legacyurl/views.py b/pgweb/legacyurl/views.py
index 1a7823b2..5fda8fca 100644
--- a/pgweb/legacyurl/views.py
+++ b/pgweb/legacyurl/views.py
@@ -1,5 +1,5 @@
 from django.http import HttpResponseRedirect
 
 def mailpref(request, listname):
-	# Just redirect to the homepage of pglister, don't try specific lists
-	return HttpResponseRedirect("https://lists.postgresql.org/")
+    # Just redirect to the homepage of pglister, don't try specific lists
+    return HttpResponseRedirect("https://lists.postgresql.org/")
diff --git a/pgweb/lists/management/commands/sync_lists.py b/pgweb/lists/management/commands/sync_lists.py
index 53c89633..fe2701a6 100644
--- a/pgweb/lists/management/commands/sync_lists.py
+++ b/pgweb/lists/management/commands/sync_lists.py
@@ -8,52 +8,52 @@ from django.conf import settings
 import requests
 
 class Command(BaseCommand):
-	help = 'Synchronize mailinglists'
+    help = 'Synchronize mailinglists'
 
-	def add_arguments(self, parser):
-		parser.add_argument('--dryrun', action='store_true', help="Don't commit changes")
+    def add_arguments(self, parser):
+        parser.add_argument('--dryrun', action='store_true', help="Don't commit changes")
 
-	def handle(self, *args, **options):
-		if settings.ARCHIVES_SEARCH_PLAINTEXT:
-			proto="http"
-		else:
-			proto="https"
-		r = requests.get('{0}://{1}/listinfo/'.format(proto, settings.ARCHIVES_SEARCH_SERVER))
-		j = r.json()
-		allgroups = list(set([l['group'] for l in j]))
-		with transaction.atomic():
-			curs = connection.cursor()
+    def handle(self, *args, **options):
+        if settings.ARCHIVES_SEARCH_PLAINTEXT:
+            proto="http"
+        else:
+            proto="https"
+        r = requests.get('{0}://{1}/listinfo/'.format(proto, settings.ARCHIVES_SEARCH_SERVER))
+        j = r.json()
+        allgroups = list(set([l['group'] for l in j]))
+        with transaction.atomic():
+            curs = connection.cursor()
 
-			# Add any groups necessary
-			curs.execute("INSERT INTO lists_mailinglistgroup (groupname, sortkey) SELECT n,50 FROM UNNEST(%s) n(n) WHERE NOT EXISTS (SELECT 1 FROM lists_mailinglistgroup WHERE groupname=n) RETURNING groupname", (allgroups,))
-			for n, in curs.fetchall():
-				print "Added group %s" % n
+            # Add any groups necessary
+            curs.execute("INSERT INTO lists_mailinglistgroup (groupname, sortkey) SELECT n,50 FROM UNNEST(%s) n(n) WHERE NOT EXISTS (SELECT 1 FROM lists_mailinglistgroup WHERE groupname=n) RETURNING groupname", (allgroups,))
+            for n, in curs.fetchall():
+                print "Added group %s" % n
 
-			# Add and update lists
-			for l in j:
-				curs.execute("SELECT id FROM lists_mailinglist WHERE listname=%s", (l['name'],))
-				if curs.rowcount == 0:
-					curs.execute("INSERT INTO lists_mailinglist (listname, group_id, active, description, shortdesc) VALUES (%s, (SELECT id FROM lists_mailinglistgroup WHERE groupname=%s), %s, %s, %s)", (
-						l['name'], l['group'], l['active'], l['description'], l['shortdesc']))
-					print "Added list %s" % l['name']
-				else:
-					curs.execute("UPDATE lists_mailinglist SET group_id=(SELECT id FROM lists_mailinglistgroup WHERE groupname=%s), active=%s, description=%s, shortdesc=%s WHERE listname=%s AND NOT (group_id=(SELECT id FROM lists_mailinglistgroup WHERE groupname=%s) AND active=%s AND description=%s AND shortdesc=%s) RETURNING listname", (
-						l['group'], l['active'], l['description'], l['shortdesc'],
-						l['name'],
-						l['group'], l['active'], l['description'], l['shortdesc'],
-					))
-					for n, in curs.fetchall():
-						print "Updated list %s" % n
+            # Add and update lists
+            for l in j:
+                curs.execute("SELECT id FROM lists_mailinglist WHERE listname=%s", (l['name'],))
+                if curs.rowcount == 0:
+                    curs.execute("INSERT INTO lists_mailinglist (listname, group_id, active, description, shortdesc) VALUES (%s, (SELECT id FROM lists_mailinglistgroup WHERE groupname=%s), %s, %s, %s)", (
+                        l['name'], l['group'], l['active'], l['description'], l['shortdesc']))
+                    print "Added list %s" % l['name']
+                else:
+                    curs.execute("UPDATE lists_mailinglist SET group_id=(SELECT id FROM lists_mailinglistgroup WHERE groupname=%s), active=%s, description=%s, shortdesc=%s WHERE listname=%s AND NOT (group_id=(SELECT id FROM lists_mailinglistgroup WHERE groupname=%s) AND active=%s AND description=%s AND shortdesc=%s) RETURNING listname", (
+                        l['group'], l['active'], l['description'], l['shortdesc'],
+                        l['name'],
+                        l['group'], l['active'], l['description'], l['shortdesc'],
+                    ))
+                    for n, in curs.fetchall():
+                        print "Updated list %s" % n
 
-			# Delete any lists that shouldn't exist anymore (this is safe because we don't keep any data about them,
-			# so they are trivial to add back)
-			curs.execute("DELETE FROM lists_mailinglist WHERE NOT listname=ANY(%s) RETURNING listname", ([l['name'] for l in j],))
-			for n, in curs.fetchall():
-				print "Deleted list %s" % n
-			# Delete listgroups
-			curs.execute("DELETE FROM lists_mailinglistgroup WHERE NOT groupname=ANY(%s) RETURNING groupname", (allgroups,))
-			for n, in curs.fetchall():
-				print "Deleted group %s" % n
+            # Delete any lists that shouldn't exist anymore (this is safe because we don't keep any data about them,
+            # so they are trivial to add back)
+            curs.execute("DELETE FROM lists_mailinglist WHERE NOT listname=ANY(%s) RETURNING listname", ([l['name'] for l in j],))
+            for n, in curs.fetchall():
+                print "Deleted list %s" % n
+            # Delete listgroups
+            curs.execute("DELETE FROM lists_mailinglistgroup WHERE NOT groupname=ANY(%s) RETURNING groupname", (allgroups,))
+            for n, in curs.fetchall():
+                print "Deleted group %s" % n
 
-		if options['dryrun']:
-			raise CommandError("Dry run, rolling back")
+        if options['dryrun']:
+            raise CommandError("Dry run, rolling back")
diff --git a/pgweb/lists/models.py b/pgweb/lists/models.py
index 9c7aa720..0d398c9a 100644
--- a/pgweb/lists/models.py
+++ b/pgweb/lists/models.py
@@ -1,38 +1,38 @@
 from django.db import models
 
 class MailingListGroup(models.Model):
-	groupname = models.CharField(max_length=64, null=False, blank=False)
-	sortkey = models.IntegerField(null=False, default=10)
+    groupname = models.CharField(max_length=64, null=False, blank=False)
+    sortkey = models.IntegerField(null=False, default=10)
 
-	purge_urls = ('/community/lists/', )
+    purge_urls = ('/community/lists/', )
 
-	@property
-	def negid(self):
-		return -self.id
+    @property
+    def negid(self):
+        return -self.id
 
-	def __unicode__(self):
-		return self.groupname
+    def __unicode__(self):
+        return self.groupname
 
-	class Meta:
-		ordering = ('sortkey', )
+    class Meta:
+        ordering = ('sortkey', )
 
 class MailingList(models.Model):
-	group = models.ForeignKey(MailingListGroup, null=False)
-	listname = models.CharField(max_length=64, null=False, blank=False, unique=True)
-	active = models.BooleanField(null=False, default=False)
-	description = models.TextField(null=False, blank=True)
-	shortdesc = models.TextField(null=False, blank=True)
+    group = models.ForeignKey(MailingListGroup, null=False)
+    listname = models.CharField(max_length=64, null=False, blank=False, unique=True)
+    active = models.BooleanField(null=False, default=False)
+    description = models.TextField(null=False, blank=True)
+    shortdesc = models.TextField(null=False, blank=True)
 
-	purge_urls = ('/community/lists/', )
+    purge_urls = ('/community/lists/', )
 
-	@property
-	def maybe_shortdesc(self):
-		if self.shortdesc:
-			return self.shortdesc
-		return self.listname
+    @property
+    def maybe_shortdesc(self):
+        if self.shortdesc:
+            return self.shortdesc
+        return self.listname
 
-	def __unicode__(self):
-		return self.listname
+    def __unicode__(self):
+        return self.listname
 
-	class Meta:
-		ordering = ('listname', )
+    class Meta:
+        ordering = ('listname', )
diff --git a/pgweb/lists/struct.py b/pgweb/lists/struct.py
index e42eb5df..02509b97 100644
--- a/pgweb/lists/struct.py
+++ b/pgweb/lists/struct.py
@@ -1,2 +1,2 @@
 def get_struct():
-	yield ('community/lists/', None)
+    yield ('community/lists/', None)
diff --git a/pgweb/lists/views.py b/pgweb/lists/views.py
index 46ce8dc4..62db498f 100644
--- a/pgweb/lists/views.py
+++ b/pgweb/lists/views.py
@@ -5,19 +5,19 @@ import json
 from models import MailingList, MailingListGroup
 
 def listinfo(request):
-	resp = HttpResponse(content_type='application/json')
-	groupdata = [ {
-		'id': g.id,
-		'name': g.groupname,
-		'sort': g.sortkey,
-	} for g in MailingListGroup.objects.all()]
-	listdata = [ {
-		'id': l.id,
-		'name': l.listname,
-		'groupid': l.group_id,
-		'active': l.active,
-		'shortdesc': l.shortdesc,
-		'description': l.description,
-	} for l in MailingList.objects.all()]
-	json.dump({'groups': groupdata, 'lists': listdata}, resp)
-	return resp
+    resp = HttpResponse(content_type='application/json')
+    groupdata = [ {
+        'id': g.id,
+        'name': g.groupname,
+        'sort': g.sortkey,
+    } for g in MailingListGroup.objects.all()]
+    listdata = [ {
+        'id': l.id,
+        'name': l.listname,
+        'groupid': l.group_id,
+        'active': l.active,
+        'shortdesc': l.shortdesc,
+        'description': l.description,
+    } for l in MailingList.objects.all()]
+    json.dump({'groups': groupdata, 'lists': listdata}, resp)
+    return resp
diff --git a/pgweb/mailqueue/admin.py b/pgweb/mailqueue/admin.py
index 50ddc9e3..801d1163 100644
--- a/pgweb/mailqueue/admin.py
+++ b/pgweb/mailqueue/admin.py
@@ -5,26 +5,26 @@ from email.parser import Parser
 from models import QueuedMail
 
 class QueuedMailAdmin(admin.ModelAdmin):
-	model = QueuedMail
-	readonly_fields = ('parsed_content', )
+    model = QueuedMail
+    readonly_fields = ('parsed_content', )
 
-	def parsed_content(self, obj):
-		# We only try to parse the *first* piece, because we assume
-		# all our emails are trivial.
-		try:
-			parser = Parser()
-			msg = parser.parsestr(obj.fullmsg)
-			b = msg.get_payload(decode=True)
-			if b: return b
+    def parsed_content(self, obj):
+        # We only try to parse the *first* piece, because we assume
+        # all our emails are trivial.
+        try:
+            parser = Parser()
+            msg = parser.parsestr(obj.fullmsg)
+            b = msg.get_payload(decode=True)
+            if b: return b
 
-			pl = msg.get_payload()
-			for p in pl:
-				b = p.get_payload(decode=True)
-				if b: return b
-			return "Could not find body"
-		except Exception, e:
-			return "Failed to get body: %s" % e
+            pl = msg.get_payload()
+            for p in pl:
+                b = p.get_payload(decode=True)
+                if b: return b
+            return "Could not find body"
+        except Exception, e:
+            return "Failed to get body: %s" % e
 
-	parsed_content.short_description = 'Parsed mail'
+    parsed_content.short_description = 'Parsed mail'
 
 admin.site.register(QueuedMail, QueuedMailAdmin)
diff --git a/pgweb/mailqueue/management/commands/send_queued_mail.py b/pgweb/mailqueue/management/commands/send_queued_mail.py
index 59508185..5e019fb6 100755
--- a/pgweb/mailqueue/management/commands/send_queued_mail.py
+++ b/pgweb/mailqueue/management/commands/send_queued_mail.py
@@ -14,35 +14,35 @@ import smtplib
 from pgweb.mailqueue.models import QueuedMail
 
 class Command(BaseCommand):
-	help = 'Send queued mail'
+    help = 'Send queued mail'
 
-	def handle(self, *args, **options):
-		# Grab advisory lock, if available. Lock id is just a random number
-		# since we only need to interlock against ourselves. The lock is
-		# automatically released when we're done.
-		curs = connection.cursor()
-		curs.execute("SELECT pg_try_advisory_lock(72181372)")
-		if not curs.fetchall()[0][0]:
-			raise CommandError("Failed to get advisory lock, existing send_queued_mail process stuck?")
+    def handle(self, *args, **options):
+        # Grab advisory lock, if available. Lock id is just a random number
+        # since we only need to interlock against ourselves. The lock is
+        # automatically released when we're done.
+        curs = connection.cursor()
+        curs.execute("SELECT pg_try_advisory_lock(72181372)")
+        if not curs.fetchall()[0][0]:
+            raise CommandError("Failed to get advisory lock, existing send_queued_mail process stuck?")
 
-		for m in QueuedMail.objects.all():
-			# Yes, we do a new connection for each run. Just because we can.
-			# If it fails we'll throw an exception and just come back on the
-			# next cron job. And local delivery should never fail...
-			if m.usergenerated:
-				# User generated email gets relayed directly over a frontend
-				smtphost = settings.FRONTEND_SMTP_RELAY
-			else:
-				smtphost = 'localhost'
-			smtp = smtplib.SMTP(smtphost)
-			try:
-				smtp.sendmail(m.sender, m.receiver, m.fullmsg.encode('utf-8'))
-			except (smtplib.SMTPSenderRefused, smtplib.SMTPRecipientsRefused, smtplib.SMTPDataError):
-				# If this was user generated, this indicates the antispam
-				# kicking in, so we just ignore it. If it's anything else,
-				# we want to let the exception through.
-				if not m.usergenerated:
-					raise
+        for m in QueuedMail.objects.all():
+            # Yes, we do a new connection for each run. Just because we can.
+            # If it fails we'll throw an exception and just come back on the
+            # next cron job. And local delivery should never fail...
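The advisory-lock guard at the top of handle(), reduced to a reusable helper (the function name is ours): pg_try_advisory_lock() returns false immediately instead of blocking when another session holds the lock, and a session-level advisory lock is released automatically when the database connection closes.

    from django.db import connection

    JOB_LOCK_ID = 72181372  # arbitrary constant; only has to be unique per job

    def try_job_lock():
        curs = connection.cursor()
        curs.execute("SELECT pg_try_advisory_lock(%s)", (JOB_LOCK_ID,))
        return curs.fetchone()[0]  # True if we got the lock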
+            if m.usergenerated:
+                # User generated email gets relayed directly over a frontend
+                smtphost = settings.FRONTEND_SMTP_RELAY
+            else:
+                smtphost = 'localhost'
+            smtp = smtplib.SMTP(smtphost)
+            try:
+                smtp.sendmail(m.sender, m.receiver, m.fullmsg.encode('utf-8'))
+            except (smtplib.SMTPSenderRefused, smtplib.SMTPRecipientsRefused, smtplib.SMTPDataError):
+                # If this was user generated, this indicates the antispam
+                # kicking in, so we just ignore it. If it's anything else,
+                # we want to let the exception through.
+                if not m.usergenerated:
+                    raise
 
-			smtp.close()
-			m.delete()
+            smtp.close()
+            m.delete()
diff --git a/pgweb/mailqueue/models.py b/pgweb/mailqueue/models.py
index 36d73eee..10c50f3d 100644
--- a/pgweb/mailqueue/models.py
+++ b/pgweb/mailqueue/models.py
@@ -1,14 +1,14 @@
 from django.db import models
 
 class QueuedMail(models.Model):
-	sender = models.EmailField(max_length=100, null=False, blank=False)
-	receiver = models.EmailField(max_length=100, null=False, blank=False)
-	# We store the raw MIME message, so if there are any attachments or
-	# anything, we just push them right in there!
-	fullmsg = models.TextField(null=False, blank=False)
-	# Flag if the message is "user generated", so we can treat those
-	# separately from an antispam and delivery perspective.
-	usergenerated = models.BooleanField(null=False, blank=False, default=False)
+    sender = models.EmailField(max_length=100, null=False, blank=False)
+    receiver = models.EmailField(max_length=100, null=False, blank=False)
+    # We store the raw MIME message, so if there are any attachments or
+    # anything, we just push them right in there!
+    fullmsg = models.TextField(null=False, blank=False)
+    # Flag if the message is "user generated", so we can treat those
+    # separately from an antispam and delivery perspective.
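What the QueuedMail table buys is a transactional outbox: enqueueing is a plain INSERT, so it joins whatever transaction the caller has open, and a rollback cancels the notification together with the change that triggered it. A hypothetical caller (build_fullmsg is an assumed helper, not part of this module):

    from django.db import transaction
    from pgweb.mailqueue.models import QueuedMail

    def approve_and_notify(obj, sender, receiver):
        with transaction.atomic():
            obj.approved = True
            obj.save()
            # No SMTP here; the send_queued_mail command drains the table later
            QueuedMail(sender=sender, receiver=receiver,
                       fullmsg=build_fullmsg(obj)).save()  # build_fullmsg(): hypothetical
        # if anything above raised, neither the approval nor the mail row exists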
+ usergenerated = models.BooleanField(null=False, blank=False, default=False) - def __unicode__(self): - return "%s: %s -> %s" % (self.pk, self.sender, self.receiver) + def __unicode__(self): + return "%s: %s -> %s" % (self.pk, self.sender, self.receiver) diff --git a/pgweb/mailqueue/util.py b/pgweb/mailqueue/util.py index 9f09e728..71c63350 100644 --- a/pgweb/mailqueue/util.py +++ b/pgweb/mailqueue/util.py @@ -9,49 +9,49 @@ from email.header import Header from models import QueuedMail def _encoded_email_header(name, email): - if name: - return formataddr((str(Header(name, 'utf-8')), email)) - return email + if name: + return formataddr((str(Header(name, 'utf-8')), email)) + return email def send_simple_mail(sender, receiver, subject, msgtxt, attachments=None, usergenerated=False, cc=None, replyto=None, sendername=None, receivername=None, messageid=None): - # attachment format, each is a tuple of (name, mimetype,contents) - # content should be *binary* and not base64 encoded, since we need to - # use the base64 routines from the email library to get a properly - # formatted output message - msg = MIMEMultipart() - msg['Subject'] = subject - msg['To'] = _encoded_email_header(receivername, receiver) - msg['From'] = _encoded_email_header(sendername, sender) - if cc: - msg['Cc'] = cc - if replyto: - msg['Reply-To'] = replyto - msg['Date'] = formatdate(localtime=True) - if messageid: - msg['Message-ID'] = messageid - else: - msg['Message-ID'] = make_msgid() + # attachment format, each is a tuple of (name, mimetype,contents) + # content should be *binary* and not base64 encoded, since we need to + # use the base64 routines from the email library to get a properly + # formatted output message + msg = MIMEMultipart() + msg['Subject'] = subject + msg['To'] = _encoded_email_header(receivername, receiver) + msg['From'] = _encoded_email_header(sendername, sender) + if cc: + msg['Cc'] = cc + if replyto: + msg['Reply-To'] = replyto + msg['Date'] = formatdate(localtime=True) + if messageid: + msg['Message-ID'] = messageid + else: + msg['Message-ID'] = make_msgid() - msg.attach(MIMEText(msgtxt, _charset='utf-8')) + msg.attach(MIMEText(msgtxt, _charset='utf-8')) - if attachments: - for filename, contenttype, content in attachments: - main,sub = contenttype.split('/') - part = MIMENonMultipart(main,sub) - part.set_payload(content) - part.add_header('Content-Disposition', 'attachment; filename="%s"' % filename) - encoders.encode_base64(part) - msg.attach(part) + if attachments: + for filename, contenttype, content in attachments: + main,sub = contenttype.split('/') + part = MIMENonMultipart(main,sub) + part.set_payload(content) + part.add_header('Content-Disposition', 'attachment; filename="%s"' % filename) + encoders.encode_base64(part) + msg.attach(part) - # Just write it to the queue, so it will be transactionally rolled back - QueuedMail(sender=sender, receiver=receiver, fullmsg=msg.as_string(), usergenerated=usergenerated).save() - if cc: - # Write a second copy for the cc, wihch will be delivered - # directly to the recipient. (The sender doesn't parse the - # message content to extract cc fields). - QueuedMail(sender=sender, receiver=cc, fullmsg=msg.as_string(), usergenerated=usergenerated).save() + # Just write it to the queue, so it will be transactionally rolled back + QueuedMail(sender=sender, receiver=receiver, fullmsg=msg.as_string(), usergenerated=usergenerated).save() + if cc: + # Write a second copy for the cc, wihch will be delivered + # directly to the recipient. 
(The sender doesn't parse the + # message content to extract cc fields). + QueuedMail(sender=sender, receiver=cc, fullmsg=msg.as_string(), usergenerated=usergenerated).save() def send_mail(sender, receiver, fullmsg, usergenerated=False): - # Send an email, prepared as the full MIME encoded mail already - QueuedMail(sender=sender, receiver=receiver, fullmsg=fullmsg, usergenerated=False).save() + # Send an email, prepared as the full MIME encoded mail already + QueuedMail(sender=sender, receiver=receiver, fullmsg=fullmsg, usergenerated=usergenerated).save() diff --git a/pgweb/misc/forms.py b/pgweb/misc/forms.py index 7ddd57f1..b8b46b89 100644 --- a/pgweb/misc/forms.py +++ b/pgweb/misc/forms.py @@ -4,30 +4,30 @@ from django.db.models import Q from pgweb.core.models import Version class _version_choices(): - def __iter__(self): - yield ('-1', '** Select version') - q = Q(supported=True) | Q(testing__gt=0) - for v in Version.objects.filter(q): - for minor in range(v.latestminor,-1,-1): - if not v.testing or minor>0: - # For beta/rc versions, there is no beta0, so exclude it - s = v.buildversionstring(minor) - yield (s,s) - yield ('Unsupported/Unknown', 'Unsupported/Unknown') + def __iter__(self): + yield ('-1', '** Select version') + q = Q(supported=True) | Q(testing__gt=0) + for v in Version.objects.filter(q): + for minor in range(v.latestminor,-1,-1): + if not v.testing or minor>0: + # For beta/rc versions, there is no beta0, so exclude it + s = v.buildversionstring(minor) + yield (s,s) + yield ('Unsupported/Unknown', 'Unsupported/Unknown') class SubmitBugForm(forms.Form): - name = forms.CharField(max_length=100, required=True) - email = forms.EmailField(max_length=100, required=True) - pgversion = forms.CharField(max_length=20, required=True, - label="PostgreSQL version", - widget=forms.Select(choices=_version_choices())) - os = forms.CharField(max_length=50, required=True, - label="Operating system") - shortdesc = forms.CharField(max_length=100, required=True, - label="Short description") - details = forms.CharField(required=True, widget=forms.Textarea) + name = forms.CharField(max_length=100, required=True) + email = forms.EmailField(max_length=100, required=True) + pgversion = forms.CharField(max_length=20, required=True, + label="PostgreSQL version", + widget=forms.Select(choices=_version_choices())) + os = forms.CharField(max_length=50, required=True, + label="Operating system") + shortdesc = forms.CharField(max_length=100, required=True, + label="Short description") + details = forms.CharField(required=True, widget=forms.Textarea) - def clean_pgversion(self): - if self.cleaned_data.get('pgversion') == '-1': - raise forms.ValidationError('You must select a version') - return self.cleaned_data.get('pgversion') + def clean_pgversion(self): + if self.cleaned_data.get('pgversion') == '-1': + raise forms.ValidationError('You must select a version') + return self.cleaned_data.get('pgversion') diff --git a/pgweb/misc/models.py b/pgweb/misc/models.py index 90c7e240..58148e70 100644 --- a/pgweb/misc/models.py +++ b/pgweb/misc/models.py @@ -1,7 +1,7 @@ from django.db import models class BugIdMap(models.Model): - # Explicit id field because we don't want a SERIAL here, since we generate - # the actual bug IDs externally. - id = models.IntegerField(null=False, blank=False, primary_key=True) - messageid = models.CharField(max_length=500, null=False, blank=False) + # Explicit id field because we don't want a SERIAL here, since we generate + # the actual bug IDs externally.
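The send_simple_mail() helper above expects attachments as (filename, mimetype, contents) tuples, where contents is raw bytes — the helper runs the base64 encoding itself. A hypothetical call (addresses and file name invented for illustration):

    from pgweb.mailqueue.util import send_simple_mail

    with open('advisory.pdf', 'rb') as f:
        pdfbytes = f.read()

    send_simple_mail(
        'noreply@postgresql.org',        # sender
        'someone@example.com',           # receiver
        'Security advisory',             # subject
        'Please see the attached PDF.',  # message body
        attachments=[('advisory.pdf', 'application/pdf', pdfbytes)],
        usergenerated=False,
    )

Since the message is only written to the QueuedMail table, the call participates in the caller's database transaction: if the request rolls back, so does the mail.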
+ id = models.IntegerField(null=False, blank=False, primary_key=True) + messageid = models.CharField(max_length=500, null=False, blank=False) diff --git a/pgweb/misc/views.py b/pgweb/misc/views.py index aca74eed..9656da81 100644 --- a/pgweb/misc/views.py +++ b/pgweb/misc/views.py @@ -18,76 +18,76 @@ from pgweb.misc.models import BugIdMap from forms import SubmitBugForm def _make_bugs_messageid(bugid): - return "<{0}-{1}@postgresql.org>".format( - bugid, - hashlib.md5("{0}-{1}".format(os.getpid(), time.time())).hexdigest()[:16], - ) + return "<{0}-{1}@postgresql.org>".format( + bugid, + hashlib.md5("{0}-{1}".format(os.getpid(), time.time())).hexdigest()[:16], + ) @login_required def submitbug(request): - if request.method == 'POST': - form = SubmitBugForm(request.POST) - if form.is_valid(): - with transaction.atomic(): - c = connection.cursor() - c.execute("SELECT nextval('bug_id_seq')") - bugid = c.fetchall()[0][0] + if request.method == 'POST': + form = SubmitBugForm(request.POST) + if form.is_valid(): + with transaction.atomic(): + c = connection.cursor() + c.execute("SELECT nextval('bug_id_seq')") + bugid = c.fetchall()[0][0] - messageid = _make_bugs_messageid(bugid) + messageid = _make_bugs_messageid(bugid) - BugIdMap(id=bugid, messageid=messageid.strip('<>')).save() + BugIdMap(id=bugid, messageid=messageid.strip('<>')).save() - send_template_mail( - settings.BUGREPORT_NOREPLY_EMAIL, - settings.BUGREPORT_EMAIL, - 'BUG #%s: %s' % (bugid, form.cleaned_data['shortdesc']), - 'misc/bugmail.txt', - { - 'bugid': bugid, - 'bug': form.cleaned_data, - }, - usergenerated=True, - cc=form.cleaned_data['email'], - replyto='%s, %s' % (form.cleaned_data['email'], settings.BUGREPORT_EMAIL), - sendername="PG Bug reporting form", - messageid=messageid, - ) + send_template_mail( + settings.BUGREPORT_NOREPLY_EMAIL, + settings.BUGREPORT_EMAIL, + 'BUG #%s: %s' % (bugid, form.cleaned_data['shortdesc']), + 'misc/bugmail.txt', + { + 'bugid': bugid, + 'bug': form.cleaned_data, + }, + usergenerated=True, + cc=form.cleaned_data['email'], + replyto='%s, %s' % (form.cleaned_data['email'], settings.BUGREPORT_EMAIL), + sendername="PG Bug reporting form", + messageid=messageid, + ) - return HttpResponseRedirect("/account/submitbug/{0}/".format(bugid)) - else: - form = SubmitBugForm(initial={ - 'name': '%s %s' % (request.user.first_name, request.user.last_name), - 'email': request.user.email, - }) + return HttpResponseRedirect("/account/submitbug/{0}/".format(bugid)) + else: + form = SubmitBugForm(initial={ + 'name': '%s %s' % (request.user.first_name, request.user.last_name), + 'email': request.user.email, + }) - versions = Version.objects.filter(supported=True) + versions = Version.objects.filter(supported=True) - return render_pgweb(request, 'support', 'base/form.html', { - 'form': form, - 'formitemtype': 'bug report', - 'formtitle': 'Submit Bug Report ', - 'operation': 'Submit', - 'form_intro': template_to_string('misc/bug_header.html', { - 'supportedversions': versions, - }), - 'savebutton': 'Submit and Send Email', - }) + return render_pgweb(request, 'support', 'base/form.html', { + 'form': form, + 'formitemtype': 'bug report', + 'formtitle': 'Submit Bug Report ', + 'operation': 'Submit', + 'form_intro': template_to_string('misc/bug_header.html', { + 'supportedversions': versions, + }), + 'savebutton': 'Submit and Send Email', + }) @login_required def submitbug_done(request, bugid): - return render_pgweb(request, 'support', 'misc/bug_completed.html', { - 'bugid': bugid, - }) + return render_pgweb(request, 
'support', 'misc/bug_completed.html', { + 'bugid': bugid, + }) def bugs_redir(request, bugid): - r = get_object_or_404(BugIdMap, id=bugid) + r = get_object_or_404(BugIdMap, id=bugid) - return HttpResponseRedirect("{0}/message-id/{1}".format(settings.SITE_ROOT, r.messageid)) + return HttpResponseRedirect("{0}/message-id/{1}".format(settings.SITE_ROOT, r.messageid)) # A crash testing URL. If the file /tmp/crashtest exists, raise a http 500 # error. Otherwise, just return a fixed text response def crashtest(request): - if os.path.exists('/tmp/crashtest'): - raise Exception('This is a manual test of a crash!') - else: - return HttpResponse('Crash testing disabled', content_type='text/plain') + if os.path.exists('/tmp/crashtest'): + raise Exception('This is a manual test of a crash!') + else: + return HttpResponse('Crash testing disabled', content_type='text/plain') diff --git a/pgweb/news/admin.py b/pgweb/news/admin.py index 93ca0dfc..44734b7c 100644 --- a/pgweb/news/admin.py +++ b/pgweb/news/admin.py @@ -4,21 +4,21 @@ from pgweb.util.admin import PgwebAdmin from models import NewsArticle, NewsTag class NewsArticleAdmin(PgwebAdmin): - list_display = ('title', 'org', 'date', 'approved', ) - list_filter = ('approved', ) - filter_horizontal = ('tags', ) - search_fields = ('content', 'title', ) - change_form_template = 'admin/news/newsarticle/change_form.html' + list_display = ('title', 'org', 'date', 'approved', ) + list_filter = ('approved', ) + filter_horizontal = ('tags', ) + search_fields = ('content', 'title', ) + change_form_template = 'admin/news/newsarticle/change_form.html' - def change_view(self, request, object_id, extra_context=None): - newsarticle = NewsArticle.objects.get(pk=object_id) - my_context = { - 'latest': NewsArticle.objects.filter(org=newsarticle.org)[:10] - } - return super(NewsArticleAdmin, self).change_view(request, object_id, extra_context=my_context) + def change_view(self, request, object_id, extra_context=None): + newsarticle = NewsArticle.objects.get(pk=object_id) + my_context = { + 'latest': NewsArticle.objects.filter(org=newsarticle.org)[:10] + } + return super(NewsArticleAdmin, self).change_view(request, object_id, extra_context=my_context) class NewsTagAdmin(PgwebAdmin): - list_display = ('urlname', 'name', 'description') + list_display = ('urlname', 'name', 'description') admin.site.register(NewsArticle, NewsArticleAdmin) admin.site.register(NewsTag, NewsTagAdmin) diff --git a/pgweb/news/feeds.py b/pgweb/news/feeds.py index 6c77fe3c..b28ad8c7 100644 --- a/pgweb/news/feeds.py +++ b/pgweb/news/feeds.py @@ -5,23 +5,23 @@ from models import NewsArticle from datetime import datetime, time class NewsFeed(Feed): - title = description = "PostgreSQL news" - link = "https://www.postgresql.org/" + title = description = "PostgreSQL news" + link = "https://www.postgresql.org/" - description_template = 'news/rss_description.html' - title_template = 'news/rss_title.html' + description_template = 'news/rss_description.html' + title_template = 'news/rss_title.html' - def get_object(self, request, tagurl=None): - return tagurl + def get_object(self, request, tagurl=None): + return tagurl - def items(self, obj): - if obj: - return NewsArticle.objects.filter(approved=True, tags__urlname=obj)[:10] - else: - return NewsArticle.objects.filter(approved=True)[:10] + def items(self, obj): + if obj: + return NewsArticle.objects.filter(approved=True, tags__urlname=obj)[:10] + else: + return NewsArticle.objects.filter(approved=True)[:10] - def item_link(self, obj): - return 
"https://www.postgresql.org/about/news/%s/" % obj.id + def item_link(self, obj): + return "https://www.postgresql.org/about/news/%s/" % obj.id - def item_pubdate(self, obj): - return datetime.combine(obj.date,time.min) + def item_pubdate(self, obj): + return datetime.combine(obj.date,time.min) diff --git a/pgweb/news/forms.py b/pgweb/news/forms.py index a779db01..a711cfae 100644 --- a/pgweb/news/forms.py +++ b/pgweb/news/forms.py @@ -5,25 +5,25 @@ from pgweb.core.models import Organisation from models import NewsArticle, NewsTag class NewsArticleForm(forms.ModelForm): - def __init__(self, *args, **kwargs): - super(NewsArticleForm, self).__init__(*args, **kwargs) - def filter_by_user(self, user): - self.fields['org'].queryset = Organisation.objects.filter(managers=user, approved=True) - def clean_date(self): - if self.instance.pk and self.instance.approved: - if self.cleaned_data['date'] != self.instance.date: - raise ValidationError("You cannot change the date on an article that has been approved") - return self.cleaned_data['date'] + def __init__(self, *args, **kwargs): + super(NewsArticleForm, self).__init__(*args, **kwargs) + def filter_by_user(self, user): + self.fields['org'].queryset = Organisation.objects.filter(managers=user, approved=True) + def clean_date(self): + if self.instance.pk and self.instance.approved: + if self.cleaned_data['date'] != self.instance.date: + raise ValidationError("You cannot change the date on an article that has been approved") + return self.cleaned_data['date'] - @property - def described_checkboxes(self): - return { - 'tags': {t.id: t.description for t in NewsTag.objects.all()} - } + @property + def described_checkboxes(self): + return { + 'tags': {t.id: t.description for t in NewsTag.objects.all()} + } - class Meta: - model = NewsArticle - exclude = ('submitter', 'approved', 'tweeted') - widgets = { - 'tags': forms.CheckboxSelectMultiple, - } + class Meta: + model = NewsArticle + exclude = ('submitter', 'approved', 'tweeted') + widgets = { + 'tags': forms.CheckboxSelectMultiple, + } diff --git a/pgweb/news/management/commands/twitter_post.py b/pgweb/news/management/commands/twitter_post.py index 73e5b2c5..ce49b24c 100644 --- a/pgweb/news/management/commands/twitter_post.py +++ b/pgweb/news/management/commands/twitter_post.py @@ -16,33 +16,33 @@ from pgweb.news.models import NewsArticle import requests_oauthlib class Command(BaseCommand): - help = 'Post to twitter' + help = 'Post to twitter' - def handle(self, *args, **options): - curs = connection.cursor() - curs.execute("SELECT pg_try_advisory_lock(62387372)") - if not curs.fetchall()[0][0]: - raise CommandError("Failed to get advisory lock, existing twitter_post process stuck?") + def handle(self, *args, **options): + curs = connection.cursor() + curs.execute("SELECT pg_try_advisory_lock(62387372)") + if not curs.fetchall()[0][0]: + raise CommandError("Failed to get advisory lock, existing twitter_post process stuck?") - articles = list(NewsArticle.objects.filter(tweeted=False, approved=True, date__gt=datetime.now()-timedelta(days=7)).order_by('date')) - if not len(articles): - return + articles = list(NewsArticle.objects.filter(tweeted=False, approved=True, date__gt=datetime.now()-timedelta(days=7)).order_by('date')) + if not len(articles): + return - tw = requests_oauthlib.OAuth1Session(settings.TWITTER_CLIENT, - settings.TWITTER_CLIENTSECRET, - settings.TWITTER_TOKEN, - settings.TWITTER_TOKENSECRET) + tw = requests_oauthlib.OAuth1Session(settings.TWITTER_CLIENT, + settings.TWITTER_CLIENTSECRET, + 
settings.TWITTER_TOKEN, + settings.TWITTER_TOKENSECRET) - for a in articles: - # We hardcode 30 chars for the URL shortener. And then 10 to cover the intro and spacing. - statusstr = u"News: {0} {1}/about/news/{2}/".format(a.title[:140-40], settings.SITE_ROOT, a.id) - r = tw.post('https://api.twitter.com/1.1/statuses/update.json', data={ - 'status': statusstr, - }) - if r.status_code != 200: - print("Failed to post to twitter: %s " % r) - else: - a.tweeted = True - a.save() - # Don't post more often than once / 30 seconds, to not trigger flooding. - time.sleep(30) + for a in articles: + # We hardcode 30 chars for the URL shortener. And then 10 to cover the intro and spacing. + statusstr = u"News: {0} {1}/about/news/{2}/".format(a.title[:140-40], settings.SITE_ROOT, a.id) + r = tw.post('https://api.twitter.com/1.1/statuses/update.json', data={ + 'status': statusstr, + }) + if r.status_code != 200: + print("Failed to post to twitter: %s " % r) + else: + a.tweeted = True + a.save() + # Don't post more often than once / 30 seconds, to not trigger flooding. + time.sleep(30) diff --git a/pgweb/news/management/commands/twitter_register.py b/pgweb/news/management/commands/twitter_register.py index 46c100db..6914d10f 100644 --- a/pgweb/news/management/commands/twitter_register.py +++ b/pgweb/news/management/commands/twitter_register.py @@ -10,34 +10,34 @@ from django.conf import settings import requests_oauthlib class Command(BaseCommand): - help = 'Register with twitter oauth' + help = 'Register with twitter oauth' - def handle(self, *args, **options): - if not hasattr(settings, 'TWITTER_CLIENT'): - raise CommandError("TWITTER_CLIENT must be set in settings_local.py") - if not hasattr(settings, 'TWITTER_CLIENTSECRET'): - raise CommandError("TWITTER_CLIENTSECRET must be set in settings_local.py") - if hasattr(settings, 'TWITTER_TOKEN'): - raise CommandError("TWITTER_TOKEN is already set in settings_local.py") - if hasattr(settings, 'TWITTER_TOKENSECRET'): - raise CommandError("TWITTER_TOKENSECRET is already set in settings_local.py") + def handle(self, *args, **options): + if not hasattr(settings, 'TWITTER_CLIENT'): + raise CommandError("TWITTER_CLIENT must be set in settings_local.py") + if not hasattr(settings, 'TWITTER_CLIENTSECRET'): + raise CommandError("TWITTER_CLIENTSECRET must be set in settings_local.py") + if hasattr(settings, 'TWITTER_TOKEN'): + raise CommandError("TWITTER_TOKEN is already set in settings_local.py") + if hasattr(settings, 'TWITTER_TOKENSECRET'): + raise CommandError("TWITTER_TOKENSECRET is already set in settings_local.py") - # OK, now we're good to go :) - oauth = requests_oauthlib.OAuth1Session(settings.TWITTER_CLIENT, settings.TWITTER_CLIENTSECRET) - fetch_response = oauth.fetch_request_token('https://api.twitter.com/oauth/request_token') + # OK, now we're good to go :) + oauth = requests_oauthlib.OAuth1Session(settings.TWITTER_CLIENT, settings.TWITTER_CLIENTSECRET) + fetch_response = oauth.fetch_request_token('https://api.twitter.com/oauth/request_token') - authorization_url = oauth.authorization_url('https://api.twitter.com/oauth/authorize') - print 'Please go here and authorize: %s' % authorization_url + authorization_url = oauth.authorization_url('https://api.twitter.com/oauth/authorize') + print 'Please go here and authorize: %s' % authorization_url - pin = raw_input('Paste the PIN here: ') + pin = raw_input('Paste the PIN here: ') - oauth = requests_oauthlib.OAuth1Session(settings.TWITTER_CLIENT, - settings.TWITTER_CLIENTSECRET, - 
resource_owner_key=fetch_response.get('oauth_token'), - resource_owner_secret=fetch_response.get('oauth_token_secret'), - verifier=pin) - oauth_tokens = oauth.fetch_access_token('https://api.twitter.com/oauth/access_token') + oauth = requests_oauthlib.OAuth1Session(settings.TWITTER_CLIENT, + settings.TWITTER_CLIENTSECRET, + resource_owner_key=fetch_response.get('oauth_token'), + resource_owner_secret=fetch_response.get('oauth_token_secret'), + verifier=pin) + oauth_tokens = oauth.fetch_access_token('https://api.twitter.com/oauth/access_token') - print("Authorized. Please configure:") - print("TWITTER_TOKEN='%s'" % oauth_tokens.get('oauth_token')) - print("TWITTER_TOKENSECRET='%s'" % oauth_tokens.get('oauth_token_secret')) + print("Authorized. Please configure:") + print("TWITTER_TOKEN='%s'" % oauth_tokens.get('oauth_token')) + print("TWITTER_TOKENSECRET='%s'" % oauth_tokens.get('oauth_token_secret')) diff --git a/pgweb/news/models.py b/pgweb/news/models.py index 2c25489a..d31f64e8 100644 --- a/pgweb/news/models.py +++ b/pgweb/news/models.py @@ -3,51 +3,51 @@ from datetime import date from pgweb.core.models import Organisation class NewsTag(models.Model): - urlname = models.CharField(max_length=20, null=False, blank=False, unique=True) - name = models.CharField(max_length=32, null=False, blank=False) - description = models.CharField(max_length=200, null=False, blank=False) + urlname = models.CharField(max_length=20, null=False, blank=False, unique=True) + name = models.CharField(max_length=32, null=False, blank=False) + description = models.CharField(max_length=200, null=False, blank=False) - def __unicode__(self): - return self.name + def __unicode__(self): + return self.name - class Meta: - ordering = ('urlname', ) + class Meta: + ordering = ('urlname', ) class NewsArticle(models.Model): - org = models.ForeignKey(Organisation, null=False, blank=False, verbose_name="Organisation", help_text="If no organisations are listed, please check the organisation list and contact the organisation manager or webmaster@postgresql.org if none are listed.") - approved = models.BooleanField(null=False, blank=False, default=False) - date = models.DateField(null=False, blank=False, default=date.today) - title = models.CharField(max_length=200, null=False, blank=False) - content = models.TextField(null=False, blank=False) - tweeted = models.BooleanField(null=False, blank=False, default=False) - tags = models.ManyToManyField(NewsTag, blank=False, help_text="Hover mouse over tags to view full description") + org = models.ForeignKey(Organisation, null=False, blank=False, verbose_name="Organisation", help_text="If no organisations are listed, please check the organisation list and contact the organisation manager or webmaster@postgresql.org if none are listed.") + approved = models.BooleanField(null=False, blank=False, default=False) + date = models.DateField(null=False, blank=False, default=date.today) + title = models.CharField(max_length=200, null=False, blank=False) + content = models.TextField(null=False, blank=False) + tweeted = models.BooleanField(null=False, blank=False, default=False) + tags = models.ManyToManyField(NewsTag, blank=False, help_text="Hover mouse over tags to view full description") - send_notification = True - send_m2m_notification = True - markdown_fields = ('content',) + send_notification = True + send_m2m_notification = True + markdown_fields = ('content',) - def purge_urls(self): - yield '/about/news/%s/' % self.pk - yield '/about/newsarchive/' - yield '/news.rss' - yield 
'/news/.*.rss' - # FIXME: when to expire the front page? - yield '/$' + def purge_urls(self): + yield '/about/news/%s/' % self.pk + yield '/about/newsarchive/' + yield '/news.rss' + yield '/news/.*.rss' + # FIXME: when to expire the front page? + yield '/$' - def __unicode__(self): - return "%s: %s" % (self.date, self.title) + def __unicode__(self): + return "%s: %s" % (self.date, self.title) - def verify_submitter(self, user): - return (len(self.org.managers.filter(pk=user.pk)) == 1) + def verify_submitter(self, user): + return (len(self.org.managers.filter(pk=user.pk)) == 1) - def is_migrated(self): - if self.org.pk == 0: - return True - return False + def is_migrated(self): + if self.org.pk == 0: + return True + return False - @property - def displaydate(self): - return self.date.strftime("%Y-%m-%d") + @property + def displaydate(self): + return self.date.strftime("%Y-%m-%d") - class Meta: - ordering = ('-date',) + class Meta: + ordering = ('-date',) diff --git a/pgweb/news/struct.py b/pgweb/news/struct.py index 2c06c874..4c49a196 100644 --- a/pgweb/news/struct.py +++ b/pgweb/news/struct.py @@ -2,16 +2,16 @@ from datetime import date, timedelta from models import NewsArticle def get_struct(): - now = date.today() - fouryearsago = date.today() - timedelta(4*365, 0, 0) + now = date.today() + fouryearsago = date.today() - timedelta(4*365, 0, 0) - # We intentionally don't put /about/newsarchive/ in the sitemap, - # since we don't care about getting it indexed. - # Also, don't bother indexing anything > 4 years old + # We intentionally don't put /about/newsarchive/ in the sitemap, + # since we don't care about getting it indexed. + # Also, don't bother indexing anything > 4 years old - for n in NewsArticle.objects.filter(approved=True, date__gt=fouryearsago): - yearsold = (now - n.date).days / 365 - if yearsold > 4: - yearsold = 4 - yield ('about/news/%s/' % n.id, - 0.5-(yearsold/10.0)) + for n in NewsArticle.objects.filter(approved=True, date__gt=fouryearsago): + yearsold = (now - n.date).days / 365 + if yearsold > 4: + yearsold = 4 + yield ('about/news/%s/' % n.id, + 0.5-(yearsold/10.0)) diff --git a/pgweb/news/views.py b/pgweb/news/views.py index 94ac1e6c..a055cd17 100644 --- a/pgweb/news/views.py +++ b/pgweb/news/views.py @@ -11,34 +11,34 @@ from forms import NewsArticleForm import json def archive(request, tag=None, paging=None): - if tag: - tag = get_object_or_404(NewsTag,urlname=tag.strip('/')) - news = NewsArticle.objects.filter(approved=True, tags=tag) - else: - tag = None - news = NewsArticle.objects.filter(approved=True) - return render_pgweb(request, 'about', 'news/newsarchive.html', { - 'news': news, - 'tag': tag, - 'newstags': NewsTag.objects.all(), - }) + if tag: + tag = get_object_or_404(NewsTag,urlname=tag.strip('/')) + news = NewsArticle.objects.filter(approved=True, tags=tag) + else: + tag = None + news = NewsArticle.objects.filter(approved=True) + return render_pgweb(request, 'about', 'news/newsarchive.html', { + 'news': news, + 'tag': tag, + 'newstags': NewsTag.objects.all(), + }) def item(request, itemid, throwaway=None): - news = get_object_or_404(NewsArticle, pk=itemid) - if not news.approved: - raise Http404 - return render_pgweb(request, 'about', 'news/item.html', { - 'obj': news, - 'newstags': NewsTag.objects.all(), - }) + news = get_object_or_404(NewsArticle, pk=itemid) + if not news.approved: + raise Http404 + return render_pgweb(request, 'about', 'news/item.html', { + 'obj': news, + 'newstags': NewsTag.objects.all(), + }) def taglist_json(request): - return 
HttpResponse(json.dumps({ - 'tags': [{'name': t.urlname, 'description': t.description} for t in NewsTag.objects.distinct('urlname')], - }), content_type='application/json') + return HttpResponse(json.dumps({ + 'tags': [{'name': t.urlname, 'description': t.description} for t in NewsTag.objects.distinct('urlname')], + }), content_type='application/json') @login_required def form(request, itemid): - return simple_form(NewsArticle, itemid, request, NewsArticleForm, - redirect='/account/edit/news/') + return simple_form(NewsArticle, itemid, request, NewsArticleForm, + redirect='/account/edit/news/') diff --git a/pgweb/profserv/admin.py b/pgweb/profserv/admin.py index 8ab8563e..b644146c 100644 --- a/pgweb/profserv/admin.py +++ b/pgweb/profserv/admin.py @@ -4,8 +4,8 @@ from pgweb.util.admin import PgwebAdmin from models import ProfessionalService class ProfessionalServiceAdmin(PgwebAdmin): - list_display = ('__unicode__', 'approved',) - list_filter = ('approved',) - search_fields = ('org__name',) + list_display = ('__unicode__', 'approved',) + list_filter = ('approved',) + search_fields = ('org__name',) admin.site.register(ProfessionalService, ProfessionalServiceAdmin) diff --git a/pgweb/profserv/forms.py b/pgweb/profserv/forms.py index f61cde67..8df2ff40 100644 --- a/pgweb/profserv/forms.py +++ b/pgweb/profserv/forms.py @@ -4,12 +4,12 @@ from pgweb.core.models import Organisation from models import ProfessionalService class ProfessionalServiceForm(forms.ModelForm): - form_intro = """Note that in order to register a new professional service, you must first register an organisation. + form_intro = """Note that in order to register a new professional service, you must first register an organisation. If you have not done so, use this form.""" - def __init__(self, *args, **kwargs): - super(ProfessionalServiceForm, self).__init__(*args, **kwargs) - def filter_by_user(self, user): - self.fields['org'].queryset = Organisation.objects.filter(managers=user, approved=True) - class Meta: - model = ProfessionalService - exclude = ('submitter', 'approved', ) + def __init__(self, *args, **kwargs): + super(ProfessionalServiceForm, self).__init__(*args, **kwargs) + def filter_by_user(self, user): + self.fields['org'].queryset = Organisation.objects.filter(managers=user, approved=True) + class Meta: + model = ProfessionalService + exclude = ('submitter', 'approved', ) diff --git a/pgweb/profserv/models.py b/pgweb/profserv/models.py index c72b134d..180e1d43 100644 --- a/pgweb/profserv/models.py +++ b/pgweb/profserv/models.py @@ -3,40 +3,40 @@ from django.db import models from pgweb.core.models import Organisation class ProfessionalService(models.Model): - approved = models.BooleanField(null=False, blank=False, default=False) + approved = models.BooleanField(null=False, blank=False, default=False) - org = models.OneToOneField(Organisation, null=False, blank=False, - db_column="organisation_id", - verbose_name="organisation", - help_text="If no organisations are listed, please check the organisation list and contact the organisation manager or webmaster@postgresql.org if none are listed.") - description = models.TextField(null=False,blank=False) - employees = models.CharField(max_length=32, null=True, blank=True) - locations = models.CharField(max_length=128, null=True, blank=True) - region_africa = models.BooleanField(null=False, default=False, verbose_name="Africa") - region_asia = models.BooleanField(null=False, default=False, verbose_name="Asia") - region_europe = models.BooleanField(null=False, default=False, 
verbose_name="Europe") - region_northamerica = models.BooleanField(null=False, default=False, verbose_name="North America") - region_oceania = models.BooleanField(null=False, default=False, verbose_name="Oceania") - region_southamerica = models.BooleanField(null=False, default=False, verbose_name="South America") - hours = models.CharField(max_length=128, null=True, blank=True) - languages = models.CharField(max_length=128, null=True, blank=True) - customerexample = models.TextField(blank=True, null=True, verbose_name="Customer Example") - experience = models.TextField(blank=True, null=True) - contact = models.TextField(null=True, blank=True) - url = models.URLField(max_length=128, null=True, blank=True, verbose_name="URL") - provides_support = models.BooleanField(null=False, default=False) - provides_hosting = models.BooleanField(null=False, default=False) - interfaces = models.CharField(max_length=512, null=True, blank=True, verbose_name="Interfaces (for hosting)") + org = models.OneToOneField(Organisation, null=False, blank=False, + db_column="organisation_id", + verbose_name="organisation", + help_text="If no organisations are listed, please check the organisation list and contact the organisation manager or webmaster@postgresql.org if none are listed.") + description = models.TextField(null=False,blank=False) + employees = models.CharField(max_length=32, null=True, blank=True) + locations = models.CharField(max_length=128, null=True, blank=True) + region_africa = models.BooleanField(null=False, default=False, verbose_name="Africa") + region_asia = models.BooleanField(null=False, default=False, verbose_name="Asia") + region_europe = models.BooleanField(null=False, default=False, verbose_name="Europe") + region_northamerica = models.BooleanField(null=False, default=False, verbose_name="North America") + region_oceania = models.BooleanField(null=False, default=False, verbose_name="Oceania") + region_southamerica = models.BooleanField(null=False, default=False, verbose_name="South America") + hours = models.CharField(max_length=128, null=True, blank=True) + languages = models.CharField(max_length=128, null=True, blank=True) + customerexample = models.TextField(blank=True, null=True, verbose_name="Customer Example") + experience = models.TextField(blank=True, null=True) + contact = models.TextField(null=True, blank=True) + url = models.URLField(max_length=128, null=True, blank=True, verbose_name="URL") + provides_support = models.BooleanField(null=False, default=False) + provides_hosting = models.BooleanField(null=False, default=False) + interfaces = models.CharField(max_length=512, null=True, blank=True, verbose_name="Interfaces (for hosting)") - purge_urls = ('/support/professional_', ) + purge_urls = ('/support/professional_', ) - send_notification = True + send_notification = True - def verify_submitter(self, user): - return (len(self.org.managers.filter(pk=user.pk)) == 1) + def verify_submitter(self, user): + return (len(self.org.managers.filter(pk=user.pk)) == 1) - def __unicode__(self): - return self.org.name + def __unicode__(self): + return self.org.name - class Meta: - ordering = ('org__name',) + class Meta: + ordering = ('org__name',) diff --git a/pgweb/profserv/struct.py b/pgweb/profserv/struct.py index e3192d2d..659753b2 100644 --- a/pgweb/profserv/struct.py +++ b/pgweb/profserv/struct.py @@ -1,6 +1,6 @@ from views import regions def get_struct(): - for key, name in regions: - yield ('support/professional_support/%s/' % key, None) - yield ('support/professional_hosting/%s/' % 
key, None) + for key, name in regions: + yield ('support/professional_support/%s/' % key, None) + yield ('support/professional_hosting/%s/' % key, None) diff --git a/pgweb/profserv/views.py b/pgweb/profserv/views.py index eb08c510..ad135d22 100644 --- a/pgweb/profserv/views.py +++ b/pgweb/profserv/views.py @@ -17,44 +17,44 @@ regions = ( ) def root(request, servtype): - title = servtype=='support' and 'Professional Services' or 'Hosting Providers' - what = servtype=='support' and 'support' or 'hosting' - support = servtype=='support' - return render_pgweb(request, 'support', 'profserv/root.html', { - 'title': title, - 'support': support, - 'regions': regions, - 'what': what, - }) + title = servtype=='support' and 'Professional Services' or 'Hosting Providers' + what = servtype=='support' and 'support' or 'hosting' + support = servtype=='support' + return render_pgweb(request, 'support', 'profserv/root.html', { + 'title': title, + 'support': support, + 'regions': regions, + 'what': what, + }) def region(request, servtype, regionname): - regname = [n for r,n in regions if r==regionname] - if not regname: - raise Http404 - regname = regname[0] + regname = [n for r,n in regions if r==regionname] + if not regname: + raise Http404 + regname = regname[0] - what = servtype=='support' and 'support' or 'hosting' - whatname = servtype=='support' and 'Professional Services' or 'Hosting Providers' - title = "%s - %s" % (whatname, regname) - support = servtype=='support' + what = servtype=='support' and 'support' or 'hosting' + whatname = servtype=='support' and 'Professional Services' or 'Hosting Providers' + title = "%s - %s" % (whatname, regname) + support = servtype=='support' - # DB model is a bit funky here, so use the extra-where functionality to filter properly. - # Field names are cleaned up earlier, so it's safe against injections. - services = ProfessionalService.objects.select_related('org').filter(approved=True).extra(where=["region_%s AND provides_%s" % (regionname, what),]) + # DB model is a bit funky here, so use the extra-where functionality to filter properly. + # Field names are cleaned up earlier, so it's safe against injections. 
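The .extra(where=...) call that follows interpolates column names, not user values, into the WHERE clause; as the comment notes, this is safe only because regionname and what are constrained by the fixed regions tuple and the URL patterns. A sketch of the same filter built with ORM keyword arguments instead of raw SQL, assuming only the boolean fields declared on the model above:

    # e.g. regionname='europe', what='support' gives
    # region_europe=True, provides_support=True
    filters = {
        'approved': True,
        'region_%s' % regionname: True,
        'provides_%s' % what: True,
    }
    services = ProfessionalService.objects.select_related('org').filter(**filters)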
+ services = ProfessionalService.objects.select_related('org').filter(approved=True).extra(where=["region_%s AND provides_%s" % (regionname, what),]) - return render_pgweb(request, 'support', 'profserv/list.html', { - 'title': title, - 'support': support, - 'what': what, - 'whatname': whatname, - 'regionname': regname, - 'services': services, - }) + return render_pgweb(request, 'support', 'profserv/list.html', { + 'title': title, + 'support': support, + 'what': what, + 'whatname': whatname, + 'regionname': regname, + 'services': services, + }) # Forms to edit @login_required def profservform(request, itemid): - return simple_form(ProfessionalService, itemid, request, ProfessionalServiceForm, - redirect='/account/edit/services/') + return simple_form(ProfessionalService, itemid, request, ProfessionalServiceForm, + redirect='/account/edit/services/') diff --git a/pgweb/pugs/admin.py b/pgweb/pugs/admin.py index b4743154..35e13e73 100644 --- a/pgweb/pugs/admin.py +++ b/pgweb/pugs/admin.py @@ -4,8 +4,8 @@ from pgweb.util.admin import PgwebAdmin from models import PUG class PUGAdmin(PgwebAdmin): - list_display = ('title', 'approved', ) - list_filter = ('approved', ) - search_fields = ('title', ) + list_display = ('title', 'approved', ) + list_filter = ('approved', ) + search_fields = ('title', ) admin.site.register(PUG, PUGAdmin) diff --git a/pgweb/pugs/models.py b/pgweb/pugs/models.py index a72e0626..54383f10 100644 --- a/pgweb/pugs/models.py +++ b/pgweb/pugs/models.py @@ -1,19 +1,19 @@ from django.db import models class PUG(models.Model): - """ - contains information about a local PostgreSQL user group - """ - country = models.ForeignKey('core.Country') - org = models.ForeignKey('core.Organisation', null=True, blank=True, help_text='Organisation that manages the PUG and its contents') - approved = models.BooleanField(null=False, blank=False, default=False) - locale = models.CharField(max_length=255, help_text="Locale where the PUG meets, e.g. 'New York City'") - title = models.CharField(max_length=255, help_text="Title/Name of the PUG, e.g. 'NYC PostgreSQL User Group'") - website_url = models.TextField(null=True, blank=True) - mailing_list_url = models.TextField(null=True, blank=True) + """ + contains information about a local PostgreSQL user group + """ + country = models.ForeignKey('core.Country') + org = models.ForeignKey('core.Organisation', null=True, blank=True, help_text='Organisation that manages the PUG and its contents') + approved = models.BooleanField(null=False, blank=False, default=False) + locale = models.CharField(max_length=255, help_text="Locale where the PUG meets, e.g. 'New York City'") + title = models.CharField(max_length=255, help_text="Title/Name of the PUG, e.g. 
'NYC PostgreSQL User Group'") + website_url = models.TextField(null=True, blank=True) + mailing_list_url = models.TextField(null=True, blank=True) - purge_urls = ('/community/user-groups/', ) - send_notification = True + purge_urls = ('/community/user-groups/', ) + send_notification = True - def __unicode__(self): - return self.title + def __unicode__(self): + return self.title diff --git a/pgweb/pugs/views.py b/pgweb/pugs/views.py index 83414ed7..167fd1f0 100644 --- a/pgweb/pugs/views.py +++ b/pgweb/pugs/views.py @@ -3,18 +3,18 @@ from pgweb.util.contexts import render_pgweb from models import PUG def index(request): - """ - contains list of PUGs, in country/locale alphabetical order - """ - pug_list = [] - for pug in PUG.objects.filter(approved=True).order_by('country__name', 'locale').all(): - if pug_list and pug_list[-1].get('country') == pug.country.name: - pug_list[-1]['pugs'].append(pug) - else: - pug_list.append({ - 'country': pug.country.name, - 'pugs': [pug] - }) - return render_pgweb(request, 'community', 'pugs/index.html', { - 'pug_list': pug_list, - }) + """ + contains list of PUGs, in country/locale alphabetical order + """ + pug_list = [] + for pug in PUG.objects.filter(approved=True).order_by('country__name', 'locale').all(): + if pug_list and pug_list[-1].get('country') == pug.country.name: + pug_list[-1]['pugs'].append(pug) + else: + pug_list.append({ + 'country': pug.country.name, + 'pugs': [pug] + }) + return render_pgweb(request, 'community', 'pugs/index.html', { + 'pug_list': pug_list, + }) diff --git a/pgweb/quotes/admin.py b/pgweb/quotes/admin.py index 8b5ed8cf..39267e17 100644 --- a/pgweb/quotes/admin.py +++ b/pgweb/quotes/admin.py @@ -2,6 +2,6 @@ from django.contrib import admin from models import Quote class QuoteAdmin(admin.ModelAdmin): - list_display = ('quote', 'who', 'org', ) + list_display = ('quote', 'who', 'org', ) admin.site.register(Quote, QuoteAdmin) diff --git a/pgweb/quotes/models.py b/pgweb/quotes/models.py index f390f65e..fa690bcf 100644 --- a/pgweb/quotes/models.py +++ b/pgweb/quotes/models.py @@ -1,18 +1,18 @@ from django.db import models class Quote(models.Model): - approved = models.BooleanField(null=False, default=False) - quote = models.TextField(null=False, blank=False) - who = models.CharField(max_length=100, null=False, blank=False) - org = models.CharField(max_length=100, null=False, blank=False) - link = models.URLField(null=False, blank=False) + approved = models.BooleanField(null=False, default=False) + quote = models.TextField(null=False, blank=False) + who = models.CharField(max_length=100, null=False, blank=False) + org = models.CharField(max_length=100, null=False, blank=False) + link = models.URLField(null=False, blank=False) - send_notification = True + send_notification = True - purge_urls = ('/about/quotesarchive/', '/$', ) + purge_urls = ('/about/quotesarchive/', '/$', ) - def __unicode__(self): - if len(self.quote) > 75: - return "%s..." % self.quote[:75] - else: - return self.quote + def __unicode__(self): + if len(self.quote) > 75: + return "%s..." % self.quote[:75] + else: + return self.quote diff --git a/pgweb/search/views.py b/pgweb/search/views.py index c6061a78..07f5ded1 100644 --- a/pgweb/search/views.py +++ b/pgweb/search/views.py @@ -17,305 +17,305 @@ from pgweb.lists.models import MailingList # Conditionally import memcached library. Everything will work without # it, so we allow development installs to run without it... 
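An aside on the pugs index() view a little further up: because the queryset is ordered by country name first, the hand-rolled grouping is equivalent to itertools.groupby. A sketch (illustrative, not the project's code):

    from itertools import groupby

    from pgweb.pugs.models import PUG

    pugs = PUG.objects.filter(approved=True).order_by('country__name', 'locale')
    # groupby() only merges adjacent items, which is exactly why the
    # order_by() on country name matters here.
    pug_list = [
        {'country': country, 'pugs': list(group)}
        for country, group in groupby(pugs, key=lambda p: p.country.name)
    ]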
try: - import pylibmc - has_memcached=True + import pylibmc + has_memcached=True except: - has_memcached=False + has_memcached=False def generate_pagelinks(pagenum, totalpages, querystring): - # Generate a list of links to page through a search result - # We generate these in HTML from the python code because it's - # simply too ugly to try to do it in the template. - if totalpages < 2: - return + # Generate a list of links to page through a search result + # We generate these in HTML from the python code because it's + # simply too ugly to try to do it in the template. + if totalpages < 2: + return - if pagenum > 1: - # Prev link - yield '<a href="%s&p=%s">Prev</a>' % (querystring, pagenum-1) + if pagenum > 1: + # Prev link + yield '<a href="%s&p=%s">Prev</a>' % (querystring, pagenum-1) - if pagenum > 10: - start = pagenum - 10 - else: - start = 1 + if pagenum > 10: + start = pagenum - 10 + else: + start = 1 - for i in range(start, min(start+20, totalpages + 1)): - if i == pagenum: - yield "%s" % i - else: - yield '<a href="%s&p=%s">%s</a>' % (querystring, i, i) + for i in range(start, min(start+20, totalpages + 1)): + if i == pagenum: + yield "%s" % i + else: + yield '<a href="%s&p=%s">%s</a>' % (querystring, i, i) - if pagenum != min(start+20, totalpages): - yield '<a href="%s&p=%s">Next</a>' % (querystring, pagenum+1) + if pagenum != min(start+20, totalpages): + yield '<a href="%s&p=%s">Next</a>' % (querystring, pagenum+1) @csrf_exempt @cache(minutes=15) def search(request): - # Perform a general web search - # Since this lives in a different database, we open a direct - # connection with psycopg, thus bypassing everything that has to do - # with django. + # Perform a general web search + # Since this lives in a different database, we open a direct + # connection with psycopg, thus bypassing everything that has to do + # with django. - # constants that we might eventually want to make configurable - hitsperpage = 20 + # constants that we might eventually want to make configurable + hitsperpage = 20 - if request.GET.has_key('m') and request.GET['m'] == '1': - searchlists = True + if request.GET.has_key('m') and request.GET['m'] == '1': + searchlists = True - if request.GET.has_key('l'): - if request.GET['l'] != '': - try: - listid = int(request.GET['l']) - except: - listid = None - else: - listid = None - else: - # Listid not specified. But do we have the name? - if request.GET.has_key('ln'): - try: - ll = MailingList.objects.get(listname=request.GET['ln']) - listid = ll.id - except MailingList.DoesNotExist: - # Invalid list name just resets the default of the form, - # no need to throw an error. - listid = None - else: - listid = None + if request.GET.has_key('l'): + if request.GET['l'] != '': + try: + listid = int(request.GET['l']) + except: + listid = None + else: + listid = None + else: + # Listid not specified. But do we have the name? + if request.GET.has_key('ln'): + try: + ll = MailingList.objects.get(listname=request.GET['ln']) + listid = ll.id + except MailingList.DoesNotExist: + # Invalid list name just resets the default of the form, + # no need to throw an error.
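generate_pagelinks() above yields ready-made HTML fragments: a Prev link, a window of up to 20 numbered page links around the current page, and a Next link. The view later joins them with spaces; a sketch of such a call, with a query string of the same shape the list-search branch builds below:

    # Page 3 of 12; the query string already carries the search
    # parameters, so the generator only appends &p=N to each link.
    pagelinks = " ".join(generate_pagelinks(3, 12, "?m=1&q=foo&l=&d=365&s=r"))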
+ listid = None + else: + listid = None - if request.GET.has_key('d'): - try: - dateval = int(request.GET['d']) - except: - dateval = None - else: - dateval = None + if request.GET.has_key('d'): + try: + dateval = int(request.GET['d']) + except: + dateval = None + else: + dateval = None - if request.GET.has_key('s'): - listsort = request.GET['s'] - if not listsort in ('r', 'd', 'i'): - listsort = 'r' - else: - listsort = 'r' + if request.GET.has_key('s'): + listsort = request.GET['s'] + if not listsort in ('r', 'd', 'i'): + listsort = 'r' + else: + listsort = 'r' - if not dateval: - dateval = 365 + if not dateval: + dateval = 365 - sortoptions = ( - {'val':'r', 'text': 'Rank', 'selected': not (request.GET.has_key('s') and request.GET['s'] == 'd')}, - {'val':'d', 'text': 'Date', 'selected': request.GET.has_key('s') and request.GET['s'] == 'd'}, - {'val':'i', 'text': 'Reverse date', 'selected': request.GET.has_key('s') and request.GET['s'] == 'i'}, - ) - dateoptions = ( - {'val': -1, 'text': 'anytime'}, - {'val': 1, 'text': 'within last day'}, - {'val': 7, 'text': 'within last week'}, - {'val': 31, 'text': 'within last month'}, - {'val': 186, 'text': 'within last 6 months'}, - {'val': 365, 'text': 'within last year'}, - ) - else: - searchlists = False - if request.GET.has_key('u'): - suburl = request.GET['u'] - else: - suburl = None + sortoptions = ( + {'val':'r', 'text': 'Rank', 'selected': not (request.GET.has_key('s') and request.GET['s'] == 'd')}, + {'val':'d', 'text': 'Date', 'selected': request.GET.has_key('s') and request.GET['s'] == 'd'}, + {'val':'i', 'text': 'Reverse date', 'selected': request.GET.has_key('s') and request.GET['s'] == 'i'}, + ) + dateoptions = ( + {'val': -1, 'text': 'anytime'}, + {'val': 1, 'text': 'within last day'}, + {'val': 7, 'text': 'within last week'}, + {'val': 31, 'text': 'within last month'}, + {'val': 186, 'text': 'within last 6 months'}, + {'val': 365, 'text': 'within last year'}, + ) + else: + searchlists = False + if request.GET.has_key('u'): + suburl = request.GET['u'] + else: + suburl = None - if request.GET.has_key('a'): - allsites = (request.GET['a'] == "1") - else: - allsites = False + if request.GET.has_key('a'): + allsites = (request.GET['a'] == "1") + else: + allsites = False - # Check that we actually have something to search for - if not request.GET.has_key('q') or request.GET['q'] == '': - if searchlists: - return render(request, 'search/listsearch.html', { - 'search_error': "No search term specified.", - 'sortoptions': sortoptions, - 'lists': MailingList.objects.all().order_by("group__sortkey"), - 'listid': listid, - 'dates': dateoptions, - 'dateval': dateval, - }) - else: - return render(request, 'search/sitesearch.html', { - 'search_error': "No search term specified.", - }) - query = request.GET['q'].strip() + # Check that we actually have something to search for + if not request.GET.has_key('q') or request.GET['q'] == '': + if searchlists: + return render(request, 'search/listsearch.html', { + 'search_error': "No search term specified.", + 'sortoptions': sortoptions, + 'lists': MailingList.objects.all().order_by("group__sortkey"), + 'listid': listid, + 'dates': dateoptions, + 'dateval': dateval, + }) + else: + return render(request, 'search/sitesearch.html', { + 'search_error': "No search term specified.", + }) + query = request.GET['q'].strip() - # Anti-stefan prevention - if len(query) > 1000: - return render(request, 'search/sitesearch.html', { - 'search_error': "Search term too long.", - }) + # Anti-stefan prevention + if len(query) 
> 1000: + return render(request, 'search/sitesearch.html', { + 'search_error': "Search term too long.", + }) - # Is the request being paged? - if request.GET.has_key('p'): - try: - pagenum = int(request.GET['p']) - except: - pagenum = 1 - else: - pagenum = 1 + # Is the request being paged? + if request.GET.has_key('p'): + try: + pagenum = int(request.GET['p']) + except: + pagenum = 1 + else: + pagenum = 1 - firsthit = (pagenum - 1) * hitsperpage + 1 + firsthit = (pagenum - 1) * hitsperpage + 1 - if searchlists: - # Lists are searched by passing the work down using a http - # API. In the future, we probably want to do everything - # through a http API and merge hits, but that's for later - p = { - 'q': query.encode('utf-8'), - 's': listsort, - } - if listid: - if listid < 0: - # This is a list group, we expand that on the web server - p['ln'] = ','.join([x.listname for x in MailingList.objects.filter(group=-listid)]) - else: - p['ln'] = MailingList.objects.get(pk=listid).listname - if dateval: - p['d'] = dateval - urlstr = urllib.urlencode(p) - # If memcached is available, let's try it - hits = None - if has_memcached: - memc = pylibmc.Client(['127.0.0.1',], binary=True) - # behavior not supported on pylibmc in squeeze:: behaviors={'tcp_nodelay':True}) - try: - hits = memc.get(urlstr) - except Exception: - # If we had an exception, don't try to store either - memc = None - if not hits: - # No hits found - so try to get them from the search server - if settings.ARCHIVES_SEARCH_PLAINTEXT: - c = httplib.HTTPConnection(settings.ARCHIVES_SEARCH_SERVER, strict=True, timeout=5) - else: - c = httplib.HTTPSConnection(settings.ARCHIVES_SEARCH_SERVER, strict=True, timeout=5) - c.request('POST', '/archives-search/', urlstr, {'Content-type': 'application/x-www-form-urlencoded; charset=utf-8'}) - c.sock.settimeout(20) # Set a 20 second timeout - try: - r = c.getresponse() - except (socket.timeout, ssl.SSLError): - return render(request, 'search/listsearch.html', { - 'search_error': 'Timeout when talking to search server. Please try your search again later, or with a more restrictive search terms.', - }) - if r.status != 200: - memc = None - return render(request, 'search/listsearch.html', { - 'search_error': 'Error talking to search server: %s' % r.reason, - }) - hits = json.loads(r.read()) - if has_memcached and memc: - # Store them in memcached too! But only for 10 minutes... - # And always compress it, just because we can - memc.set(urlstr, hits, 60*10, 1) - memc = None + if searchlists: + # Lists are searched by passing the work down using a http + # API. 
In the future, we probably want to do everything + # through a http API and merge hits, but that's for later + p = { + 'q': query.encode('utf-8'), + 's': listsort, + } + if listid: + if listid < 0: + # This is a list group, we expand that on the web server + p['ln'] = ','.join([x.listname for x in MailingList.objects.filter(group=-listid)]) + else: + p['ln'] = MailingList.objects.get(pk=listid).listname + if dateval: + p['d'] = dateval + urlstr = urllib.urlencode(p) + # If memcached is available, let's try it + hits = None + if has_memcached: + memc = pylibmc.Client(['127.0.0.1',], binary=True) + # behavior not supported on pylibmc in squeeze:: behaviors={'tcp_nodelay':True}) + try: + hits = memc.get(urlstr) + except Exception: + # If we had an exception, don't try to store either + memc = None + if not hits: + # No hits found - so try to get them from the search server + if settings.ARCHIVES_SEARCH_PLAINTEXT: + c = httplib.HTTPConnection(settings.ARCHIVES_SEARCH_SERVER, strict=True, timeout=5) + else: + c = httplib.HTTPSConnection(settings.ARCHIVES_SEARCH_SERVER, strict=True, timeout=5) + c.request('POST', '/archives-search/', urlstr, {'Content-type': 'application/x-www-form-urlencoded; charset=utf-8'}) + c.sock.settimeout(20) # Set a 20 second timeout + try: + r = c.getresponse() + except (socket.timeout, ssl.SSLError): + return render(request, 'search/listsearch.html', { + 'search_error': 'Timeout when talking to search server. Please try your search again later, or with more restrictive search terms.', + }) + if r.status != 200: + memc = None + return render(request, 'search/listsearch.html', { + 'search_error': 'Error talking to search server: %s' % r.reason, + }) + hits = json.loads(r.read()) + if has_memcached and memc: + # Store them in memcached too! But only for 10 minutes... + # And always compress it, just because we can + memc.set(urlstr, hits, 60*10, 1) + memc = None - if isinstance(hits, dict): - # This is not just a list of hits. - # Right now the only supported dict result is a messageid - # match, but make sure that's what it is. + if isinstance(hits, dict): + # This is not just a list of hits. + # Right now the only supported dict result is a messageid + # match, but make sure that's what it is.
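The list-search branch above is a cache-aside proxy: the urlencoded query doubles as the memcached key, a miss falls through to the archives search server over HTTP, and the response is stored for ten minutes. The pattern, condensed (names follow the code above; a sketch, not a drop-in replacement):

    def cached_search(memc, urlstr, fetch_from_backend):
        hits = None
        if memc is not None:
            try:
                hits = memc.get(urlstr)
            except Exception:
                # Broken cache: skip both this read and the later write.
                memc = None
        if not hits:
            hits = fetch_from_backend(urlstr)
            if memc is not None:
                # 10 minute TTL; the final argument makes pylibmc compress
                # the stored value, as in the code above.
                memc.set(urlstr, hits, 60 * 10, 1)
        return hits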
+ if hits['messageidmatch'] == 1: + return HttpResponseRedirect("/message-id/%s" % query) - totalhits = len(hits) - querystr = "?m=1&q=%s&l=%s&d=%s&s=%s" % ( - urllib.quote_plus(query.encode('utf-8')), - listid or '', - dateval, - listsort - ) + totalhits = len(hits) + querystr = "?m=1&q=%s&l=%s&d=%s&s=%s" % ( + urllib.quote_plus(query.encode('utf-8')), + listid or '', + dateval, + listsort + ) - return render(request, 'search/listsearch.html', { - 'hitcount': totalhits, - 'firsthit': firsthit, - 'lasthit': min(totalhits, firsthit+hitsperpage-1), - 'query': request.GET['q'], - 'pagelinks': " ".join( - generate_pagelinks(pagenum, - totalhits / hitsperpage + 1, - querystr)), - 'hits': [{ - 'date': h['d'], - 'subject': h['s'], - 'author': h['f'], - 'messageid': h['m'], - 'abstract': h['a'], - 'rank': h['r'], - } for h in hits[firsthit-1:firsthit+hitsperpage-1]], - 'sortoptions': sortoptions, - 'lists': MailingList.objects.all().order_by("group__sortkey"), - 'listid': listid, - 'dates': dateoptions, - 'dateval': dateval, - }) + return render(request, 'search/listsearch.html', { + 'hitcount': totalhits, + 'firsthit': firsthit, + 'lasthit': min(totalhits, firsthit+hitsperpage-1), + 'query': request.GET['q'], + 'pagelinks': " ".join( + generate_pagelinks(pagenum, + totalhits / hitsperpage + 1, + querystr)), + 'hits': [{ + 'date': h['d'], + 'subject': h['s'], + 'author': h['f'], + 'messageid': h['m'], + 'abstract': h['a'], + 'rank': h['r'], + } for h in hits[firsthit-1:firsthit+hitsperpage-1]], + 'sortoptions': sortoptions, + 'lists': MailingList.objects.all().order_by("group__sortkey"), + 'listid': listid, + 'dates': dateoptions, + 'dateval': dateval, + }) - else: - # Website search is still done by making a regular pgsql connection - # to the search server. - try: - conn = psycopg2.connect(settings.SEARCH_DSN) - curs = conn.cursor() - except: - return render(request, 'search/sitesearch.html', { - 'search_error': 'Could not connect to search database.' - }) + else: + # Website search is still done by making a regular pgsql connection + # to the search server. + try: + conn = psycopg2.connect(settings.SEARCH_DSN) + curs = conn.cursor() + except: + return render(request, 'search/sitesearch.html', { + 'search_error': 'Could not connect to search database.' + }) - # This is kind of a hack, but... Some URLs are flagged as internal - # and should as such only be included in searches that explicitly - # reference the suburl that they are in. - if suburl and suburl.startswith('/docs/devel'): - include_internal = True - else: - include_internal = False + # This is kind of a hack, but... Some URLs are flagged as internal + # and should as such only be included in searches that explicitly + # reference the suburl that they are in. + if suburl and suburl.startswith('/docs/devel'): + include_internal = True + else: + include_internal = False - # perform the query for general web search - try: - curs.execute("SELECT * FROM site_search(%(query)s, %(firsthit)s, %(hitsperpage)s, %(allsites)s, %(suburl)s, %(internal)s)", { - 'query': query, - 'firsthit': firsthit - 1, - 'hitsperpage': hitsperpage, - 'allsites': allsites, - 'suburl': suburl, - 'internal': include_internal, - }) - except psycopg2.ProgrammingError: - return render(request, 'search/sitesearch.html', { - 'search_error': 'Error executing search query.' 
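Website search, by contrast, is a single round trip to the site_search() SQL function on the search database. Called directly with psycopg2 it looks roughly like this (the DSN is hypothetical; the function signature is the one used above):

    import psycopg2

    conn = psycopg2.connect('host=searchdb dbname=search')  # hypothetical DSN
    curs = conn.cursor()
    curs.execute(
        "SELECT * FROM site_search(%(query)s, %(firsthit)s, %(hitsperpage)s, "
        "%(allsites)s, %(suburl)s, %(internal)s)",
        {
            'query': 'logical replication',
            'firsthit': 0,       # zero-based offset of the first hit
            'hitsperpage': 20,
            'allsites': False,   # restrict to postgresql.org itself
            'suburl': None,
            'internal': False,
        })
    rows = curs.fetchall()       # the last row carries the total hit count
    conn.close()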
- }) + # perform the query for general web search + try: + curs.execute("SELECT * FROM site_search(%(query)s, %(firsthit)s, %(hitsperpage)s, %(allsites)s, %(suburl)s, %(internal)s)", { + 'query': query, + 'firsthit': firsthit - 1, + 'hitsperpage': hitsperpage, + 'allsites': allsites, + 'suburl': suburl, + 'internal': include_internal, + }) + except psycopg2.ProgrammingError: + return render(request, 'search/sitesearch.html', { + 'search_error': 'Error executing search query.' + }) - hits = curs.fetchall() - conn.close() - totalhits = int(hits[-1][5]) - try: - if suburl: - quoted_suburl = urllib.quote_plus(suburl) - else: - quoted_suburl = '' - except: - quoted_suburl = '' - querystr = "?q=%s&a=%s&u=%s" % ( - urllib.quote_plus(query.encode('utf-8')), - allsites and "1" or "0", - quoted_suburl, - ) + hits = curs.fetchall() + conn.close() + totalhits = int(hits[-1][5]) + try: + if suburl: + quoted_suburl = urllib.quote_plus(suburl) + else: + quoted_suburl = '' + except: + quoted_suburl = '' + querystr = "?q=%s&a=%s&u=%s" % ( + urllib.quote_plus(query.encode('utf-8')), + allsites and "1" or "0", + quoted_suburl, + ) - return render(request, 'search/sitesearch.html', { - 'suburl': suburl, - 'allsites': allsites, - 'hitcount': totalhits, - 'firsthit': firsthit, - 'lasthit': min(totalhits, firsthit+hitsperpage-1), - 'query': request.GET['q'], - 'pagelinks': " ".join( - generate_pagelinks(pagenum, - totalhits / hitsperpage + 1, - querystr)), - 'hits': [{ - 'title': h[3], - 'url': "%s%s" % (h[1], h[2]), - 'abstract': h[4].replace("[[[[[[", "").replace("]]]]]]",""), - 'rank': h[5]} for h in hits[:-1]], - }) + return render(request, 'search/sitesearch.html', { + 'suburl': suburl, + 'allsites': allsites, + 'hitcount': totalhits, + 'firsthit': firsthit, + 'lasthit': min(totalhits, firsthit+hitsperpage-1), + 'query': request.GET['q'], + 'pagelinks': " ".join( + generate_pagelinks(pagenum, + totalhits / hitsperpage + 1, + querystr)), + 'hits': [{ + 'title': h[3], + 'url': "%s%s" % (h[1], h[2]), + 'abstract': h[4].replace("[[[[[[", "").replace("]]]]]]",""), + 'rank': h[5]} for h in hits[:-1]], + }) diff --git a/pgweb/security/admin.py b/pgweb/security/admin.py index 9863a764..977a407b 100644 --- a/pgweb/security/admin.py +++ b/pgweb/security/admin.py @@ -7,59 +7,59 @@ from pgweb.news.models import NewsArticle from models import SecurityPatch, SecurityPatchVersion class VersionChoiceField(forms.ModelChoiceField): - def label_from_instance(self, obj): - return obj.numtree + def label_from_instance(self, obj): + return obj.numtree class SecurityPatchVersionAdminForm(forms.ModelForm): - model = SecurityPatchVersion - version = VersionChoiceField(queryset=Version.objects.filter(tree__gt=0), required=True) + model = SecurityPatchVersion + version = VersionChoiceField(queryset=Version.objects.filter(tree__gt=0), required=True) class SecurityPatchVersionAdmin(admin.TabularInline): - model = SecurityPatchVersion - extra = 2 - form = SecurityPatchVersionAdminForm + model = SecurityPatchVersion + extra = 2 + form = SecurityPatchVersionAdminForm class SecurityPatchForm(forms.ModelForm): - model = SecurityPatch - newspost = forms.ModelChoiceField(queryset=NewsArticle.objects.filter(org=settings.PGDG_ORG_ID), required=False) + model = SecurityPatch + newspost = forms.ModelChoiceField(queryset=NewsArticle.objects.filter(org=settings.PGDG_ORG_ID), required=False) - def clean(self): - d = super(SecurityPatchForm, self).clean() - vecs = [v for k,v in d.items() if k.startswith('vector_')] - empty = [v for v in vecs if v == 
''] - if len(empty) != len(vecs) and len(empty) != 0: - for k in d.keys(): - if k.startswith('vector_'): - self.add_error(k, 'Either specify all vector values or none') - return d + def clean(self): + d = super(SecurityPatchForm, self).clean() + vecs = [v for k,v in d.items() if k.startswith('vector_')] + empty = [v for v in vecs if v == ''] + if len(empty) != len(vecs) and len(empty) != 0: + for k in d.keys(): + if k.startswith('vector_'): + self.add_error(k, 'Either specify all vector values or none') + return d class SecurityPatchAdmin(admin.ModelAdmin): - form = SecurityPatchForm - exclude = ['cvenumber', ] - inlines = (SecurityPatchVersionAdmin, ) - list_display = ('cve', 'public', 'cvssscore', 'legacyscore', 'cvssvector', 'description') - actions = ['make_public', 'make_unpublic'] + form = SecurityPatchForm + exclude = ['cvenumber', ] + inlines = (SecurityPatchVersionAdmin, ) + list_display = ('cve', 'public', 'cvssscore', 'legacyscore', 'cvssvector', 'description') + actions = ['make_public', 'make_unpublic'] - def cvssvector(self, obj): - if not obj.cvssvector: - return '' - return '{0}'.format( - obj.cvssvector) - cvssvector.allow_tags = True - cvssvector.short_description = "CVSS vector link" + def cvssvector(self, obj): + if not obj.cvssvector: + return '' + return '{0}'.format( + obj.cvssvector) + cvssvector.allow_tags = True + cvssvector.short_description = "CVSS vector link" - def cvssscore(self, obj): - return obj.cvssscore - cvssscore.short_description = "CVSS score" + def cvssscore(self, obj): + return obj.cvssscore + cvssscore.short_description = "CVSS score" - def make_public(self, request, queryset): - self.do_public(queryset, True) - def make_unpublic(self, request, queryset): - self.do_public(queryset, False) - def do_public(self, queryset, val): - # Intentionally loop and do manually, so we generate change notices - for p in queryset.all(): - p.public=val - p.save() + def make_public(self, request, queryset): + self.do_public(queryset, True) + def make_unpublic(self, request, queryset): + self.do_public(queryset, False) + def do_public(self, queryset, val): + # Intentionally loop and do manually, so we generate change notices + for p in queryset.all(): + p.public=val + p.save() admin.site.register(SecurityPatch, SecurityPatchAdmin) diff --git a/pgweb/security/management/commands/update_cve_links.py b/pgweb/security/management/commands/update_cve_links.py index 159b124e..e74c072a 100644 --- a/pgweb/security/management/commands/update_cve_links.py +++ b/pgweb/security/management/commands/update_cve_links.py @@ -14,24 +14,24 @@ from pgweb.util.misc import varnish_purge import requests class Command(BaseCommand): - help = 'Update CVE links' + help = 'Update CVE links' - def handle(self, *args, **options): - with transaction.atomic(): - newly_visible = [] - for s in SecurityPatch.objects.filter(cve_visible=False): - r = requests.get(s.cvelink, timeout=10) - if r.status_code == 200: - newly_visible.append(s.cve) - s.cve_visible = True - s.save() - if newly_visible: - send_simple_mail(settings.NOTIFICATION_FROM, - settings.NOTIFICATION_EMAIL, - "CVE entries made public", - """The following CVE entries are now public upstream, + def handle(self, *args, **options): + with transaction.atomic(): + newly_visible = [] + for s in SecurityPatch.objects.filter(cve_visible=False): + r = requests.get(s.cvelink, timeout=10) + if r.status_code == 200: + newly_visible.append(s.cve) + s.cve_visible = True + s.save() + if newly_visible: + send_simple_mail(settings.NOTIFICATION_FROM, + 
settings.NOTIFICATION_EMAIL, + "CVE entries made public", + """The following CVE entries are now public upstream, and have been made visible on the website. {0} """.format("\n".join(newly_visible))) - map(varnish_purge, SecurityPatch.purge_urls) + map(varnish_purge, SecurityPatch.purge_urls) diff --git a/pgweb/security/migrations/0002_cve_visible.py b/pgweb/security/migrations/0002_cve_visible.py index 03661226..c6bc6ad0 100644 --- a/pgweb/security/migrations/0002_cve_visible.py +++ b/pgweb/security/migrations/0002_cve_visible.py @@ -16,7 +16,7 @@ class Migration(migrations.Migration): name='cve_visible', field=models.BooleanField(default=True), ), - migrations.AlterField( + migrations.AlterField( model_name='securitypatch', name='cve_visible', field=models.BooleanField(default=False), diff --git a/pgweb/security/models.py b/pgweb/security/models.py index 0c6e7bab..34166fca 100644 --- a/pgweb/security/models.py +++ b/pgweb/security/models.py @@ -11,106 +11,106 @@ import cvss vector_choices = {k:list(v.items()) for k,v in cvss.constants3.METRICS_VALUE_NAMES.items()} component_choices = ( - ('core server', 'Core server product'), - ('client', 'Client library or application only'), - ('contrib module', 'Contrib module only'), - ('client contrib module', 'Client contrib module only'), - ('packaging', 'Packaging, e.g. installers or RPM'), - ('other', 'Other'), + ('core server', 'Core server product'), + ('client', 'Client library or application only'), + ('contrib module', 'Contrib module only'), + ('client contrib module', 'Client contrib module only'), + ('packaging', 'Packaging, e.g. installers or RPM'), + ('other', 'Other'), ) re_cve = re.compile('^(\d{4})-(\d{4,5})$') def cve_validator(val): - if not re_cve.match(val): - raise ValidationError("Enter CVE in format 0000-0000 without the CVE text") + if not re_cve.match(val): + raise ValidationError("Enter CVE in format 0000-0000 without the CVE text") def other_vectors_validator(val): - if val != val.upper(): - raise ValidationError("Vector must be uppercase") + if val != val.upper(): + raise ValidationError("Vector must be uppercase") - try: - for vector in val.split('/'): - k,v = vector.split(':') - if not cvss.constants3.METRICS_VALUES.has_key(k): - raise ValidationError("Metric {0} is unknown".format(k)) - if k in ('AV', 'AC', 'PR', 'UI', 'S', 'C', 'I', 'A'): - raise ValidationError("Metric {0} must be specified in the dropdowns".format(k)) - if not cvss.constants3.METRICS_VALUES[k].has_key(v): - raise ValidationError("Metric {0} has unknown value {1}. Valind ones are: {2}".format( - k,v, - ", ".join(cvss.constants3.METRICS_VALUES[k].keys()), - )) - except ValidationError: - raise - except Exception, e: - raise ValidationError("Failed to parse vectors: %s" % e) + try: + for vector in val.split('/'): + k,v = vector.split(':') + if not cvss.constants3.METRICS_VALUES.has_key(k): + raise ValidationError("Metric {0} is unknown".format(k)) + if k in ('AV', 'AC', 'PR', 'UI', 'S', 'C', 'I', 'A'): + raise ValidationError("Metric {0} must be specified in the dropdowns".format(k)) + if not cvss.constants3.METRICS_VALUES[k].has_key(v): + raise ValidationError("Metric {0} has unknown value {1}. 
Valind ones are: {2}".format( + k,v, + ", ".join(cvss.constants3.METRICS_VALUES[k].keys()), + )) + except ValidationError: + raise + except Exception, e: + raise ValidationError("Failed to parse vectors: %s" % e) class SecurityPatch(models.Model): - public = models.BooleanField(null=False, blank=False, default=False) - newspost = models.ForeignKey(NewsArticle, null=True, blank=True) - cve = models.CharField(max_length=32, null=False, blank=True, validators=[cve_validator,]) - cve_visible = models.BooleanField(null=False, blank=False, default=False) - cvenumber = models.IntegerField(null=False, blank=False, db_index=True) - detailslink = models.URLField(null=False, blank=True) - description = models.TextField(null=False, blank=False) - component = models.CharField(max_length=32, null=False, blank=False, help_text="If multiple components, choose the most critical one", choices=component_choices) + public = models.BooleanField(null=False, blank=False, default=False) + newspost = models.ForeignKey(NewsArticle, null=True, blank=True) + cve = models.CharField(max_length=32, null=False, blank=True, validators=[cve_validator,]) + cve_visible = models.BooleanField(null=False, blank=False, default=False) + cvenumber = models.IntegerField(null=False, blank=False, db_index=True) + detailslink = models.URLField(null=False, blank=True) + description = models.TextField(null=False, blank=False) + component = models.CharField(max_length=32, null=False, blank=False, help_text="If multiple components, choose the most critical one", choices=component_choices) - versions = models.ManyToManyField(Version, through='SecurityPatchVersion') + versions = models.ManyToManyField(Version, through='SecurityPatchVersion') - vector_av = models.CharField(max_length=1, null=False, blank=True, verbose_name="Attack Vector", choices=vector_choices['AV']) - vector_ac = models.CharField(max_length=1, null=False, blank=True, verbose_name="Attack Complexity", choices=vector_choices['AC']) - vector_pr = models.CharField(max_length=1, null=False, blank=True, verbose_name="Privileges Required", choices=vector_choices['PR']) - vector_ui = models.CharField(max_length=1, null=False, blank=True, verbose_name="User Interaction", choices=vector_choices['UI']) - vector_s = models.CharField(max_length=1, null=False, blank=True, verbose_name="Scope", choices=vector_choices['S']) - vector_c = models.CharField(max_length=1, null=False, blank=True, verbose_name="Confidentiality Impact", choices=vector_choices['C']) - vector_i = models.CharField(max_length=1, null=False, blank=True, verbose_name="Integrity Impact", choices=vector_choices['I']) - vector_a = models.CharField(max_length=1, null=False, blank=True, verbose_name="Availability Impact", choices=vector_choices['A']) - legacyscore = models.CharField(max_length=1, null=False, blank=True, verbose_name='Legacy score', choices=(('A', 'A'),('B','B'),('C','C'),('D','D'))) + vector_av = models.CharField(max_length=1, null=False, blank=True, verbose_name="Attack Vector", choices=vector_choices['AV']) + vector_ac = models.CharField(max_length=1, null=False, blank=True, verbose_name="Attack Complexity", choices=vector_choices['AC']) + vector_pr = models.CharField(max_length=1, null=False, blank=True, verbose_name="Privileges Required", choices=vector_choices['PR']) + vector_ui = models.CharField(max_length=1, null=False, blank=True, verbose_name="User Interaction", choices=vector_choices['UI']) + vector_s = models.CharField(max_length=1, null=False, blank=True, verbose_name="Scope", 
choices=vector_choices['S']) + vector_c = models.CharField(max_length=1, null=False, blank=True, verbose_name="Confidentiality Impact", choices=vector_choices['C']) + vector_i = models.CharField(max_length=1, null=False, blank=True, verbose_name="Integrity Impact", choices=vector_choices['I']) + vector_a = models.CharField(max_length=1, null=False, blank=True, verbose_name="Availability Impact", choices=vector_choices['A']) + legacyscore = models.CharField(max_length=1, null=False, blank=True, verbose_name='Legacy score', choices=(('A', 'A'),('B','B'),('C','C'),('D','D'))) - purge_urls = ('/support/security/', ) + purge_urls = ('/support/security/', ) - def save(self, force_insert=False, force_update=False): - # Calculate a number from the CVE, that we can use to sort by. We need to - # do this, because CVEs can have 4 or 5 digit second parts... - if self.cve == '': - self.cvenumber = 0 - else: - m = re_cve.match(self.cve) - if not m: - raise ValidationError("Invalid CVE, should not get here!") - self.cvenumber = 100000 * int(m.groups(0)[0]) + int(m.groups(0)[1]) - super(SecurityPatch, self).save(force_insert, force_update) + def save(self, force_insert=False, force_update=False): + # Calculate a number from the CVE, that we can use to sort by. We need to + # do this, because CVEs can have 4 or 5 digit second parts... + if self.cve == '': + self.cvenumber = 0 + else: + m = re_cve.match(self.cve) + if not m: + raise ValidationError("Invalid CVE, should not get here!") + self.cvenumber = 100000 * int(m.groups(0)[0]) + int(m.groups(0)[1]) + super(SecurityPatch, self).save(force_insert, force_update) - def __unicode__(self): - return self.cve + def __unicode__(self): + return self.cve - @property - def cvssvector(self): - if not self.vector_av: - return None - s = 'AV:{0}/AC:{1}/PR:{2}/UI:{3}/S:{4}/C:{5}/I:{6}/A:{7}'.format( - self.vector_av, self.vector_ac, self.vector_pr, self.vector_ui, - self.vector_s, self.vector_c, self.vector_i, self.vector_a) - return s + @property + def cvssvector(self): + if not self.vector_av: + return None + s = 'AV:{0}/AC:{1}/PR:{2}/UI:{3}/S:{4}/C:{5}/I:{6}/A:{7}'.format( + self.vector_av, self.vector_ac, self.vector_pr, self.vector_ui, + self.vector_s, self.vector_c, self.vector_i, self.vector_a) + return s - @property - def cvssscore(self): - try: - c = cvss.CVSS3("CVSS:3.0/" + self.cvssvector) - return c.base_score - except Exception: - return -1 + @property + def cvssscore(self): + try: + c = cvss.CVSS3("CVSS:3.0/" + self.cvssvector) + return c.base_score + except Exception: + return -1 - @property - def cvelink(self): - return "https://access.redhat.com/security/cve/CVE-{0}".format(self.cve) + @property + def cvelink(self): + return "https://access.redhat.com/security/cve/CVE-{0}".format(self.cve) - class Meta: - verbose_name_plural = 'Security patches' - ordering = ('-cvenumber',) + class Meta: + verbose_name_plural = 'Security patches' + ordering = ('-cvenumber',) class SecurityPatchVersion(models.Model): - patch = models.ForeignKey(SecurityPatch, null=False, blank=False) - version = models.ForeignKey(Version, null=False, blank=False) - fixed_minor = models.IntegerField(null=False, blank=False) + patch = models.ForeignKey(SecurityPatch, null=False, blank=False) + version = models.ForeignKey(Version, null=False, blank=False) + fixed_minor = models.IntegerField(null=False, blank=False) diff --git a/pgweb/security/views.py b/pgweb/security/views.py index 86dfa3e4..eeba3663 100644 --- a/pgweb/security/views.py +++ b/pgweb/security/views.py @@ -6,25 +6,25 @@ 
from pgweb.core.models import Version from models import SecurityPatch def GetPatchesList(filt): - return SecurityPatch.objects.raw("SELECT p.*, array_agg(CASE WHEN v.tree >= 10 THEN v.tree::int ELSE v.tree END ORDER BY v.tree) AS affected, array_agg(CASE WHEN v.tree >= 10 THEN v.tree::int ELSE v.tree END || '.' || fixed_minor ORDER BY v.tree) AS fixed FROM security_securitypatch p INNER JOIN security_securitypatchversion sv ON p.id=sv.patch_id INNER JOIN core_version v ON v.id=sv.version_id WHERE p.public AND {0} GROUP BY p.id ORDER BY cvenumber DESC".format(filt)) + return SecurityPatch.objects.raw("SELECT p.*, array_agg(CASE WHEN v.tree >= 10 THEN v.tree::int ELSE v.tree END ORDER BY v.tree) AS affected, array_agg(CASE WHEN v.tree >= 10 THEN v.tree::int ELSE v.tree END || '.' || fixed_minor ORDER BY v.tree) AS fixed FROM security_securitypatch p INNER JOIN security_securitypatchversion sv ON p.id=sv.patch_id INNER JOIN core_version v ON v.id=sv.version_id WHERE p.public AND {0} GROUP BY p.id ORDER BY cvenumber DESC".format(filt)) def _list_patches(request, filt): - patches = GetPatchesList(filt) + patches = GetPatchesList(filt) - return render_pgweb(request, 'support', 'security/security.html', { - 'patches': patches, - 'supported': Version.objects.filter(supported=True), - 'unsupported': Version.objects.filter(supported=False, tree__gt=0).extra( - where=["EXISTS (SELECT 1 FROM security_securitypatchversion pv WHERE pv.version_id=core_version.id)"], - ), - }) + return render_pgweb(request, 'support', 'security/security.html', { + 'patches': patches, + 'supported': Version.objects.filter(supported=True), + 'unsupported': Version.objects.filter(supported=False, tree__gt=0).extra( + where=["EXISTS (SELECT 1 FROM security_securitypatchversion pv WHERE pv.version_id=core_version.id)"], + ), + }) def index(request): - # Show all supported versions - return _list_patches(request, "v.supported") + # Show all supported versions + return _list_patches(request, "v.supported") def version(request, numtree): - version = get_object_or_404(Version, tree=numtree) - # It's safe to pass in the value since we get it from the module, not from - # the actual querystring. - return _list_patches(request, "EXISTS (SELECT 1 FROM security_securitypatchversion svv WHERE svv.version_id={0} AND svv.patch_id=p.id)".format(version.id)) + version = get_object_or_404(Version, tree=numtree) + # It's safe to pass in the value since we get it from the module, not from + # the actual querystring. + return _list_patches(request, "EXISTS (SELECT 1 FROM security_securitypatchversion svv WHERE svv.version_id={0} AND svv.patch_id=p.id)".format(version.id)) diff --git a/pgweb/settings.py b/pgweb/settings.py index 11789ecc..68a86f06 100644 --- a/pgweb/settings.py +++ b/pgweb/settings.py @@ -3,17 +3,17 @@ DEBUG = False ADMINS = ( - ('PostgreSQL Webmaster', 'webmaster@postgresql.org'), + ('PostgreSQL Webmaster', 'webmaster@postgresql.org'), ) MANAGERS = ADMINS DATABASES={ - 'default': { - 'ENGINE': 'django.db.backends.postgresql_psycopg2', - 'NAME': 'pgweb', - } - } + 'default': { + 'ENGINE': 'django.db.backends.postgresql_psycopg2', + 'NAME': 'pgweb', + } + } # Local time zone for this installation. Choices can be found here: # http://en.wikipedia.org/wiki/List_of_tz_zones_by_name @@ -43,7 +43,7 @@ MEDIA_URL = '' STATIC_URL = '/media/' STATICFILES_DIRS = ( - 'media/', + 'media/', ) # Make this unique, and don't share it with anybody. 
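For reference, the aggregation performed by the raw query in GetPatchesList() earlier can be sketched in plain Python. This is a minimal illustration only, with a hypothetical input shape; the real work is deliberately done in SQL so the security page is built in a single round-trip:

    # Sketch of what the security page gets per patch: an "affected" array of
    # version trees (integers from 10 onwards, X.Y values before that) and a
    # "fixed" array of full version strings built as tree || '.' || fixed_minor.
    def summarize_patch(tree_and_fixed_minor):
        # tree_and_fixed_minor: list of (tree, fixed_minor) pairs (hypothetical input)
        affected = []
        fixed = []
        for tree, fixed_minor in sorted(tree_and_fixed_minor):
            label = int(tree) if tree >= 10 else tree
            affected.append(label)
            fixed.append("{0}.{1}".format(label, fixed_minor))
        return affected, fixed

    # summarize_patch([(9.6, 11), (10, 2)]) -> ([9.6, 10], ['9.6.11', '10.2'])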
@@ -52,7 +52,7 @@ SECRET_KEY = 'REALLYCHANGETHISINSETTINGS_LOCAL.PY' MIDDLEWARE_CLASSES = [ 'django.middleware.common.CommonMiddleware', 'django.contrib.sessions.middleware.SessionMiddleware', - 'django.contrib.messages.middleware.MessageMiddleware', + 'django.contrib.messages.middleware.MessageMiddleware', 'django.middleware.csrf.CsrfViewMiddleware', 'django.contrib.auth.middleware.AuthenticationMiddleware', 'pgweb.util.middleware.PgMiddleware', @@ -63,21 +63,21 @@ CSRF_FAILURE_VIEW='pgweb.core.views.csrf_failure' ROOT_URLCONF = 'pgweb.urls' TEMPLATES = [{ - 'BACKEND': 'django.template.backends.django.DjangoTemplates', - 'DIRS': ['templates', ], - 'OPTIONS': { - 'context_processors': [ - 'django.contrib.auth.context_processors.auth', - 'django.contrib.messages.context_processors.messages', - 'django.template.context_processors.media', - 'pgweb.util.contexts.PGWebContextProcessor', - ], - 'loaders': [ - 'pgweb.util.templateloader.TrackingTemplateLoader', - 'django.template.loaders.filesystem.Loader', - 'django.template.loaders.app_directories.Loader', - ], - }, + 'BACKEND': 'django.template.backends.django.DjangoTemplates', + 'DIRS': ['templates', ], + 'OPTIONS': { + 'context_processors': [ + 'django.contrib.auth.context_processors.auth', + 'django.contrib.messages.context_processors.messages', + 'django.template.context_processors.media', + 'pgweb.util.contexts.PGWebContextProcessor', + ], + 'loaders': [ + 'pgweb.util.templateloader.TrackingTemplateLoader', + 'django.template.loaders.filesystem.Loader', + 'django.template.loaders.app_directories.Loader', + ], + }, }] LOGIN_URL='/account/login/' @@ -95,7 +95,7 @@ INSTALLED_APPS = [ 'django.contrib.sessions', 'django.contrib.admin', 'django_markwhat', - 'django.contrib.staticfiles', + 'django.contrib.staticfiles', 'pgweb.selectable', 'pgweb.core', 'pgweb.mailqueue', @@ -113,7 +113,7 @@ INSTALLED_APPS = [ 'pgweb.survey', 'pgweb.misc', 'pgweb.featurematrix', - 'pgweb.search', + 'pgweb.search', 'pgweb.pugs', ] @@ -151,7 +151,7 @@ DOCSREPORT_EMAIL="someone@example.com" # Address to pgsql-docs l DOCSREPORT_NOREPLY_EMAIL="someone-noreply@example.com" # Address to no-reply pgsql-docs address FRONTEND_SERVERS=() # A tuple containing the *IP addresses* of all the # varnish frontend servers in use. 
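Everything secret or site-specific in the settings above is a placeholder: SECRET_KEY literally tells the deployer to change it in a local settings file, and FRONTEND_SERVERS (above) and the similar tuples that follow default to empty. A hypothetical local override could look like the sketch below; the settings_local.py file name is inferred from the SECRET_KEY placeholder, and every value is made up (documentation-range IPs):

    # settings_local.py -- example deployment overrides, not real configuration
    SECRET_KEY = 'replace-with-a-long-random-string'

    # Varnish frontends and other trusted machines, identified by IP address
    FRONTEND_SERVERS = ('198.51.100.10', '198.51.100.11')
    FTP_MASTERS = ('203.0.113.5',)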
-FTP_MASTERS=() # A tuple containing the *IP addresses* of all machines +FTP_MASTERS=() # A tuple containing the *IP addresses* of all machines # trusted to upload ftp structure data VARNISH_PURGERS=() # Extra servers that can do varnish purges through our queue DO_ESI=False # Generate ESI tags diff --git a/pgweb/sponsors/models.py b/pgweb/sponsors/models.py index 6c0031ab..e5d48d83 100644 --- a/pgweb/sponsors/models.py +++ b/pgweb/sponsors/models.py @@ -3,47 +3,47 @@ from django.db import models from pgweb.core.models import Country class SponsorType(models.Model): - typename = models.CharField(max_length=32, null=False, blank=False) - description = models.TextField(null=False, blank=False) - sortkey = models.IntegerField(null=False, default=10) - # sortkey==0 --> do not show in list + typename = models.CharField(max_length=32, null=False, blank=False) + description = models.TextField(null=False, blank=False) + sortkey = models.IntegerField(null=False, default=10) + # sortkey==0 --> do not show in list - purge_urls = ('/about/servers/', '/about/sponsors/', ) + purge_urls = ('/about/servers/', '/about/sponsors/', ) - def __unicode__(self): - return self.typename + def __unicode__(self): + return self.typename - class Meta: - ordering = ('sortkey', ) + class Meta: + ordering = ('sortkey', ) class Sponsor(models.Model): - sponsortype = models.ForeignKey(SponsorType, null=False) - name = models.CharField(max_length=128, null=False, blank=False) - url = models.URLField(null=False, blank=False) - logoname = models.CharField(max_length=64, null=False, blank=False) - country = models.ForeignKey(Country, null=False) + sponsortype = models.ForeignKey(SponsorType, null=False) + name = models.CharField(max_length=128, null=False, blank=False) + url = models.URLField(null=False, blank=False) + logoname = models.CharField(max_length=64, null=False, blank=False) + country = models.ForeignKey(Country, null=False) - purge_urls = ('/about/sponsors/', '/about/servers/', ) + purge_urls = ('/about/sponsors/', '/about/servers/', ) - def __unicode__(self): - return self.name + def __unicode__(self): + return self.name - class Meta: - ordering = ('name', ) + class Meta: + ordering = ('name', ) class Server(models.Model): - name = models.CharField(max_length=32, null=False, blank=False) - sponsors = models.ManyToManyField(Sponsor) - dedicated = models.BooleanField(null=False, default=True) - performance = models.CharField(max_length=128, null=False, blank=False) - os = models.CharField(max_length=32, null=False, blank=False) - location = models.CharField(max_length=128, null=False, blank=False) - usage = models.TextField(null=False, blank=False) + name = models.CharField(max_length=32, null=False, blank=False) + sponsors = models.ManyToManyField(Sponsor) + dedicated = models.BooleanField(null=False, default=True) + performance = models.CharField(max_length=128, null=False, blank=False) + os = models.CharField(max_length=32, null=False, blank=False) + location = models.CharField(max_length=128, null=False, blank=False) + usage = models.TextField(null=False, blank=False) - purge_urls = ('/about/servers/', ) + purge_urls = ('/about/servers/', ) - def __unicode__(self): - return self.name + def __unicode__(self): + return self.name - class Meta: - ordering = ('name', ) + class Meta: + ordering = ('name', ) diff --git a/pgweb/sponsors/struct.py b/pgweb/sponsors/struct.py index 99ce243c..d128f35d 100644 --- a/pgweb/sponsors/struct.py +++ b/pgweb/sponsors/struct.py @@ -1,3 +1,3 @@ def get_struct(): - yield 
('about/sponsors/', None) - yield ('about/servers/', None) + yield ('about/sponsors/', None) + yield ('about/servers/', None) diff --git a/pgweb/sponsors/views.py b/pgweb/sponsors/views.py index 7b676f76..15bef9d4 100644 --- a/pgweb/sponsors/views.py +++ b/pgweb/sponsors/views.py @@ -5,13 +5,13 @@ from models import Sponsor, Server @cache(minutes=30) def sponsors(request): - sponsors = Sponsor.objects.select_related().filter(sponsortype__sortkey__gt=0).order_by('sponsortype__sortkey' ,'?') - return render_pgweb(request, 'about', 'sponsors/sponsors.html', { - 'sponsors': sponsors, - }) + sponsors = Sponsor.objects.select_related().filter(sponsortype__sortkey__gt=0).order_by('sponsortype__sortkey' ,'?') + return render_pgweb(request, 'about', 'sponsors/sponsors.html', { + 'sponsors': sponsors, + }) def servers(request): - servers = Server.objects.select_related().all() - return render_pgweb(request, 'about', 'sponsors/servers.html', { - 'servers': servers, - }) + servers = Server.objects.select_related().all() + return render_pgweb(request, 'about', 'sponsors/servers.html', { + 'servers': servers, + }) diff --git a/pgweb/survey/admin.py b/pgweb/survey/admin.py index 23d62319..cdd9c4cd 100644 --- a/pgweb/survey/admin.py +++ b/pgweb/survey/admin.py @@ -2,12 +2,12 @@ from django.contrib import admin from models import Survey, SurveyLock, SurveyAnswer class SurveyAdmin(admin.ModelAdmin): - list_display = ('question','posted','current',) - ordering = ('-posted',) + list_display = ('question','posted','current',) + ordering = ('-posted',) class SurveyAnswerAdmin(admin.ModelAdmin): - list_display = ('survey','tot1','tot2','tot3','tot4','tot5','tot6','tot7','tot8') - ordering = ('-survey__posted',) + list_display = ('survey','tot1','tot2','tot3','tot4','tot5','tot6','tot7','tot8') + ordering = ('-survey__posted',) admin.site.register(Survey, SurveyAdmin) admin.site.register(SurveyLock) diff --git a/pgweb/survey/models.py b/pgweb/survey/models.py index 9ea171ae..72cfe7c7 100644 --- a/pgweb/survey/models.py +++ b/pgweb/survey/models.py @@ -2,94 +2,94 @@ from django.db import models # internal text/value object class SurveyQuestion(object): - def __init__(self, value, text): - self.value = value - self.text = text + def __init__(self, value, text): + self.value = value + self.text = text class SurveyAnswerValues(object): - def __init__(self, option, votes, votespercent): - self.option = option - self.votes = votes - self.votespercent = votespercent + def __init__(self, option, votes, votespercent): + self.option = option + self.votes = votes + self.votespercent = votespercent class Survey(models.Model): - question = models.CharField(max_length=500, null=False, blank=False) - opt1 = models.CharField(max_length=500, null=False, blank=False) - opt2 = models.CharField(max_length=500, null=False, blank=False) - opt3 = models.CharField(max_length=500, null=False, blank=True) - opt4 = models.CharField(max_length=500, null=False, blank=True) - opt5 = models.CharField(max_length=500, null=False, blank=True) - opt6 = models.CharField(max_length=500, null=False, blank=True) - opt7 = models.CharField(max_length=500, null=False, blank=True) - opt8 = models.CharField(max_length=500, null=False, blank=True) - posted = models.DateTimeField(null=False, auto_now_add=True) - current = models.BooleanField(null=False, default=False) + question = models.CharField(max_length=500, null=False, blank=False) + opt1 = models.CharField(max_length=500, null=False, blank=False) + opt2 = models.CharField(max_length=500, null=False, 
blank=False) + opt3 = models.CharField(max_length=500, null=False, blank=True) + opt4 = models.CharField(max_length=500, null=False, blank=True) + opt5 = models.CharField(max_length=500, null=False, blank=True) + opt6 = models.CharField(max_length=500, null=False, blank=True) + opt7 = models.CharField(max_length=500, null=False, blank=True) + opt8 = models.CharField(max_length=500, null=False, blank=True) + posted = models.DateTimeField(null=False, auto_now_add=True) + current = models.BooleanField(null=False, default=False) - purge_urls = ('/community/survey', '/community/$') + purge_urls = ('/community/survey', '/community/$') - def __unicode__(self): - return self.question + def __unicode__(self): + return self.question - @property - def questions(self): - for i in range (1,9): - v = getattr(self, "opt%s" % i) - if not v: break - yield SurveyQuestion(i, v) + @property + def questions(self): + for i in range (1,9): + v = getattr(self, "opt%s" % i) + if not v: break + yield SurveyQuestion(i, v) - @property - def answers(self): - if not hasattr(self, "_answers"): - self._answers = SurveyAnswer.objects.get_or_create(survey=self)[0] - return self._answers + @property + def answers(self): + if not hasattr(self, "_answers"): + self._answers = SurveyAnswer.objects.get_or_create(survey=self)[0] + return self._answers - @property - def completeanswers(self): - for a in self._get_complete_answers(): - yield SurveyAnswerValues(a[0], a[1], self.totalvotes>0 and (100*a[1]/self.totalvotes) or 0) + @property + def completeanswers(self): + for a in self._get_complete_answers(): + yield SurveyAnswerValues(a[0], a[1], self.totalvotes>0 and (100*a[1]/self.totalvotes) or 0) - @property - def totalvotes(self): - if not hasattr(self,"_totalvotes"): - self._totalvotes = 0 - for a in self._get_complete_answers(): - self._totalvotes = self._totalvotes + a[1] - return self._totalvotes + @property + def totalvotes(self): + if not hasattr(self,"_totalvotes"): + self._totalvotes = 0 + for a in self._get_complete_answers(): + self._totalvotes = self._totalvotes + a[1] + return self._totalvotes - def _get_complete_answers(self): - for i in range(1,9): - q = getattr(self, "opt%s" % i) - if not q: break - n = getattr(self.answers, "tot%s" % i) - yield (q,n) + def _get_complete_answers(self): + for i in range(1,9): + q = getattr(self, "opt%s" % i) + if not q: break + n = getattr(self.answers, "tot%s" % i) + yield (q,n) - def save(self): - # Make sure only one survey at a time can be the current one - # (there may be some small race conditions here, but the likelihood - # that two admins are editing the surveys at the same time...) - if self.current: - previous = Survey.objects.filter(current=True) - for p in previous: - if not p == self: - p.current = False - p.save() # primary key check avoids recursion + def save(self): + # Make sure only one survey at a time can be the current one + # (there may be some small race conditions here, but the likelihood + # that two admins are editing the surveys at the same time...) + if self.current: + previous = Survey.objects.filter(current=True) + for p in previous: + if not p == self: + p.current = False + p.save() # primary key check avoids recursion - # Now that we've made any previously current ones non-current, we are - # free to save this one. - super(Survey, self).save() + # Now that we've made any previously current ones non-current, we are + # free to save this one. 
+ super(Survey, self).save() class SurveyAnswer(models.Model): - survey = models.OneToOneField(Survey, null=False, blank=False, primary_key=True) - tot1 = models.IntegerField(null=False, default=0) - tot2 = models.IntegerField(null=False, default=0) - tot3 = models.IntegerField(null=False, default=0) - tot4 = models.IntegerField(null=False, default=0) - tot5 = models.IntegerField(null=False, default=0) - tot6 = models.IntegerField(null=False, default=0) - tot7 = models.IntegerField(null=False, default=0) - tot8 = models.IntegerField(null=False, default=0) + survey = models.OneToOneField(Survey, null=False, blank=False, primary_key=True) + tot1 = models.IntegerField(null=False, default=0) + tot2 = models.IntegerField(null=False, default=0) + tot3 = models.IntegerField(null=False, default=0) + tot4 = models.IntegerField(null=False, default=0) + tot5 = models.IntegerField(null=False, default=0) + tot6 = models.IntegerField(null=False, default=0) + tot7 = models.IntegerField(null=False, default=0) + tot8 = models.IntegerField(null=False, default=0) - purge_urls = ('/community/survey', ) + purge_urls = ('/community/survey', ) class SurveyLock(models.Model): - ipaddr = models.GenericIPAddressField(null=False, blank=False) - time = models.DateTimeField(null=False, auto_now_add=True) + ipaddr = models.GenericIPAddressField(null=False, blank=False) + time = models.DateTimeField(null=False, auto_now_add=True) diff --git a/pgweb/survey/views.py b/pgweb/survey/views.py index 09dd0738..ba3c09a3 100644 --- a/pgweb/survey/views.py +++ b/pgweb/survey/views.py @@ -11,52 +11,52 @@ from pgweb.util.helpers import HttpServerError from models import Survey, SurveyAnswer, SurveyLock def results(request, surveyid, junk=None): - survey = get_object_or_404(Survey, pk=surveyid) - surveylist = Survey.objects.all().order_by('-posted') + survey = get_object_or_404(Survey, pk=surveyid) + surveylist = Survey.objects.all().order_by('-posted') - return render_pgweb(request, 'community', 'survey/results.html', { - 'survey': survey, - 'surveylist': surveylist, - }) + return render_pgweb(request, 'community', 'survey/results.html', { + 'survey': survey, + 'surveylist': surveylist, + }) # Served over insecure HTTP, the Varnish proxy strips cookies @csrf_exempt def vote(request, surveyid): - surv = get_object_or_404(Survey, pk=surveyid) + surv = get_object_or_404(Survey, pk=surveyid) - # Check that we have a valid answer number - try: - ansnum = int(request.POST['answer']) - if ansnum < 1 or ansnum > 8: - return HttpServerError(request, "Invalid answer") - except: - # When no answer is given, redirect to results instead - return HttpResponseRedirect("/community/survey/%s-%s" % (surv.id, slugify(surv.question))) - attrname = "tot%s" % ansnum + # Check that we have a valid answer number + try: + ansnum = int(request.POST['answer']) + if ansnum < 1 or ansnum > 8: + return HttpServerError(request, "Invalid answer") + except: + # When no answer is given, redirect to results instead + return HttpResponseRedirect("/community/survey/%s-%s" % (surv.id, slugify(surv.question))) + attrname = "tot%s" % ansnum - # Do IP based locking... - addr = get_client_ip(request) + # Do IP based locking... 
+ addr = get_client_ip(request) - # Clean out any old junk - curs = connection.cursor() - curs.execute("DELETE FROM survey_surveylock WHERE (\"time\" + '15 minutes') < now()") + # Clean out any old junk + curs = connection.cursor() + curs.execute("DELETE FROM survey_surveylock WHERE (\"time\" + '15 minutes') < now()") - # Check if we are locked - lock = SurveyLock.objects.filter(ipaddr=addr) - if len(lock) > 0: - return HttpServerError(request, "Too many requests from your IP in the past 15 minutes") + # Check if we are locked + lock = SurveyLock.objects.filter(ipaddr=addr) + if len(lock) > 0: + return HttpServerError(request, "Too many requests from your IP in the past 15 minutes") - # Generate a new lock item, and store it - lock = SurveyLock(ipaddr=addr) - lock.save() + # Generate a new lock item, and store it + lock = SurveyLock(ipaddr=addr) + lock.save() - answers = SurveyAnswer.objects.get_or_create(survey=surv)[0] - setattr(answers, attrname, getattr(answers, attrname)+1) - answers.save() + answers = SurveyAnswer.objects.get_or_create(survey=surv)[0] + setattr(answers, attrname, getattr(answers, attrname)+1) + answers.save() - # Do explicit varnish purge, since it seems that the model doesn't - # do it properly. Possibly because of the cute stuff we do with - # getattr/setattr above. - varnish_purge("/community/survey/%s/" % surveyid) + # Do explicit varnish purge, since it seems that the model doesn't + # do it properly. Possibly because of the cute stuff we do with + # getattr/setattr above. + varnish_purge("/community/survey/%s/" % surveyid) - return HttpResponseRedirect("/community/survey/%s/" % surveyid) + return HttpResponseRedirect("/community/survey/%s/" % surveyid) diff --git a/pgweb/urls.py b/pgweb/urls.py index 4f89f6b6..c0b290d7 100644 --- a/pgweb/urls.py +++ b/pgweb/urls.py @@ -31,131 +31,131 @@ from pgweb.news.feeds import NewsFeed from pgweb.events.feeds import EventFeed urlpatterns = [ - url(r'^$', pgweb.core.views.home), - url(r'^dyncss/(?Pbase|docs).css$', pgweb.core.views.dynamic_css), + url(r'^$', pgweb.core.views.home), + url(r'^dyncss/(?Pbase|docs).css$', pgweb.core.views.dynamic_css), - url(r'^about/$', pgweb.core.views.about), - url(r'^about/newsarchive/([^/]+/)?$', pgweb.news.views.archive), - url(r'^about/news/(\d+)(-.*)?/$', pgweb.news.views.item), - url(r'^about/news/taglist.json/$', pgweb.news.views.taglist_json), - url(r'^about/events/$', pgweb.events.views.main), - url(r'^about/eventarchive/$', pgweb.events.views.archive), - url(r'^about/event/(\d+)(-.*)?/$', pgweb.events.views.item), - url(r'^about/featurematrix/$', pgweb.featurematrix.views.root), - url(r'^about/featurematrix/detail/(\d+)/$', pgweb.featurematrix.views.detail), + url(r'^about/$', pgweb.core.views.about), + url(r'^about/newsarchive/([^/]+/)?$', pgweb.news.views.archive), + url(r'^about/news/(\d+)(-.*)?/$', pgweb.news.views.item), + url(r'^about/news/taglist.json/$', pgweb.news.views.taglist_json), + url(r'^about/events/$', pgweb.events.views.main), + url(r'^about/eventarchive/$', pgweb.events.views.archive), + url(r'^about/event/(\d+)(-.*)?/$', pgweb.events.views.item), + url(r'^about/featurematrix/$', pgweb.featurematrix.views.root), + url(r'^about/featurematrix/detail/(\d+)/$', pgweb.featurematrix.views.detail), - url(r'^ftp/(.*/)?$', pgweb.downloads.views.ftpbrowser), - url(r'^download/mirrors-ftp/+(.*)$', pgweb.downloads.views.mirrorselect), - url(r'^download/product-categories/$', pgweb.downloads.views.categorylist), - url(r'^download/products/(\d+)(-.*)?/$', 
pgweb.downloads.views.productlist), - url(r'^applications-v2.xml$', pgweb.downloads.views.applications_v2_xml), - url(r'^download/uploadftp/', pgweb.downloads.views.uploadftp), - url(r'^download/uploadyum/', pgweb.downloads.views.uploadyum), - url(r'^download/js/yum.js', pgweb.downloads.views.yum_js), + url(r'^ftp/(.*/)?$', pgweb.downloads.views.ftpbrowser), + url(r'^download/mirrors-ftp/+(.*)$', pgweb.downloads.views.mirrorselect), + url(r'^download/product-categories/$', pgweb.downloads.views.categorylist), + url(r'^download/products/(\d+)(-.*)?/$', pgweb.downloads.views.productlist), + url(r'^applications-v2.xml$', pgweb.downloads.views.applications_v2_xml), + url(r'^download/uploadftp/', pgweb.downloads.views.uploadftp), + url(r'^download/uploadyum/', pgweb.downloads.views.uploadyum), + url(r'^download/js/yum.js', pgweb.downloads.views.yum_js), - url(r'^docs/$', pgweb.docs.views.root), - url(r'^docs/manuals/$', pgweb.docs.views.manuals), - url(r'^docs/manuals/archive/$', pgweb.docs.views.manualarchive), - # Legacy URLs for accessing the docs page; provides a permanent redirect - url(r'^docs/(current|devel|\d+(?:\.\d)?)/(static|interactive)/((.*).html?)?$', pgweb.docs.views.docspermanentredirect), - url(r'^docs/(current|devel|\d+(?:\.\d)?)/(.*).html?$', pgweb.docs.views.docpage), - url(r'^docs/(current|devel|\d+(?:\.\d)?)/$', pgweb.docs.views.docsrootpage), - url(r'^docs/(current|devel|\d+(?:\.\d)?)/$', pgweb.docs.views.redirect_root), + url(r'^docs/$', pgweb.docs.views.root), + url(r'^docs/manuals/$', pgweb.docs.views.manuals), + url(r'^docs/manuals/archive/$', pgweb.docs.views.manualarchive), + # Legacy URLs for accessing the docs page; provides a permanent redirect + url(r'^docs/(current|devel|\d+(?:\.\d)?)/(static|interactive)/((.*).html?)?$', pgweb.docs.views.docspermanentredirect), + url(r'^docs/(current|devel|\d+(?:\.\d)?)/(.*).html?$', pgweb.docs.views.docpage), + url(r'^docs/(current|devel|\d+(?:\.\d)?)/$', pgweb.docs.views.docsrootpage), + url(r'^docs/(current|devel|\d+(?:\.\d)?)/$', pgweb.docs.views.redirect_root), - url(r'^community/$', pgweb.core.views.community), - url(r'^community/contributors/$', pgweb.contributors.views.completelist), - url(r'^community/lists/$', RedirectView.as_view(url='/list/', permanent=True)), - url(r'^community/lists/subscribe/$', RedirectView.as_view(url='https://lists.postgresql.org/', permanent=True)), + url(r'^community/$', pgweb.core.views.community), + url(r'^community/contributors/$', pgweb.contributors.views.completelist), + url(r'^community/lists/$', RedirectView.as_view(url='/list/', permanent=True)), + url(r'^community/lists/subscribe/$', RedirectView.as_view(url='https://lists.postgresql.org/', permanent=True)), - url(r'^community/lists/listinfo/$', pgweb.lists.views.listinfo), - url(r'^community/survey/vote/(\d+)/$', pgweb.survey.views.vote), - url(r'^community/survey[/\.](\d+)(-.*)?/$', pgweb.survey.views.results), - url(r'^community/user-groups/$', pgweb.pugs.views.index), + url(r'^community/lists/listinfo/$', pgweb.lists.views.listinfo), + url(r'^community/survey/vote/(\d+)/$', pgweb.survey.views.vote), + url(r'^community/survey[/\.](\d+)(-.*)?/$', pgweb.survey.views.results), + url(r'^community/user-groups/$', pgweb.pugs.views.index), - url(r'^search/$', pgweb.search.views.search), + url(r'^search/$', pgweb.search.views.search), - url(r'^support/security/$', pgweb.security.views.index), - url(r'^support/security/(\d\.\d|\d{2})/$', pgweb.security.views.version), - url(r'^support/security_archive/$', 
RedirectView.as_view(url='/support/security/', permanent=True)),
+ url(r'^support/security/$', pgweb.security.views.index),
+ url(r'^support/security/(\d\.\d|\d{2})/$', pgweb.security.views.version),
+ url(r'^support/security_archive/$', RedirectView.as_view(url='/support/security/', permanent=True)),
- url(r'^support/professional_(support|hosting)/$', pgweb.profserv.views.root),
- url(r'^support/professional_(support|hosting)[/_](.*)/$', pgweb.profserv.views.region),
- url(r'^account/submitbug/$', pgweb.misc.views.submitbug),
- url(r'^account/submitbug/(\d+)/$', pgweb.misc.views.submitbug_done),
- url(r'^support/submitbug/$', RedirectView.as_view(url='/account/submitbug/', permanent=True)),
- url(r'^support/versioning/$', pgweb.core.views.versions),
- url(r'^bugs_redir/(\d+)/$', pgweb.misc.views.bugs_redir),
+ url(r'^support/professional_(support|hosting)/$', pgweb.profserv.views.root),
+ url(r'^support/professional_(support|hosting)[/_](.*)/$', pgweb.profserv.views.region),
+ url(r'^account/submitbug/$', pgweb.misc.views.submitbug),
+ url(r'^account/submitbug/(\d+)/$', pgweb.misc.views.submitbug_done),
+ url(r'^support/submitbug/$', RedirectView.as_view(url='/account/submitbug/', permanent=True)),
+ url(r'^support/versioning/$', pgweb.core.views.versions),
+ url(r'^bugs_redir/(\d+)/$', pgweb.misc.views.bugs_redir),
- url(r'^about/sponsors/$', pgweb.sponsors.views.sponsors),
- url(r'^about/servers/$', pgweb.sponsors.views.servers),
+ url(r'^about/sponsors/$', pgweb.sponsors.views.sponsors),
+ url(r'^about/servers/$', pgweb.sponsors.views.servers),
- url(r'^robots.txt$', pgweb.core.views.robots),
+ url(r'^robots.txt$', pgweb.core.views.robots),
- ###
- # RSS feeds
- ###
- url(r'^versions.rss$', VersionFeed()),
- url(r'^news(/(?P[^/]+))?.rss$', NewsFeed()),
- url(r'^events.rss$', EventFeed()),
+ ###
+ # RSS feeds
+ ###
+ url(r'^versions.rss$', VersionFeed()),
+ url(r'^news(/(?P[^/]+))?.rss$', NewsFeed()),
+ url(r'^events.rss$', EventFeed()),
- ###
- # Special sections
- ###
- url(r'^account/', include('pgweb.account.urls')),
+ ###
+ # Special sections
+ ###
+ url(r'^account/', include('pgweb.account.urls')),
- ###
- # Sitemap (FIXME: support for >50k urls!)
- ###
- url(r'^sitemap.xml', pgweb.core.views.sitemap),
- url(r'^sitemap_internal.xml', pgweb.core.views.sitemap_internal),
+ ###
+ # Sitemap (FIXME: support for >50k urls!)
+ ###
+ url(r'^sitemap.xml', pgweb.core.views.sitemap),
+ url(r'^sitemap_internal.xml', pgweb.core.views.sitemap_internal),
- ###
- # Workaround for broken links pushed in press release
- ###
- url(r'^downloads/$', RedirectView.as_view(url='/download/', permanent=True)),
+ ###
+ # Workaround for broken links pushed in press release
+ ###
+ url(r'^downloads/$', RedirectView.as_view(url='/download/', permanent=True)),
- ###
- # Legacy URLs from old structurs, but used in places like press releases
- # so needs to live a bit longer.
- ###
- url(r'^about/press/contact/$', RedirectView.as_view(url='/about/press/', permanent=True)),
+ ###
+ # Legacy URLs from old structures, but used in places like press releases
+ # so they need to live a bit longer.
+ ###
+ url(r'^about/press/contact/$', RedirectView.as_view(url='/about/press/', permanent=True)),
- ###
- # Images that are used from other community sites
- ###
- url(r'^layout/images/(?P<f>[a-z0-9_\.]+)$', RedirectView.as_view(url='/media/img/layout/%(f)s', permanent=True)),
- ###
- # Handle redirect on incorrect spelling of licence
- ###
- url(r'^about/license/$', RedirectView.as_view(url='/about/licence', permanent=True)),
+ ###
+ # Images that are used from other community sites
+ ###
+ url(r'^layout/images/(?P<f>[a-z0-9_\.]+)$', RedirectView.as_view(url='/media/img/layout/%(f)s', permanent=True)),
+ ###
+ # Handle redirect on incorrect spelling of licence
+ ###
+ url(r'^about/license/$', RedirectView.as_view(url='/about/licence', permanent=True)),
- ###
- # Links included in emails on the lists (do we need to check this for XSS?)
- ###
- url(r'^mailpref/([a-z0-9_-]+)/$', pgweb.legacyurl.views.mailpref),
+ ###
+ # Links included in emails on the lists (do we need to check this for XSS?)
+ ###
+ url(r'^mailpref/([a-z0-9_-]+)/$', pgweb.legacyurl.views.mailpref),
- # Some basic information about the connection (for debugging purposes)
- url(r'^system_information/$', pgweb.core.views.system_information),
- # Sync timestamp, for automirror
- url(r'^web_sync_timestamp$', pgweb.core.views.sync_timestamp),
+ # Some basic information about the connection (for debugging purposes)
+ url(r'^system_information/$', pgweb.core.views.system_information),
+ # Sync timestamp, for automirror
+ url(r'^web_sync_timestamp$', pgweb.core.views.sync_timestamp),
- # API endpoints
- url(r'^api/varnish/purge/$', pgweb.core.views.api_varnish_purge),
+ # API endpoints
+ url(r'^api/varnish/purge/$', pgweb.core.views.api_varnish_purge),
- # Override some URLs in admin, to provide our own pages
- url(r'^admin/pending/$', pgweb.core.views.admin_pending),
- url(r'^admin/purge/$', pgweb.core.views.admin_purge),
- url(r'^admin/mergeorg/$', pgweb.core.views.admin_mergeorg),
+ # Override some URLs in admin, to provide our own pages
+ url(r'^admin/pending/$', pgweb.core.views.admin_pending),
+ url(r'^admin/purge/$', pgweb.core.views.admin_purge),
+ url(r'^admin/mergeorg/$', pgweb.core.views.admin_mergeorg),
- # We use selectable only for /admin/ for now, so put it there to avoid caching issues
- url(r'^admin/selectable/', include('selectable.urls')),
+ # We use selectable only for /admin/ for now, so put it there to avoid caching issues
+ url(r'^admin/selectable/', include('selectable.urls')),
- # Uncomment the next line to enable the admin:
- url(r'^admin/', include(admin.site.urls)),
+ # Uncomment the next line to enable the admin:
+ url(r'^admin/', include(admin.site.urls)),
- # Crash testing URL :-)
- url(r'^crashtest/$', pgweb.misc.views.crashtest),
+ # Crash testing URL :-)
+ url(r'^crashtest/$', pgweb.misc.views.crashtest),
- # Fallback for static pages, must be at the bottom
- url(r'^(.*)/$', pgweb.core.views.fallback),
+ # Fallback for static pages, must be at the bottom
+ url(r'^(.*)/$', pgweb.core.views.fallback),
]
diff --git a/pgweb/util/admin.py b/pgweb/util/admin.py
index e68041a9..6b59a1dc 100644
--- a/pgweb/util/admin.py
+++ b/pgweb/util/admin.py
@@ -6,101 +6,101 @@ from pgweb.mailqueue.util import send_simple_mail
class PgwebAdmin(admin.ModelAdmin):
- """
- ModelAdmin wrapper that will enable a few pg specific things:
- * Markdown preview for markdown capable textfields (specified by
- including them in a class variable named markdown_capable that is a tuple
- of field names)
- * Add an admin field for
"notification", that can be sent to the submitter - of an item to inform them of moderation issues. - """ + """ + ModelAdmin wrapper that will enable a few pg specific things: + * Markdown preview for markdown capable textfields (specified by + including them in a class variable named markdown_capable that is a tuple + of field names) + * Add an admin field for "notification", that can be sent to the submitter + of an item to inform them of moderation issues. + """ - change_form_template = 'admin/change_form_pgweb.html' + change_form_template = 'admin/change_form_pgweb.html' - def formfield_for_dbfield(self, db_field, **kwargs): - fld = admin.ModelAdmin.formfield_for_dbfield(self, db_field, **kwargs) + def formfield_for_dbfield(self, db_field, **kwargs): + fld = admin.ModelAdmin.formfield_for_dbfield(self, db_field, **kwargs) - if hasattr(self.model, 'markdown_fields'): - if db_field.name in self.model.markdown_fields: - fld.widget.attrs['class'] = fld.widget.attrs['class'] + ' markdown_preview' - return fld + if hasattr(self.model, 'markdown_fields'): + if db_field.name in self.model.markdown_fields: + fld.widget.attrs['class'] = fld.widget.attrs['class'] + ' markdown_preview' + return fld - def change_view(self, request, object_id, form_url='', extra_context=None): - if hasattr(self.model, 'send_notification') and self.model.send_notification: - # Anything that sends notification supports manual notifications - if extra_context == None: - extra_context = dict() - extra_context['notifications'] = ModerationNotification.objects.filter(objecttype=self.model.__name__, objectid=object_id).order_by('date') + def change_view(self, request, object_id, form_url='', extra_context=None): + if hasattr(self.model, 'send_notification') and self.model.send_notification: + # Anything that sends notification supports manual notifications + if extra_context == None: + extra_context = dict() + extra_context['notifications'] = ModerationNotification.objects.filter(objecttype=self.model.__name__, objectid=object_id).order_by('date') - return super(PgwebAdmin, self).change_view(request, object_id, form_url, extra_context) + return super(PgwebAdmin, self).change_view(request, object_id, form_url, extra_context) - # Remove the builtin delete_selected action, so it doesn't - # conflict with the custom one. - def get_actions(self, request): - actions = super(PgwebAdmin, self).get_actions(request) - del actions['delete_selected'] - return actions + # Remove the builtin delete_selected action, so it doesn't + # conflict with the custom one. + def get_actions(self, request): + actions = super(PgwebAdmin, self).get_actions(request) + del actions['delete_selected'] + return actions - # Define a custom delete_selected action. This is required because the - # default one uses the delete functionality in QuerySet, which bypasses - # the delete() operation on the model, and thus won't send out our - # notifications. Manually calling delete() on each one will be slightly - # slower, but will send proper notifications - and it's not like this - # is something that happens often enough that we care about performance. - def custom_delete_selected(self, request, queryset): - for x in queryset: - x.delete() - custom_delete_selected.short_description = "Delete selected items" - actions=['custom_delete_selected'] + # Define a custom delete_selected action. 
This is required because the + # default one uses the delete functionality in QuerySet, which bypasses + # the delete() operation on the model, and thus won't send out our + # notifications. Manually calling delete() on each one will be slightly + # slower, but will send proper notifications - and it's not like this + # is something that happens often enough that we care about performance. + def custom_delete_selected(self, request, queryset): + for x in queryset: + x.delete() + custom_delete_selected.short_description = "Delete selected items" + actions=['custom_delete_selected'] - def save_model(self, request, obj, form, change): - if change and hasattr(self.model, 'send_notification') and self.model.send_notification: - # We only do processing if something changed, not when adding - # a new object. - if request.POST.has_key('new_notification') and request.POST['new_notification']: - # Need to send off a new notification. We'll also store - # it in the database for future reference, of course. - if not obj.org.email: - # Should not happen because we remove the form field. Thus - # a hard exception is ok. - raise Exception("Organisation does not have an email, canot send notification!") - n = ModerationNotification() - n.objecttype = obj.__class__.__name__ - n.objectid = obj.id - n.text = request.POST['new_notification'] - n.author = request.user.username - n.save() + def save_model(self, request, obj, form, change): + if change and hasattr(self.model, 'send_notification') and self.model.send_notification: + # We only do processing if something changed, not when adding + # a new object. + if request.POST.has_key('new_notification') and request.POST['new_notification']: + # Need to send off a new notification. We'll also store + # it in the database for future reference, of course. + if not obj.org.email: + # Should not happen because we remove the form field. Thus + # a hard exception is ok. 
+ raise Exception("Organisation does not have an email, canot send notification!") + n = ModerationNotification() + n.objecttype = obj.__class__.__name__ + n.objectid = obj.id + n.text = request.POST['new_notification'] + n.author = request.user.username + n.save() - # Now send an email too - msgstr = _get_notification_text(obj, - request.POST['new_notification']) + # Now send an email too + msgstr = _get_notification_text(obj, + request.POST['new_notification']) - send_simple_mail(settings.NOTIFICATION_FROM, - obj.org.email, - "postgresql.org moderation notification", - msgstr) + send_simple_mail(settings.NOTIFICATION_FROM, + obj.org.email, + "postgresql.org moderation notification", + msgstr) - # Also generate a mail to the moderators - send_simple_mail(settings.NOTIFICATION_FROM, - settings.NOTIFICATION_EMAIL, - "Moderation comment on %s %s" % (obj.__class__._meta.verbose_name, obj.id), - _get_moderator_notification_text(obj, - request.POST['new_notification'], - request.user.username - )) + # Also generate a mail to the moderators + send_simple_mail(settings.NOTIFICATION_FROM, + settings.NOTIFICATION_EMAIL, + "Moderation comment on %s %s" % (obj.__class__._meta.verbose_name, obj.id), + _get_moderator_notification_text(obj, + request.POST['new_notification'], + request.user.username + )) - # Either no notifications, or done with notifications - super(PgwebAdmin, self).save_model(request, obj, form, change) + # Either no notifications, or done with notifications + super(PgwebAdmin, self).save_model(request, obj, form, change) def register_pgwebadmin(model): - admin.site.register(model, PgwebAdmin) + admin.site.register(model, PgwebAdmin) def _get_notification_text(obj, txt): - objtype = obj.__class__._meta.verbose_name - return """You recently submitted a %s to postgresql.org. + objtype = obj.__class__._meta.verbose_name + return """You recently submitted a %s to postgresql.org. During moderation, this item has received comments that need to be addressed before it can be approved. The comment given by the moderator is: @@ -114,12 +114,12 @@ request, and your submission will be re-moderated. def _get_moderator_notification_text(obj, txt, moderator): - return """Moderator %s made a comment to a pending object: + return """Moderator %s made a comment to a pending object: Object type: %s Object id: %s Comment: %s """ % (moderator, - obj.__class__._meta.verbose_name, - obj.id, - txt, - ) + obj.__class__._meta.verbose_name, + obj.id, + txt, + ) diff --git a/pgweb/util/auth.py b/pgweb/util/auth.py index bb977ddc..de016ab7 100644 --- a/pgweb/util/auth.py +++ b/pgweb/util/auth.py @@ -4,19 +4,19 @@ from django.contrib.auth.backends import ModelBackend # Special version of the authentication backend, so we can handle things like # forced lowercasing of usernames. class AuthBackend(ModelBackend): - def authenticate(self, username=None, password=None): - try: - user = User.objects.get(username=username.lower()) + def authenticate(self, username=None, password=None): + try: + user = User.objects.get(username=username.lower()) - # If user is found, check the password using the django - # methods alone. - if user.check_password(password): - return user + # If user is found, check the password using the django + # methods alone. + if user.check_password(password): + return user - # User found but password wrong --> tell django it is wrong - return None - except User.DoesNotExist: - # User not found, so clearly they can't log in! 
- return None + # User found but password wrong --> tell django it is wrong + return None + except User.DoesNotExist: + # User not found, so clearly they can't log in! + return None - return None # Should never get here, but just in case... + return None # Should never get here, but just in case... diff --git a/pgweb/util/contexts.py b/pgweb/util/contexts.py index b24f5438..a29a64f4 100644 --- a/pgweb/util/contexts.py +++ b/pgweb/util/contexts.py @@ -4,131 +4,131 @@ from django.conf import settings # This is the whole site navigation structure. Stick in a smarter file? sitenav = { - 'about': [ - {'title': 'About', 'link':'/about/'}, - {'title': 'Code of Conduct', 'link':'/about/policies/coc/', 'submenu': [ - {'title': 'Committee', 'link':'/about/policies/coc_committee/'} - ]}, - {'title': 'Feature Matrix', 'link':'/about/featurematrix/'}, - {'title': 'Donate', 'link':'/about/donate/'}, - {'title': 'History', 'link':'/docs/current/history.html'}, - {'title': 'Sponsors', 'link':'/about/sponsors/', 'submenu': [ - {'title': 'Servers', 'link': '/about/servers/'}, - ]}, - {'title': 'Latest News', 'link':'/about/newsarchive/'}, - {'title': 'Upcoming Events', 'link':'/about/events/'}, - {'title': 'Press', 'link':'/about/press/'}, - {'title': 'Licence', 'link':'/about/licence/'}, - ], - 'download': [ - {'title': 'Downloads', 'link':'/download/', 'submenu': [ - {'title': 'Binary', 'link':'/download/'}, - {'title': 'Source', 'link':'/ftp/source/'} - ]}, - {'title': 'Software Catalogue', 'link':'/download/product-categories/'}, - {'title': 'File Browser', 'link':'/ftp/'}, - ], - 'docs': [ - {'title': 'Documentation', 'link':'/docs/'}, - {'title': 'Manuals', 'link':'/docs/manuals/', 'submenu': [ - {'title': 'Archive', 'link':'/docs/manuals/archive/'}, - {'title': 'French', 'link':'https://docs.postgresql.fr/'}, - {'title': 'Japanese', 'link':'http://www.postgresql.jp/document/'}, - {'title': 'Russian', 'link':'https://postgrespro.ru/docs/postgresql'}, - ]}, - {'title': 'Books', 'link':'/docs/books/'}, - {'title': 'Online Resources', 'link':'/docs/online-resources/'}, - {'title': 'Wiki', 'link':'https://wiki.postgresql.org'}, - ], - 'community': [ - {'title': 'Community', 'link':'/community/'}, - {'title': 'Contributors', 'link':'/community/contributors/'}, - {'title': 'Mailing Lists', 'link':'/list/'}, - {'title': 'IRC', 'link':'/community/irc/'}, + 'about': [ + {'title': 'About', 'link':'/about/'}, + {'title': 'Code of Conduct', 'link':'/about/policies/coc/', 'submenu': [ + {'title': 'Committee', 'link':'/about/policies/coc_committee/'} + ]}, + {'title': 'Feature Matrix', 'link':'/about/featurematrix/'}, + {'title': 'Donate', 'link':'/about/donate/'}, + {'title': 'History', 'link':'/docs/current/history.html'}, + {'title': 'Sponsors', 'link':'/about/sponsors/', 'submenu': [ + {'title': 'Servers', 'link': '/about/servers/'}, + ]}, + {'title': 'Latest News', 'link':'/about/newsarchive/'}, + {'title': 'Upcoming Events', 'link':'/about/events/'}, + {'title': 'Press', 'link':'/about/press/'}, + {'title': 'Licence', 'link':'/about/licence/'}, + ], + 'download': [ + {'title': 'Downloads', 'link':'/download/', 'submenu': [ + {'title': 'Binary', 'link':'/download/'}, + {'title': 'Source', 'link':'/ftp/source/'} + ]}, + {'title': 'Software Catalogue', 'link':'/download/product-categories/'}, + {'title': 'File Browser', 'link':'/ftp/'}, + ], + 'docs': [ + {'title': 'Documentation', 'link':'/docs/'}, + {'title': 'Manuals', 'link':'/docs/manuals/', 'submenu': [ + {'title': 'Archive', 
'link':'/docs/manuals/archive/'}, + {'title': 'French', 'link':'https://docs.postgresql.fr/'}, + {'title': 'Japanese', 'link':'http://www.postgresql.jp/document/'}, + {'title': 'Russian', 'link':'https://postgrespro.ru/docs/postgresql'}, + ]}, + {'title': 'Books', 'link':'/docs/books/'}, + {'title': 'Online Resources', 'link':'/docs/online-resources/'}, + {'title': 'Wiki', 'link':'https://wiki.postgresql.org'}, + ], + 'community': [ + {'title': 'Community', 'link':'/community/'}, + {'title': 'Contributors', 'link':'/community/contributors/'}, + {'title': 'Mailing Lists', 'link':'/list/'}, + {'title': 'IRC', 'link':'/community/irc/'}, {'title': 'Slack', 'link':'https://postgres-slack.herokuapp.com/'}, - {'title': 'Local User Groups', 'link':'/community/user-groups/'}, + {'title': 'Local User Groups', 'link':'/community/user-groups/'}, {'title': 'Events', 'link':'/about/events/'}, - {'title': 'International Sites','link':'/community/international/'}, - {'title': 'Recognition Guidelines','link':'/community/recognition/'}, - ], - 'developer': [ - {'title': 'Developers', 'link':'/developer/'}, - {'title': 'Core Team', 'link':'/developer/core/'}, - {'title': 'Roadmap', 'link':'/developer/roadmap/'}, - {'title': 'Coding', 'link':'/developer/coding/'}, - {'title': 'Testing', 'link':'/developer/testing/', 'submenu': [ - {'title': 'Beta Information', 'link':'/developer/beta/'}, - ]}, - {'title': 'Mailing Lists', 'link':'/list/'}, - {'title': 'Developer FAQ', 'link':'https://wiki.postgresql.org/wiki/Developer_FAQ'}, - ], - 'support': [ - {'title': 'Support', 'link':'/support/'}, - {'title': 'Versioning Policy', 'link':'/support/versioning/'}, - {'title': 'Security', 'link':'/support/security/'}, - {'title': 'Professional Services','link':'/support/professional_support/'}, - {'title': 'Hosting Solutions', 'link':'/support/professional_hosting/'}, - {'title': 'Report a Bug', 'link':'/account/submitbug/'}, - ], - 'account': [ - {'title': 'Your account', 'link':'/account'}, - {'title': 'Profile', 'link':'/account/profile'}, - {'title': 'Submitted data', 'link':'/account', 'submenu': [ - {'title': 'News Articles', 'link':'/account/edit/news/'}, - {'title': 'Events', 'link':'/account/edit/events/'}, - {'title': 'Products', 'link':'/account/edit/products/'}, - {'title': 'Professional Services', 'link':'/account/edit/services/'}, - {'title': 'Organisations', 'link':'/account/edit/organisations/'}, - ]}, - {'title': 'Change password', 'link':'/account/changepwd/'}, - {'title': 'Logout', 'link':'/account/logout'}, - ], + {'title': 'International Sites','link':'/community/international/'}, + {'title': 'Recognition Guidelines','link':'/community/recognition/'}, + ], + 'developer': [ + {'title': 'Developers', 'link':'/developer/'}, + {'title': 'Core Team', 'link':'/developer/core/'}, + {'title': 'Roadmap', 'link':'/developer/roadmap/'}, + {'title': 'Coding', 'link':'/developer/coding/'}, + {'title': 'Testing', 'link':'/developer/testing/', 'submenu': [ + {'title': 'Beta Information', 'link':'/developer/beta/'}, + ]}, + {'title': 'Mailing Lists', 'link':'/list/'}, + {'title': 'Developer FAQ', 'link':'https://wiki.postgresql.org/wiki/Developer_FAQ'}, + ], + 'support': [ + {'title': 'Support', 'link':'/support/'}, + {'title': 'Versioning Policy', 'link':'/support/versioning/'}, + {'title': 'Security', 'link':'/support/security/'}, + {'title': 'Professional Services','link':'/support/professional_support/'}, + {'title': 'Hosting Solutions', 'link':'/support/professional_hosting/'}, + {'title': 'Report a Bug', 
'link':'/account/submitbug/'}, + ], + 'account': [ + {'title': 'Your account', 'link':'/account'}, + {'title': 'Profile', 'link':'/account/profile'}, + {'title': 'Submitted data', 'link':'/account', 'submenu': [ + {'title': 'News Articles', 'link':'/account/edit/news/'}, + {'title': 'Events', 'link':'/account/edit/events/'}, + {'title': 'Products', 'link':'/account/edit/products/'}, + {'title': 'Professional Services', 'link':'/account/edit/services/'}, + {'title': 'Organisations', 'link':'/account/edit/organisations/'}, + ]}, + {'title': 'Change password', 'link':'/account/changepwd/'}, + {'title': 'Logout', 'link':'/account/logout'}, + ], } def get_nav_menu(section): - if sitenav.has_key(section): - return sitenav[section] - else: - return {} + if sitenav.has_key(section): + return sitenav[section] + else: + return {} def render_pgweb(request, section, template, context): - context['navmenu'] = get_nav_menu(section) - return render(request, template, context) + context['navmenu'] = get_nav_menu(section) + return render(request, template, context) def _get_gitrev(): - # Return the current git revision, that is used for - # cache-busting URLs. - try: - with open('.git/refs/heads/master') as f: - return f.readline()[:8] - except IOError: - # A "git gc" will remove the ref and replace it with a packed-refs. - try: - with open('.git/packed-refs') as f: - for l in f.readlines(): - if l.endswith("refs/heads/master\n"): - return l[:8] - # Not found in packed-refs. Meh, just make one up. - return 'ffffffff' - except IOError: - # If packed-refs also can't be read, just give up - return 'eeeeeeee' + # Return the current git revision, that is used for + # cache-busting URLs. + try: + with open('.git/refs/heads/master') as f: + return f.readline()[:8] + except IOError: + # A "git gc" will remove the ref and replace it with a packed-refs. + try: + with open('.git/packed-refs') as f: + for l in f.readlines(): + if l.endswith("refs/heads/master\n"): + return l[:8] + # Not found in packed-refs. Meh, just make one up. + return 'ffffffff' + except IOError: + # If packed-refs also can't be read, just give up + return 'eeeeeeee' # Template context processor to add information about the root link and # the current git revision. 
git revision is returned as a lazy object so # we don't spend effort trying to load it if we don't need it (though # all general pages will need it since it's used to render the css urls) def PGWebContextProcessor(request): - gitrev = SimpleLazyObject(_get_gitrev) - if request.is_secure(): - return { - 'link_root': settings.SITE_ROOT, - 'do_esi': settings.DO_ESI, - 'gitrev': gitrev, - } - else: - return { - 'gitrev': gitrev, - 'do_esi': settings.DO_ESI, - } + gitrev = SimpleLazyObject(_get_gitrev) + if request.is_secure(): + return { + 'link_root': settings.SITE_ROOT, + 'do_esi': settings.DO_ESI, + 'gitrev': gitrev, + } + else: + return { + 'gitrev': gitrev, + 'do_esi': settings.DO_ESI, + } diff --git a/pgweb/util/decorators.py b/pgweb/util/decorators.py index ca8d40f1..f29fd160 100644 --- a/pgweb/util/decorators.py +++ b/pgweb/util/decorators.py @@ -4,54 +4,54 @@ from collections import defaultdict from django.contrib.auth.decorators import login_required as django_login_required def nocache(fn): - def _nocache(request, *_args, **_kwargs): - resp = fn(request, *_args, **_kwargs) - resp['Cache-Control'] = 's-maxage=0' - return resp - return _nocache + def _nocache(request, *_args, **_kwargs): - wait + resp = fn(request, *_args, **_kwargs) + resp['Cache-Control'] = 's-maxage=0' + return resp + return _nocache def cache(days=0, hours=0, minutes=0, seconds=0): - "Set the server-side cache time of the object to the given days/hours/minutes/seconds" - def _cache(fn): - def __cache(request, *_args, **_kwargs): - resp = fn(request, *_args, **_kwargs) - td = datetime.timedelta(days=days, hours=hours, minutes=minutes, seconds=seconds) - resp['Cache-Control'] = 's-maxage=%s' % (td.days*3600*24 + td.seconds) - return resp - return __cache - return _cache + "Set the server-side cache time of the object to the given days/hours/minutes/seconds" + def _cache(fn): + def __cache(request, *_args, **_kwargs): + resp = fn(request, *_args, **_kwargs) + td = datetime.timedelta(days=days, hours=hours, minutes=minutes, seconds=seconds) + resp['Cache-Control'] = 's-maxage=%s' % (td.days*3600*24 + td.seconds) + return resp + return __cache + return _cache def allow_frames(fn): - def _allow_frames(request, *_args, **_kwargs): - resp = fn(request, *_args, **_kwargs) - resp.x_allow_frames = True - return resp - return _allow_frames + def _allow_frames(request, *_args, **_kwargs): + resp = fn(request, *_args, **_kwargs) + resp.x_allow_frames = True + return resp + return _allow_frames def content_sources(what, source): - def _script_sources(fn): - def __script_sources(request, *_args, **_kwargs): - resp = fn(request, *_args, **_kwargs) - if not hasattr(resp, 'x_allow_extra_sources'): - resp.x_allow_extra_sources = defaultdict(list) - resp.x_allow_extra_sources[what].append(source) - return resp - return __script_sources - return _script_sources + def _script_sources(fn): + def __script_sources(request, *_args, **_kwargs): + resp = fn(request, *_args, **_kwargs) + if not hasattr(resp, 'x_allow_extra_sources'): + resp.x_allow_extra_sources = defaultdict(list) + resp.x_allow_extra_sources[what].append(source) + return resp + return __script_sources + return _script_sources def script_sources(source): - return content_sources('script', source) + return content_sources('script', source) def frame_sources(source): - return content_sources('frame', source) + return content_sources('frame', source) # A wrapped version of login_required that throws an exception if it's # used on a path that's not under /account/.
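Since these decorators only ever manipulate the s-maxage part of Cache-Control (the knob the Varnish frontends honour), a usage sketch may help; the view names and responses below are hypothetical, not part of the patch:

    # Hypothetical views showing the caching decorators in use.
    from django.http import HttpResponse
    from pgweb.util.decorators import cache, nocache

    @cache(hours=6)
    def product_list(request):
        # Sent with Cache-Control: s-maxage=21600, so Varnish caches it for six hours.
        return HttpResponse("cacheable listing")

    @nocache
    def my_account(request):
        # Sent with Cache-Control: s-maxage=0, so Varnish never caches it.
        return HttpResponse("always fresh")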
def login_required(f): - @wraps(f) - def wrapper(*args, **kwargs): - request = args[0] - if not (request.path.startswith('/account/') or request.path.startswith('/admin/')): - raise Exception("Login required in bad path, aborting with exception.") - return django_login_required(f)(*args, **kwargs) - return wrapper + @wraps(f) + def wrapper(*args, **kwargs): + request = args[0] + if not (request.path.startswith('/account/') or request.path.startswith('/admin/')): + raise Exception("Login required in bad path, aborting with exception.") + return django_login_required(f)(*args, **kwargs) + return wrapper diff --git a/pgweb/util/helpers.py b/pgweb/util/helpers.py index 6b3f08de..2159381b 100644 --- a/pgweb/util/helpers.py +++ b/pgweb/util/helpers.py @@ -5,91 +5,91 @@ from django.template.loader import get_template import django.utils.xmlutils def simple_form(instancetype, itemid, request, formclass, formtemplate='base/form.html', redirect='/account/', navsection='account', fixedfields=None, createifempty=False): - if itemid == 'new': - instance = instancetype() - else: - # Regular form item, attempt to edit it - try: - int(itemid) - except ValueError: - raise Http404("Invalid URL") - if createifempty: - (instance, wascreated) = instancetype.objects.get_or_create(pk=itemid) - else: - instance = get_object_or_404(instancetype, pk=itemid) - if hasattr(instance, 'submitter'): - if not instance.submitter == request.user: - raise Exception("You are not the owner of this item!") - elif hasattr(instance, 'verify_submitter'): - if not instance.verify_submitter(request.user): - raise Exception("You are not the owner of this item!") + if itemid == 'new': + instance = instancetype() + else: + # Regular form item, attempt to edit it + try: + int(itemid) + except ValueError: + raise Http404("Invalid URL") + if createifempty: + (instance, wascreated) = instancetype.objects.get_or_create(pk=itemid) + else: + instance = get_object_or_404(instancetype, pk=itemid) + if hasattr(instance, 'submitter'): + if not instance.submitter == request.user: + raise Exception("You are not the owner of this item!") + elif hasattr(instance, 'verify_submitter'): + if not instance.verify_submitter(request.user): + raise Exception("You are not the owner of this item!") - if request.method == 'POST': - # Process this form - form = formclass(data=request.POST, instance=instance) - if form.is_valid(): - r = form.save(commit=False) - r.submitter = request.user - # Set fixed fields. Note that this will not work if the fixed fields are ManyToMany, - # but we'll fix that sometime in the future - if fixedfields: - for k,v in fixedfields.items(): - setattr(r, k, v) - r.save() + if request.method == 'POST': + # Process this form + form = formclass(data=request.POST, instance=instance) + if form.is_valid(): + r = form.save(commit=False) + r.submitter = request.user + # Set fixed fields. 
Note that this will not work if the fixed fields are ManyToMany, + # but we'll fix that sometime in the future + if fixedfields: + for k,v in fixedfields.items(): + setattr(r, k, v) + r.save() - # If we have a callback with the current user - if hasattr(form, 'apply_submitter'): - form.apply_submitter(r, request.user) - r.save() + # If we have a callback with the current user + if hasattr(form, 'apply_submitter'): + form.apply_submitter(r, request.user) + r.save() - form.save_m2m() + form.save_m2m() - return HttpResponseRedirect(redirect) - else: - # Generate form - form = formclass(instance=instance) + return HttpResponseRedirect(redirect) + else: + # Generate form + form = formclass(instance=instance) - if hasattr(form, 'filter_by_user'): - form.filter_by_user(request.user) + if hasattr(form, 'filter_by_user'): + form.filter_by_user(request.user) - for fn in form.fields: - if fn in getattr(instancetype, 'markdown_fields', []): - form.fields[fn].widget.attrs.update({'class': 'markdown-content'}) + for fn in form.fields: + if fn in getattr(instancetype, 'markdown_fields', []): + form.fields[fn].widget.attrs.update({'class': 'markdown-content'}) - for togg in getattr(form, 'toggle_fields', []): - form.fields[togg['name']].widget.attrs.update({ - 'data-toggles': ','.join(togg['fields']), - 'data-toggle-invert': togg['invert'] and 'true' or 'false', - 'class': 'toggle-checkbox', - }) + for togg in getattr(form, 'toggle_fields', []): + form.fields[togg['name']].widget.attrs.update({ + 'data-toggles': ','.join(togg['fields']), + 'data-toggle-invert': togg['invert'] and 'true' or 'false', + 'class': 'toggle-checkbox', + }) - return render_pgweb(request, navsection, formtemplate, { - 'form': form, - 'formitemtype': instance._meta.verbose_name, - 'form_intro': hasattr(form, 'form_intro') and form.form_intro or None, - 'described_checkboxes': getattr(form, 'described_checkboxes', {}), - 'savebutton': (itemid == "new") and "Submit New" or "Save", - 'operation': (itemid == "new") and "New" or "Edit", - }) + return render_pgweb(request, navsection, formtemplate, { + 'form': form, + 'formitemtype': instance._meta.verbose_name, + 'form_intro': hasattr(form, 'form_intro') and form.form_intro or None, + 'described_checkboxes': getattr(form, 'described_checkboxes', {}), + 'savebutton': (itemid == "new") and "Submit New" or "Save", + 'operation': (itemid == "new") and "New" or "Edit", + }) def template_to_string(templatename, attrs = {}): - return get_template(templatename).render(attrs) + return get_template(templatename).render(attrs) def HttpServerError(request, msg): - r = render(request, 'errors/500.html', { - 'message': msg, - }) - r.status_code = 500 - return r + r = render(request, 'errors/500.html', { + 'message': msg, + }) + r.status_code = 500 + return r class PgXmlHelper(django.utils.xmlutils.SimplerXMLGenerator): - def __init__(self, outstream, skipempty=False): - django.utils.xmlutils.SimplerXMLGenerator.__init__(self, outstream, 'utf-8') - self.skipempty = skipempty + def __init__(self, outstream, skipempty=False): + django.utils.xmlutils.SimplerXMLGenerator.__init__(self, outstream, 'utf-8') + self.skipempty = skipempty - def add_xml_element(self, name, value): - if self.skipempty and value=='': return - self.startElement(name, {}) - self.characters(value) - self.endElement(name) + def add_xml_element(self, name, value): + if self.skipempty and value=='': return + self.startElement(name, {}) + self.characters(value) + self.endElement(name) diff --git a/pgweb/util/middleware.py 
b/pgweb/util/middleware.py index 12f54d8f..9abcae2e 100644 --- a/pgweb/util/middleware.py +++ b/pgweb/util/middleware.py @@ -14,61 +14,61 @@ except ImportError: _thread_locals = local() def get_current_user(): - return getattr(_thread_locals, 'user', None) + return getattr(_thread_locals, 'user', None) # General middleware for all middleware functionality specific to the pgweb # project. class PgMiddleware(object): - def process_view(self, request, view_func, view_args, view_kwargs): - return None + def process_view(self, request, view_func, view_args, view_kwargs): + return None - def process_request(self, request): + def process_request(self, request): # Thread local store for username, see comment at the top of this file - _thread_locals.user = getattr(request, 'user', None) - initialize_template_collection() + _thread_locals.user = getattr(request, 'user', None) + initialize_template_collection() - def process_response(self, request, response): - # Set xkey representing the templates that are in use so we can do efficient - # varnish purging on commits. - tlist = get_all_templates() - if 'base/esi.html' in tlist: - response['x-do-esi'] = "1" - tlist.remove('base/esi.html') - if tlist: - response['xkey'] = ' '.join(["pgwt_{0}".format(hashlib.md5(t).hexdigest()) for t in tlist]) + def process_response(self, request, response): + # Set xkey representing the templates that are in use so we can do efficient + # varnish purging on commits. + tlist = get_all_templates() + if 'base/esi.html' in tlist: + response['x-do-esi'] = "1" + tlist.remove('base/esi.html') + if tlist: + response['xkey'] = ' '.join(["pgwt_{0}".format(hashlib.md5(t).hexdigest()) for t in tlist]) - # Set security headers - sources = OrderedDict([ - ('default', ["'self'", ]), - ('img', ['*', 'data:', ]), - ('script', ["'self'", "www.google-analytics.com", "ssl.google-analytics.com", "data:"]), - ('connect', ["'self'", "www.google-analytics.com", "ssl.google-analytics.com"]), - ('media', ["'self'", ]), - ('style', ["'self'", "fonts.googleapis.com"]), - ('font', ["'self'", "fonts.gstatic.com", "data:" ]), - ]) - if hasattr(response, 'x_allow_extra_sources'): - for k,v in response.x_allow_extra_sources.items(): - if k in sources: - sources[k].extend(v) - else: - sources[k] = v + # Set security headers + sources = OrderedDict([ + ('default', ["'self'", ]), + ('img', ['*', 'data:', ]), + ('script', ["'self'", "www.google-analytics.com", "ssl.google-analytics.com", "data:"]), + ('connect', ["'self'", "www.google-analytics.com", "ssl.google-analytics.com"]), + ('media', ["'self'", ]), + ('style', ["'self'", "fonts.googleapis.com"]), + ('font', ["'self'", "fonts.gstatic.com", "data:" ]), + ]) + if hasattr(response, 'x_allow_extra_sources'): + for k,v in response.x_allow_extra_sources.items(): + if k in sources: + sources[k].extend(v) + else: + sources[k] = v - security_policies = ["{0}-src {1}".format(k," ".join(v)) for k,v in sources.items()] + security_policies = ["{0}-src {1}".format(k," ".join(v)) for k,v in sources.items()] - if not getattr(response, 'x_allow_frames', False): - response['X-Frame-Options'] = 'DENY' - security_policies.append("frame-ancestors 'none'") + if not getattr(response, 'x_allow_frames', False): + response['X-Frame-Options'] = 'DENY' + security_policies.append("frame-ancestors 'none'") - if hasattr(settings, 'SECURITY_POLICY_REPORT_URI'): - security_policies.append("report-uri " + settings.SECURITY_POLICY_REPORT_URI) + if hasattr(settings, 'SECURITY_POLICY_REPORT_URI'): + 
security_policies.append("report-uri " + settings.SECURITY_POLICY_REPORT_URI) - if security_policies: - if getattr(settings, 'SECURITY_POLICY_REPORT_ONLY', False): - response['Content-Security-Policy-Report-Only'] = " ; ".join(security_policies) - else: - response['Content-Security-Policy'] = " ; ".join(security_policies) + if security_policies: + if getattr(settings, 'SECURITY_POLICY_REPORT_ONLY', False): + response['Content-Security-Policy-Report-Only'] = " ; ".join(security_policies) + else: + response['Content-Security-Policy'] = " ; ".join(security_policies) - response['X-XSS-Protection'] = "1; mode=block" - return response + response['X-XSS-Protection'] = "1; mode=block" + return response diff --git a/pgweb/util/misc.py b/pgweb/util/misc.py index 80aae44f..760ba872 100644 --- a/pgweb/util/misc.py +++ b/pgweb/util/misc.py @@ -9,84 +9,84 @@ from pgweb.util.helpers import template_to_string import re def send_template_mail(sender, receiver, subject, templatename, templateattr={}, usergenerated=False, cc=None, replyto=None, receivername=None, sendername=None, messageid=None): - d = { - 'link_root': settings.SITE_ROOT, - } - d.update(templateattr) - send_simple_mail(sender, receiver, subject, - template_to_string(templatename, d), - usergenerated=usergenerated, cc=cc, replyto=replyto, - receivername=receivername, sendername=sendername, - messageid=messageid) + d = { + 'link_root': settings.SITE_ROOT, + } + d.update(templateattr) + send_simple_mail(sender, receiver, subject, + template_to_string(templatename, d), + usergenerated=usergenerated, cc=cc, replyto=replyto, + receivername=receivername, sendername=sendername, + messageid=messageid) def get_client_ip(request): - """ - Get the IP of the client. If the client is served through our Varnish caches, - or behind one of our SSL proxies, make sure to get the *actual* client IP, - and not the IP of the cache/proxy. - """ - if request.META.has_key('HTTP_X_FORWARDED_FOR'): - # There is a x-forwarded-for header, so trust it but only if the actual connection - # is coming in from one of our frontends. - if request.META['REMOTE_ADDR'] in settings.FRONTEND_SERVERS: - return request.META['HTTP_X_FORWARDED_FOR'] + """ + Get the IP of the client. If the client is served through our Varnish caches, + or behind one of our SSL proxies, make sure to get the *actual* client IP, + and not the IP of the cache/proxy. + """ + if request.META.has_key('HTTP_X_FORWARDED_FOR'): + # There is a x-forwarded-for header, so trust it but only if the actual connection + # is coming in from one of our frontends. + if request.META['REMOTE_ADDR'] in settings.FRONTEND_SERVERS: + return request.META['HTTP_X_FORWARDED_FOR'] - # Else fall back and return the actual IP of the connection - return request.META['REMOTE_ADDR'] + # Else fall back and return the actual IP of the connection + return request.META['REMOTE_ADDR'] def varnish_purge_xkey(xkey): - """ - Purge the specified xkey from Varnish. - """ - connection.cursor().execute("SELECT varnish_purge_xkey(%s)", (xkey, )) + """ + Purge the specified xkey from Varnish. + """ + connection.cursor().execute("SELECT varnish_purge_xkey(%s)", (xkey, )) def varnish_purge(url): - """ - Purge the specified URL from Varnish. Will add initial anchor to the URL, - but no trailing one, so by default a wildcard match is done. - """ - url = '^%s' % url - connection.cursor().execute("SELECT varnish_purge(%s)", (url, )) + """ + Purge the specified URL from Varnish. 
Will add initial anchor to the URL, + but no trailing one, so by default a wildcard match is done. + """ + url = '^%s' % url + connection.cursor().execute("SELECT varnish_purge(%s)", (url, )) def varnish_purge_expr(expr): - """ - Purge the specified expression from Varnish. Does not modify the expression - at all, so be very careful! - """ - connection.cursor().execute("SELECT varnish_purge_expr(%s)", (expr, )) + """ + Purge the specified expression from Varnish. Does not modify the expression + at all, so be very careful! + """ + connection.cursor().execute("SELECT varnish_purge_expr(%s)", (expr, )) def version_sort(l): - """ - map a directory name to a format that will show up sensibly in an ascii sort - We specifically detect entries that look like versions. Weird things may happen - if there is a mix of versions and non-versions in the same directory, but we - generally don't have that. - """ - mkey = l['link'] - m = re.match('v?([0-9]+)\.([0-9]+)\.([0-9]+)$',l['url']) - if m: - mkey = m.group(1) + '%02d' % int(m.group(2)) + '%02d' % int(m.group(3)); - m = re.match('v?([0-9]+)\.([0-9]+)$',l['url']) - if m: - mkey = m.group(1) + '%02d' % int(m.group(2)); - # SOOO ugly. But if it's v10 and up, just prefix it to get it higher - if int(m.group(1)) >= 10: - mkey = 'a' + mkey - m = re.match('v?([0-9]+)$', l['url']) - if m: - # This can only happen on 10+, so... - mkey = 'a' + m.group(1) + '0' + """ + map a directory name to a format that will show up sensibly in an ascii sort + We specifically detect entries that look like versions. Weird things may happen + if there is a mix of versions and non-versions in the same directory, but we + generally don't have that. + """ + mkey = l['link'] + m = re.match('v?([0-9]+)\.([0-9]+)\.([0-9]+)$',l['url']) + if m: + mkey = m.group(1) + '%02d' % int(m.group(2)) + '%02d' % int(m.group(3)); + m = re.match('v?([0-9]+)\.([0-9]+)$',l['url']) + if m: + mkey = m.group(1) + '%02d' % int(m.group(2)); + # SOOO ugly. But if it's v10 and up, just prefix it to get it higher + if int(m.group(1)) >= 10: + mkey = 'a' + mkey + m = re.match('v?([0-9]+)$', l['url']) + if m: + # This can only happen on 10+, so... + mkey = 'a' + m.group(1) + '0' - return mkey + return mkey def generate_random_token(): - """ - Generate a random token of 64 characters. This token will be - generated using a strong random number, and then hex encoded to make - sure all characters are safe to put in emails and URLs. - """ - s = SHA256.new() - r = Random.new() - s.update(r.read(250)) - return s.hexdigest() + """ + Generate a random token of 64 characters. This token will be + generated using a strong random number, and then hex encoded to make + sure all characters are safe to put in emails and URLs. 
+ """ + s = SHA256.new() + r = Random.new() + s.update(r.read(250)) + return s.hexdigest() diff --git a/pgweb/util/moderation.py b/pgweb/util/moderation.py index 9c5ede2d..d67c9748 100644 --- a/pgweb/util/moderation.py +++ b/pgweb/util/moderation.py @@ -8,19 +8,19 @@ from pgweb.quotes.models import Quote # Pending moderation requests (including URLs for the admin interface)) def _get_unapproved_list(objecttype): - objects = objecttype.objects.filter(approved=False) - if not len(objects): return None - return { 'name': objects[0]._meta.verbose_name_plural, 'entries': - [{'url': '/admin/%s/%s/%s/' % (x._meta.app_label, x._meta.model_name, x.pk), 'title': unicode(x)} for x in objects] - } + objects = objecttype.objects.filter(approved=False) + if not len(objects): return None + return { 'name': objects[0]._meta.verbose_name_plural, 'entries': + [{'url': '/admin/%s/%s/%s/' % (x._meta.app_label, x._meta.model_name, x.pk), 'title': unicode(x)} for x in objects] + } def get_all_pending_moderations(): - applist = [ - _get_unapproved_list(NewsArticle), - _get_unapproved_list(Event), - _get_unapproved_list(Organisation), - _get_unapproved_list(Product), - _get_unapproved_list(ProfessionalService), - _get_unapproved_list(Quote), - ] - return [x for x in applist if x] + applist = [ + _get_unapproved_list(NewsArticle), + _get_unapproved_list(Event), + _get_unapproved_list(Organisation), + _get_unapproved_list(Product), + _get_unapproved_list(ProfessionalService), + _get_unapproved_list(Quote), + ] + return [x for x in applist if x] diff --git a/pgweb/util/signals.py b/pgweb/util/signals.py index 558953bc..178ec04d 100644 --- a/pgweb/util/signals.py +++ b/pgweb/util/signals.py @@ -9,161 +9,161 @@ from pgweb.util.misc import varnish_purge from pgweb.mailqueue.util import send_simple_mail def _build_url(obj): - if obj.id: - return "%s/admin/%s/%s/%s/" % ( - settings.SITE_ROOT, - obj._meta.app_label, - obj._meta.model_name, - obj.id, - ) - else: - return "%s/admin/%s/%s/" % ( - settings.SITE_ROOT, - obj._meta.app_label, - obj._meta.model_name, - ) + if obj.id: + return "%s/admin/%s/%s/%s/" % ( + settings.SITE_ROOT, + obj._meta.app_label, + obj._meta.model_name, + obj.id, + ) + else: + return "%s/admin/%s/%s/" % ( + settings.SITE_ROOT, + obj._meta.app_label, + obj._meta.model_name, + ) def _get_full_text_diff(obj, oldobj): - fieldlist = _get_all_notification_fields(obj) - if not fieldlist: - return "This object does not know how to express ifself." + fieldlist = _get_all_notification_fields(obj) + if not fieldlist: + return "This object does not know how to express ifself." 
- s = "\n\n".join(["\n".join(filter(lambda x: not x.startswith('@@'), - difflib.unified_diff( - _get_attr_value(oldobj, n).splitlines(), - _get_attr_value(obj, n).splitlines(), - n=1, - lineterm='', - fromfile=n, - tofile=n, - )) - ) for n in fieldlist if _get_attr_value(oldobj, n) != _get_attr_value(obj, n)]) - if not s: return None - return s + s = "\n\n".join(["\n".join(filter(lambda x: not x.startswith('@@'), + difflib.unified_diff( + _get_attr_value(oldobj, n).splitlines(), + _get_attr_value(obj, n).splitlines(), + n=1, + lineterm='', + fromfile=n, + tofile=n, + )) + ) for n in fieldlist if _get_attr_value(oldobj, n) != _get_attr_value(obj, n)]) + if not s: return None + return s def _get_all_notification_fields(obj): - if hasattr(obj, 'notify_fields'): - return obj.notify_fields - else: - # Include all field names except specified ones, - # that are local to this model (not auto created) - return [f.name for f in obj._meta.get_fields() if not f.name in ('approved', 'submitter', 'id', ) and not f.auto_created] + if hasattr(obj, 'notify_fields'): + return obj.notify_fields + else: + # Include all field names except specified ones, + # that are local to this model (not auto created) + return [f.name for f in obj._meta.get_fields() if not f.name in ('approved', 'submitter', 'id', ) and not f.auto_created] def _get_attr_value(obj, fieldname): - # see if this is a Many-to-many field. If yes, we want to print - # it out as a pretty list - if isinstance(obj._meta.get_field(fieldname), models.ManyToManyField): - # XXX: Changes to ManyToMany fields can't be tracked here :( - # For now, we have no good way to deal with it so, well, don't. - # (trying to get the value will return None for it) - return '' + # see if this is a Many-to-many field. If yes, we want to print + # it out as a pretty list + if isinstance(obj._meta.get_field(fieldname), models.ManyToManyField): + # XXX: Changes to ManyToMany fields can't be tracked here :( + # For now, we have no good way to deal with it so, well, don't. + # (trying to get the value will return None for it) + return '' - # Return the value, or an empty tring if it's NULL (migrated records) - return unicode(getattr(obj, fieldname)) or '' + # Return the value, or an empty tring if it's NULL (migrated records) + return unicode(getattr(obj, fieldname)) or '' def _get_full_text_representation(obj): - fieldlist = _get_all_notification_fields(obj) - if not fieldlist: - return "This object does not know how to express itself." + fieldlist = _get_all_notification_fields(obj) + if not fieldlist: + return "This object does not know how to express itself." - return "\n".join([u'%s: %s' % (n, _get_attr_value(obj, n)) for n in fieldlist]) + return "\n".join([u'%s: %s' % (n, _get_attr_value(obj, n)) for n in fieldlist]) def _get_notification_text(obj): - try: - oldobj = obj.__class__.objects.get(pk=obj.pk) - except obj.DoesNotExist: - return ('A new {0} has been added'.format(obj._meta.verbose_name), - _get_full_text_representation(obj)) + try: + oldobj = obj.__class__.objects.get(pk=obj.pk) + except obj.DoesNotExist: + return ('A new {0} has been added'.format(obj._meta.verbose_name), + _get_full_text_representation(obj)) - if hasattr(obj, 'approved'): - # This object has the capability to do approving. Apply the following logic: - # 1. If object was unapproved, and is still unapproved, don't send notification - # 2. If object was unapproved, and is now approved, send "object approved" notification - # 3. 
- if hasattr(obj, 'approved'): - # This object has the capability to do approving. Apply the following logic: - # 1. If object was unapproved, and is still unapproved, don't send notification - # 2. If object was unapproved, and is now approved, send "object approved" notification - # 3. If object was approved, and is no longer approved, send "object unapproved" notification - # 4. (FIXME: configurable?) If object was approved and is still approved, send changes notification - if not obj.approved: - if not oldobj.approved: - # Was unapproved, still unapproved -> no notification - return (None, None) - # From approved to unapproved - return ('{0} id {1} has been unapproved'.format(obj._meta.verbose_name, obj.id), - _get_full_text_representation(obj)) - else: - if not oldobj.approved: - # Object went from unapproved to approved - return ('{0} id {1} has been approved'.format(obj._meta.verbose_name, obj.id), - _get_full_text_representation(obj)) - # Object contents have changed. Generate a diff! - diff = _get_full_text_diff(obj, oldobj) - if not diff: - return (None, None) - return ('{0} id {1} has been modified'.format(obj._meta.verbose_name, obj.id), - 'The following fields have been modified:\n\n%s' % diff) - else: - # If there is no approved field, but send_notifications was set - # to True, we notify on all changes. - diff = _get_full_text_diff(obj, oldobj) - if not diff: - return (None, None) - return ('{0} id {1} has been modified'.format(obj._meta.verbose_name, obj.id), - 'The following fields have been modified:\n\n%s' % diff) + if hasattr(obj, 'approved'): + # This object has the capability to do approving. Apply the following logic: + # 1. If object was unapproved, and is still unapproved, don't send notification + # 2. If object was unapproved, and is now approved, send "object approved" notification + # 3. If object was approved, and is no longer approved, send "object unapproved" notification + # 4. (FIXME: configurable?) If object was approved and is still approved, send changes notification + if not obj.approved: + if not oldobj.approved: + # Was unapproved, still unapproved -> no notification + return (None, None) + # From approved to unapproved + return ('{0} id {1} has been unapproved'.format(obj._meta.verbose_name, obj.id), + _get_full_text_representation(obj)) + else: + if not oldobj.approved: + # Object went from unapproved to approved + return ('{0} id {1} has been approved'.format(obj._meta.verbose_name, obj.id), + _get_full_text_representation(obj)) + # Object contents have changed. Generate a diff! + diff = _get_full_text_diff(obj, oldobj) + if not diff: + return (None, None) + return ('{0} id {1} has been modified'.format(obj._meta.verbose_name, obj.id), + 'The following fields have been modified:\n\n%s' % diff) + else: + # If there is no approved field, but send_notifications was set + # to True, we notify on all changes.
+ diff = _get_full_text_diff(obj, oldobj) + if not diff: + return (None, None) + return ('{0} id {1} has been modified'.format(obj._meta.verbose_name, obj.id), + 'The following fields have been modified:\n\n%s' % diff) def my_pre_save_handler(sender, **kwargs): - instance = kwargs['instance'] - if getattr(instance, 'send_notification', False) and get_current_user(): - (subj, cont) = _get_notification_text(instance) - if cont: - cont = _build_url(instance) + "\n\n" + cont - send_simple_mail(settings.NOTIFICATION_FROM, - settings.NOTIFICATION_EMAIL, - "%s by %s" % (subj, get_current_user()), - cont) + instance = kwargs['instance'] + if getattr(instance, 'send_notification', False) and get_current_user(): + (subj, cont) = _get_notification_text(instance) + if cont: + cont = _build_url(instance) + "\n\n" + cont + send_simple_mail(settings.NOTIFICATION_FROM, + settings.NOTIFICATION_EMAIL, + "%s by %s" % (subj, get_current_user()), + cont) def my_m2m_changed_handler(sender, **kwargs): - instance = kwargs['instance'] - if getattr(instance, 'send_m2m_notification', False) and get_current_user(): - (cl, f) = sender.__name__.split('_') - if not hasattr(instance, '_stored_m2m'): - instance._stored_m2m={} - if kwargs['action'] == 'pre_clear': - instance._stored_m2m[f] = set([unicode(t) for t in getattr(instance,f).all()]) - elif kwargs['action'] == 'post_add': - newset = set([unicode(t) for t in getattr(instance,f).all()]) - added = newset.difference(instance._stored_m2m.get(f, set())) - removed = instance._stored_m2m.get(f, set()).difference(newset) - subj = '{0} id {1} has been modified'.format(instance._meta.verbose_name, instance.id) - if added or removed: - send_simple_mail(settings.NOTIFICATION_FROM, - settings.NOTIFICATION_EMAIL, - "%s by %s" % (subj, get_current_user()), - "The following values for {0} were changed:\n\n{1}\n{2}\n\n".format( - instance._meta.get_field(f).verbose_name, - "\n".join([u"Added: %s" % a for a in added]), - "\n".join([u"Removed: %s" % r for r in removed]), - )) + instance = kwargs['instance'] + if getattr(instance, 'send_m2m_notification', False) and get_current_user(): + (cl, f) = sender.__name__.split('_') + if not hasattr(instance, '_stored_m2m'): + instance._stored_m2m={} + if kwargs['action'] == 'pre_clear': + instance._stored_m2m[f] = set([unicode(t) for t in getattr(instance,f).all()]) + elif kwargs['action'] == 'post_add': + newset = set([unicode(t) for t in getattr(instance,f).all()]) + added = newset.difference(instance._stored_m2m.get(f, set())) + removed = instance._stored_m2m.get(f, set()).difference(newset) + subj = '{0} id {1} has been modified'.format(instance._meta.verbose_name, instance.id) + if added or removed: + send_simple_mail(settings.NOTIFICATION_FROM, + settings.NOTIFICATION_EMAIL, + "%s by %s" % (subj, get_current_user()), + "The following values for {0} were changed:\n\n{1}\n{2}\n\n".format( + instance._meta.get_field(f).verbose_name, + "\n".join([u"Added: %s" % a for a in added]), + "\n".join([u"Removed: %s" % r for r in removed]), + )) def my_pre_delete_handler(sender, **kwargs): - instance = kwargs['instance'] - if getattr(instance, 'send_notification', False) and get_current_user(): - send_simple_mail(settings.NOTIFICATION_FROM, - settings.NOTIFICATION_EMAIL, - "%s id %s has been deleted by %s" % ( - instance._meta.verbose_name, - instance.id, - get_current_user()), - _get_full_text_representation(instance)) + instance = kwargs['instance'] + if getattr(instance, 'send_notification', False) and get_current_user(): + 
send_simple_mail(settings.NOTIFICATION_FROM, + settings.NOTIFICATION_EMAIL, + "%s id %s has been deleted by %s" % ( + instance._meta.verbose_name, + instance.id, + get_current_user()), + _get_full_text_representation(instance)) def my_post_save_handler(sender, **kwargs): - instance = kwargs['instance'] - if hasattr(instance, 'purge_urls'): - if callable(instance.purge_urls): - purgelist = instance.purge_urls() - else: - purgelist = instance.purge_urls - map(varnish_purge, purgelist) + instance = kwargs['instance'] + if hasattr(instance, 'purge_urls'): + if callable(instance.purge_urls): + purgelist = instance.purge_urls() + else: + purgelist = instance.purge_urls + map(varnish_purge, purgelist) def register_basic_signal_handlers(): - pre_save.connect(my_pre_save_handler) - pre_delete.connect(my_pre_delete_handler) - post_save.connect(my_post_save_handler) - m2m_changed.connect(my_m2m_changed_handler) + pre_save.connect(my_pre_save_handler) + pre_delete.connect(my_pre_delete_handler) + post_save.connect(my_post_save_handler) + m2m_changed.connect(my_m2m_changed_handler) diff --git a/pgweb/util/sitestruct.py b/pgweb/util/sitestruct.py index be3cdec2..64c04f15 100644 --- a/pgweb/util/sitestruct.py +++ b/pgweb/util/sitestruct.py @@ -1,22 +1,22 @@ from django.conf import settings def get_all_pages_struct(method='get_struct'): - """ - Return an iterator over all distinct pages on the site. - Each page is returned as a tuple consisting of: - (url, search weight, last_modified) + """ + Return an iterator over all distinct pages on the site. + Each page is returned as a tuple consisting of: + (url, search weight, last_modified) - It will do so by looking for the module "struct" in all - installed applications, and calling the get_struct() function - in all such modules. - """ - for app in settings.INSTALLED_APPS: - if app.startswith('pgweb.'): - try: - m = __import__(app+".struct", {}, {}, method) - except: - # Failed to import - probably module didn't exist - continue + It will do so by looking for the module "struct" in all + installed applications, and calling the get_struct() function + in all such modules. + """ + for app in settings.INSTALLED_APPS: + if app.startswith('pgweb.'): + try: + m = __import__(app+".struct", {}, {}, method) + except: + # Failed to import - probably module didn't exist + continue - if hasattr(m, method): - for x in getattr(m, method)(): yield x + if hasattr(m, method): + for x in getattr(m, method)(): yield x
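For reference, a minimal struct module satisfying this contract could look like the sketch below; the URLs, weight and timestamp are invented for illustration, and leaving the weight/timestamp as None where nothing sensible exists is an assumption here:

    # Hypothetical pgweb/<app>/struct.py implementing the contract above:
    from datetime import datetime

    def get_struct():
        # Yield one (url, search weight, last modified) tuple per page.
        yield ('/about/example/', 0.5, datetime(2019, 1, 17))
        yield ('/about/example/archive/', None, None)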
+ """ + for app in settings.INSTALLED_APPS: + if app.startswith('pgweb.'): + try: + m = __import__(app+".struct", {}, {}, method) + except: + # Failed to import - probably module didnd't exist + continue - if hasattr(m, method): - for x in getattr(m, method)(): yield x + if hasattr(m, method): + for x in getattr(m, method)(): yield x diff --git a/pgweb/util/templateloader.py b/pgweb/util/templateloader.py index a2fab276..20b45cae 100644 --- a/pgweb/util/templateloader.py +++ b/pgweb/util/templateloader.py @@ -10,15 +10,15 @@ except ImportError: _thread_locals = local() def initialize_template_collection(): - _thread_locals.templates = [] + _thread_locals.templates = [] def get_all_templates(): - return getattr(_thread_locals, 'templates', []) + return getattr(_thread_locals, 'templates', []) class TrackingTemplateLoader(django.template.loaders.base.Loader): - def get_template_sources(self, template_name): - _thread_locals.templates = getattr(_thread_locals, 'templates', []) + [template_name, ] - yield Origin(None) + def get_template_sources(self, template_name): + _thread_locals.templates = getattr(_thread_locals, 'templates', []) + [template_name, ] + yield Origin(None) - def get_contents(self, origin): - raise TemplateDoesNotExist(origin) + def get_contents(self, origin): + raise TemplateDoesNotExist(origin) diff --git a/tools/communityauth/generate_cryptkey.py b/tools/communityauth/generate_cryptkey.py index e39cb0b4..db7c7451 100755 --- a/tools/communityauth/generate_cryptkey.py +++ b/tools/communityauth/generate_cryptkey.py @@ -9,12 +9,12 @@ from Crypto import Random import base64 if __name__ == "__main__": - print "The next row contains a 32-byte (256-bit) symmetric crypto key." - print "This key should be used to integrate a community auth site." - print "Note that each site should have it's own key!!" - print "" + print "The next row contains a 32-byte (256-bit) symmetric crypto key." + print "This key should be used to integrate a community auth site." + print "Note that each site should have it's own key!!" + print "" - r = Random.new() - key = r.read(32) - print base64.b64encode(key) - + r = Random.new() + key = r.read(32) + print base64.b64encode(key) + diff --git a/tools/communityauth/sample/django/auth.py b/tools/communityauth/sample/django/auth.py index 5bdacc2c..2ae543a6 100644 --- a/tools/communityauth/sample/django/auth.py +++ b/tools/communityauth/sample/django/auth.py @@ -36,10 +36,10 @@ from Crypto import Random import time class AuthBackend(ModelBackend): - # We declare a fake backend that always fails direct authentication - - # since we should never be using direct authentication in the first place! - def authenticate(self, username=None, password=None): - raise Exception("Direct authentication not supported") + # We declare a fake backend that always fails direct authentication - + # since we should never be using direct authentication in the first place! + def authenticate(self, username=None, password=None): + raise Exception("Direct authentication not supported") #### @@ -48,90 +48,90 @@ class AuthBackend(ModelBackend): # Handle login requests by sending them off to the main site def login(request): - if not hasattr(settings, 'PGAUTH_REDIRECT'): - # No pgauth installed, so allow local installs. - from django.contrib.auth.views import login - return login(request, template_name='admin.html') + if not hasattr(settings, 'PGAUTH_REDIRECT'): + # No pgauth installed, so allow local installs. 
diff --git a/tools/communityauth/sample/django/auth.py b/tools/communityauth/sample/django/auth.py index 5bdacc2c..2ae543a6 100644 --- a/tools/communityauth/sample/django/auth.py +++ b/tools/communityauth/sample/django/auth.py @@ -36,10 +36,10 @@ from Crypto import Random import time class AuthBackend(ModelBackend): - # We declare a fake backend that always fails direct authentication - - # since we should never be using direct authentication in the first place! - def authenticate(self, username=None, password=None): - raise Exception("Direct authentication not supported") + # We declare a fake backend that always fails direct authentication - + # since we should never be using direct authentication in the first place! + def authenticate(self, username=None, password=None): + raise Exception("Direct authentication not supported") #### @@ -48,90 +48,90 @@ class AuthBackend(ModelBackend): # Handle login requests by sending them off to the main site def login(request): - if not hasattr(settings, 'PGAUTH_REDIRECT'): - # No pgauth installed, so allow local installs. - from django.contrib.auth.views import login - return login(request, template_name='admin.html') + if not hasattr(settings, 'PGAUTH_REDIRECT'): + # No pgauth installed, so allow local installs. + from django.contrib.auth.views import login + return login(request, template_name='admin.html') - if request.GET.has_key('next'): - # Put together a url-encoded dict of parameters we're getting back, - # including a small nonce at the beginning to make sure it doesn't - # encrypt the same way every time. - s = "t=%s&%s" % (int(time.time()), urllib.urlencode({'r': request.GET['next']})) - # Now encrypt it - r = Random.new() - iv = r.read(16) - encryptor = AES.new(SHA.new(settings.SECRET_KEY).digest()[:16], AES.MODE_CBC, iv) - cipher = encryptor.encrypt(s + ' ' * (16-(len(s) % 16))) # pad to 16 bytes + if request.GET.has_key('next'): + # Put together a url-encoded dict of parameters we're getting back, + # including a small nonce at the beginning to make sure it doesn't + # encrypt the same way every time. + s = "t=%s&%s" % (int(time.time()), urllib.urlencode({'r': request.GET['next']})) + # Now encrypt it + r = Random.new() + iv = r.read(16) + encryptor = AES.new(SHA.new(settings.SECRET_KEY).digest()[:16], AES.MODE_CBC, iv) + cipher = encryptor.encrypt(s + ' ' * (16-(len(s) % 16))) # pad to 16 bytes - return HttpResponseRedirect("%s?d=%s$%s" % ( - settings.PGAUTH_REDIRECT, - base64.b64encode(iv, "-_"), - base64.b64encode(cipher, "-_"), - )) - else: - return HttpResponseRedirect(settings.PGAUTH_REDIRECT) + return HttpResponseRedirect("%s?d=%s$%s" % ( + settings.PGAUTH_REDIRECT, + base64.b64encode(iv, "-_"), + base64.b64encode(cipher, "-_"), + )) + else: + return HttpResponseRedirect(settings.PGAUTH_REDIRECT) # Handle logout requests by logging out of this site and then # redirecting to log out from the main site as well. def logout(request): - if request.user.is_authenticated(): - django_logout(request) - return HttpResponseRedirect("%slogout/" % settings.PGAUTH_REDIRECT) + if request.user.is_authenticated(): + django_logout(request) + return HttpResponseRedirect("%slogout/" % settings.PGAUTH_REDIRECT) # Receive an authentication response from the main website and try # to log the user in.
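Before the receiving side below: the redirect payload is just AES-CBC over a url-encoded string, with URL-safe base64 ('-_' alternate characters) for the IV and ciphertext, joined by '$'. A round-trip sketch, using a made-up secret rather than a real SECRET_KEY:

    # Round-trip sketch of the encrypted redirect blob.
    import base64, time, urllib
    from Crypto.Cipher import AES
    from Crypto.Hash import SHA
    from Crypto import Random

    secret = 'not-a-real-django-secret-key'
    s = "t=%s&%s" % (int(time.time()), urllib.urlencode({'r': '/account/'}))
    iv = Random.new().read(16)
    enc = AES.new(SHA.new(secret).digest()[:16], AES.MODE_CBC, iv)
    blob = "%s$%s" % (base64.b64encode(iv, "-_"),
                      base64.b64encode(enc.encrypt(s + ' ' * (16 - (len(s) % 16))), "-_"))

    # ...and the receiving side reverses it:
    ivs, datas = blob.split('$')
    dec = AES.new(SHA.new(secret).digest()[:16], AES.MODE_CBC, base64.b64decode(ivs, "-_"))
    print dec.decrypt(base64.b64decode(datas, "-_")).rstrip(' ')   # t=...&r=%2Faccount%2F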
def auth_receive(request): - if request.GET.has_key('s') and request.GET['s'] == "logout": - # This was a logout request - return HttpResponseRedirect('/') + if request.GET.has_key('s') and request.GET['s'] == "logout": + # This was a logout request + return HttpResponseRedirect('/') - if not request.GET.has_key('i'): - return HttpResponse("Missing IV in url!", status=400) - if not request.GET.has_key('d'): - return HttpResponse("Missing data in url!", status=400) + if not request.GET.has_key('i'): + return HttpResponse("Missing IV in url!", status=400) + if not request.GET.has_key('d'): + return HttpResponse("Missing data in url!", status=400) - # Set up an AES object and decrypt the data we received - decryptor = AES.new(base64.b64decode(settings.PGAUTH_KEY), - AES.MODE_CBC, - base64.b64decode(str(request.GET['i']), "-_")) - s = decryptor.decrypt(base64.b64decode(str(request.GET['d']), "-_")).rstrip(' ') + # Set up an AES object and decrypt the data we received + decryptor = AES.new(base64.b64decode(settings.PGAUTH_KEY), + AES.MODE_CBC, + base64.b64decode(str(request.GET['i']), "-_")) + s = decryptor.decrypt(base64.b64decode(str(request.GET['d']), "-_")).rstrip(' ') - # Now un-urlencode it - try: - data = urlparse.parse_qs(s, strict_parsing=True) - except ValueError: - return HttpResponse("Invalid encrypted data received.", status=400) + # Now un-urlencode it + try: + data = urlparse.parse_qs(s, strict_parsing=True) + except ValueError: + return HttpResponse("Invalid encrypted data received.", status=400) - # Check the timestamp in the authentication - if (int(data['t'][0]) < time.time() - 10): - return HttpResponse("Authentication token too old.", status=400) + # Check the timestamp in the authentication + if (int(data['t'][0]) < time.time() - 10): + return HttpResponse("Authentication token too old.", status=400) - # Update the user record (if any) - try: - user = User.objects.get(username=data['u'][0]) - # User found, let's see if any important fields have changed - changed = False - if user.first_name != data['f'][0]: - user.first_name = data['f'][0] - changed = True - if user.last_name != data['l'][0]: - user.last_name = data['l'][0] - changed = True - if user.email != data['e'][0]: - user.email = data['e'][0] - changed= True - if changed: - user.save() - except User.DoesNotExist: - # User not found, create it! + # Update the user record (if any) + try: + user = User.objects.get(username=data['u'][0]) + # User found, let's see if any important fields have changed + changed = False + if user.first_name != data['f'][0]: + user.first_name = data['f'][0] + changed = True + if user.last_name != data['l'][0]: + user.last_name = data['l'][0] + changed = True + if user.email != data['e'][0]: + user.email = data['e'][0] + changed= True + if changed: + user.save() + except User.DoesNotExist: + # User not found, create it! - # NOTE! We have some legacy users where there is a user in - # the database with a different userid. Instead of trying to - # somehow fix that live, give a proper error message and - # have somebody look at it manually. - if User.objects.filter(email=data['e'][0]).exists(): - return HttpResponse("""A user with email %s already exists, but with + # NOTE! We have some legacy users where there is a user in + # the database with a different userid. Instead of trying to + # somehow fix that live, give a proper error message and + # have somebody look at it manually. 
+ if User.objects.filter(email=data['e'][0]).exists(): + return HttpResponse("""A user with email %s already exists, but with a different username than %s. This is almost certainly caused by some legacy data in our database. @@ -142,51 +142,51 @@ for you. We apologize for the inconvenience. """ % (data['e'][0], data['u'][0]), content_type='text/plain') - if hasattr(settings, 'PGAUTH_CREATEUSER_CALLBACK'): - res = getattr(settings, 'PGAUTH_CREATEUSER_CALLBACK')( - data['u'][0], - data['e'][0], - data['f'][0], - data['l'][0], - ) - # If anything is returned, we'll return that as our result. - # If None is returned, it means go ahead and create the user. - if res: - return res + if hasattr(settings, 'PGAUTH_CREATEUSER_CALLBACK'): + res = getattr(settings, 'PGAUTH_CREATEUSER_CALLBACK')( + data['u'][0], + data['e'][0], + data['f'][0], + data['l'][0], + ) + # If anything is returned, we'll return that as our result. + # If None is returned, it means go ahead and create the user. + if res: + return res - user = User(username=data['u'][0], - first_name=data['f'][0], - last_name=data['l'][0], - email=data['e'][0], - password='setbypluginnotasha1', - ) - user.save() + user = User(username=data['u'][0], + first_name=data['f'][0], + last_name=data['l'][0], + email=data['e'][0], + password='setbypluginnotasha1', + ) + user.save() - # Ok, we have a proper user record. Now tell django that - # we're authenticated so it persists it in the session. Before - # we do that, we have to annotate it with the backend information. - user.backend = "%s.%s" % (AuthBackend.__module__, AuthBackend.__name__) - django_login(request, user) + # Ok, we have a proper user record. Now tell django that + # we're authenticated so it persists it in the session. Before + # we do that, we have to annotate it with the backend information. + user.backend = "%s.%s" % (AuthBackend.__module__, AuthBackend.__name__) + django_login(request, user) - # Finally, check if we have a data package that tells us where to - # redirect the user. - if data.has_key('d'): - (ivs, datas) = data['d'][0].split('$') - decryptor = AES.new(SHA.new(settings.SECRET_KEY).digest()[:16], - AES.MODE_CBC, - base64.b64decode(ivs, "-_")) - s = decryptor.decrypt(base64.b64decode(datas, "-_")).rstrip(' ') - try: - rdata = urlparse.parse_qs(s, strict_parsing=True) - except ValueError: - return HttpResponse("Invalid encrypted data received.", status=400) - if rdata.has_key('r'): - # Redirect address - return HttpResponseRedirect(rdata['r'][0]) - # No redirect specified, see if we have it in our settings - if hasattr(settings, 'PGAUTH_REDIRECT_SUCCESS'): - return HttpResponseRedirect(settings.PGAUTH_REDIRECT_SUCCESS) - return HttpResponse("Authentication successful, but don't know where to redirect!", status=500) + # Finally, check if we have a data package that tells us where to + # redirect the user.
+ if data.has_key('d'): + (ivs, datas) = data['d'][0].split('$') + decryptor = AES.new(SHA.new(settings.SECRET_KEY).digest()[:16], + AES.MODE_CBC, + base64.b64decode(ivs, "-_")) + s = decryptor.decrypt(base64.b64decode(datas, "-_")).rstrip(' ') + try: + rdata = urlparse.parse_qs(s, strict_parsing=True) + except ValueError: + return HttpResponse("Invalid encrypted data received.", status=400) + if rdata.has_key('r'): + # Redirect address + return HttpResponseRedirect(rdata['r'][0]) + # No redirect specified, see if we have it in our settings + if hasattr(settings, 'PGAUTH_REDIRECT_SUCCESS'): + return HttpResponseRedirect(settings.PGAUTH_REDIRECT_SUCCESS) + return HttpResponse("Authentication successful, but don't know where to redirect!", status=500) # Perform a search in the central system. Note that the results are returned as an @@ -197,29 +197,29 @@ We apologize for the inconvenience. # Unlike the authentication, searching does not involve the browser - we just make # a direct http call. def user_search(searchterm=None, userid=None): - # If upstream isn't responding quickly, it's not going to respond at all, and - # 10 seconds is already quite long. - socket.setdefaulttimeout(10) - if userid: - q = {'u': userid} - else: - q = {'s': searchterm} + # If upstream isn't responding quickly, it's not going to respond at all, and + # 10 seconds is already quite long. + socket.setdefaulttimeout(10) + if userid: + q = {'u': userid} + else: + q = {'s': searchterm} - u = urllib.urlopen('%ssearch/?%s' % ( - settings.PGAUTH_REDIRECT, - urllib.urlencode(q), - )) - (ivs, datas) = u.read().split('&') - u.close() + u = urllib.urlopen('%ssearch/?%s' % ( + settings.PGAUTH_REDIRECT, + urllib.urlencode(q), + )) + (ivs, datas) = u.read().split('&') + u.close() - # Decryption time - decryptor = AES.new(base64.b64decode(settings.PGAUTH_KEY), - AES.MODE_CBC, - base64.b64decode(ivs, "-_")) - s = decryptor.decrypt(base64.b64decode(datas, "-_")).rstrip(' ') - j = json.loads(s) + # Decryption time + decryptor = AES.new(base64.b64decode(settings.PGAUTH_KEY), + AES.MODE_CBC, + base64.b64decode(ivs, "-_")) + s = decryptor.decrypt(base64.b64decode(datas, "-_")).rstrip(' ') + j = json.loads(s) - return j + return j # Import a user into the local authentication system. Will initially # make a search for it, and if anything other than one entry is returned @@ -230,18 +230,18 @@ def user_search(searchterm=None, userid=None): # The call to this function should normally be wrapped in a transaction, # and this function itself will make no attempt to do anything about that. 
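A hypothetical caller honouring that transaction contract might look like the sketch below; the 'alice' search term is invented, and treating the 'u' field of a search hit as the userid to import is an assumption made purely for illustration:

    # Hypothetical import flow, wrapped in a transaction as advised above.
    from django.db import transaction

    hits = user_search(searchterm='alice')   # list of dicts with u/f/l/e keys
    if len(hits) == 1:
        with transaction.atomic():
            user_import(hits[0]['u'])        # assumption: 'u' doubles as the userid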
def user_import(uid): - u = user_search(userid=uid) - if len(u) != 1: - raise Exception("Internal error, duplicate or no user found") + u = user_search(userid=uid) + if len(u) != 1: + raise Exception("Internal error, duplicate or no user found") - u = u[0] + u = u[0] - if User.objects.filter(username=u['u']).exists(): - raise Exception("User already exists") + if User.objects.filter(username=u['u']).exists(): + raise Exception("User already exists") - User(username=u['u'], - first_name=u['f'], - last_name=u['l'], - email=u['e'], - password='setbypluginnotsha1', - ).save() + User(username=u['u'], + first_name=u['f'], + last_name=u['l'], + email=u['e'], + password='setbypluginnotsha1', + ).save() diff --git a/tools/communityauth/test_auth.py b/tools/communityauth/test_auth.py index b003b4e6..d5c60c71 100755 --- a/tools/communityauth/test_auth.py +++ b/tools/communityauth/test_auth.py @@ -15,54 +15,54 @@ from optparse import OptionParser if __name__ == "__main__": - parser = OptionParser() - parser.add_option("-k", "--key", dest="key") - parser.add_option("-u", "--user", dest="user") - parser.add_option("-f", "--first", dest="first") - parser.add_option("-l", "--last", dest="last") - parser.add_option("-e", "--email", dest="email") - parser.add_option("-s", "--suburl", dest="suburl") + parser = OptionParser() + parser.add_option("-k", "--key", dest="key") + parser.add_option("-u", "--user", dest="user") + parser.add_option("-f", "--first", dest="first") + parser.add_option("-l", "--last", dest="last") + parser.add_option("-e", "--email", dest="email") + parser.add_option("-s", "--suburl", dest="suburl") - (options, args) = parser.parse_args() + (options, args) = parser.parse_args() - if len(args) != 0: - parser.print_usage() - sys.exit(1) + if len(args) != 0: + parser.print_usage() + sys.exit(1) - if not options.key: - options.key = raw_input("Enter key (BASE64 encoded): ") - if not options.user: - options.user = raw_input("Enter username: ") - if not options.first: - options.first = "FirstName" - if not options.last: - options.last = "LastName" - if not options.email: - options.email = "test@example.com" + if not options.key: + options.key = raw_input("Enter key (BASE64 encoded): ") + if not options.user: + options.user = raw_input("Enter username: ") + if not options.first: + options.first = "FirstName" + if not options.last: + options.last = "LastName" + if not options.email: + options.email = "test@example.com" - # This is basically a rip of the view in accounts/views.py - info = { - 'u': options.user, - 'f': options.first, - 'l': options.last, - 'e': options.email, - } - if options.suburl: - info['su'] = options.suburl + # This is basically a rip of the view in accounts/views.py + info = { + 'u': options.user, + 'f': options.first, + 'l': options.last, + 'e': options.email, + } + if options.suburl: + info['su'] = options.suburl - # Turn this into an URL. Make sure the timestamp is always first, that makes - # the first block more random.. - # Since this is a fake authentication, put it 5 minutes into the future to - # give more time to copy/paste it. - s = "t=%s&%s" % (int(time.time()+300), urllib.urlencode(info)) + # Turn this into an URL. Make sure the timestamp is always first, that makes + # the first block more random.. + # Since this is a fake authentication, put it 5 minutes into the future to + # give more time to copy/paste it. 
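For orientation, the lines that follow print a query string of roughly this shape; the values differ on every run since the IV is random:

    # Illustrative output only:
    #   Paste the following after the receiving url:
    #   ?i=qLbkZb0Ch3rJH-x2vDmPXg==&d=9n1pZ3JhYk...1WyQ=
    # Because t is set 300 seconds ahead, the receiver's "token too old"
    # check (see sample/django/auth.py above) accepts it for about five minutes.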
+ s = "t=%s&%s" % (int(time.time()+300), urllib.urlencode(info)) - r = Random.new() - iv = r.read(16) - encryptor = AES.new(base64.b64decode(options.key), AES.MODE_CBC, iv) - cipher = encryptor.encrypt(s + ' ' * (16-(len(s) % 16))) + r = Random.new() + iv = r.read(16) + encryptor = AES.new(base64.b64decode(options.key), AES.MODE_CBC, iv) + cipher = encryptor.encrypt(s + ' ' * (16-(len(s) % 16))) - print "Paste the following after the receiving url:" - print "?i=%s&d=%s" % ( - base64.b64encode(iv, "-_"), - base64.b64encode(cipher, "-_"), - ) + print "Paste the following after the receiving url:" + print "?i=%s&d=%s" % ( + base64.b64encode(iv, "-_"), + base64.b64encode(cipher, "-_"), + ) diff --git a/tools/docs/docload.py b/tools/docs/docload.py index 2965f9c8..1d467d37 100755 --- a/tools/docs/docload.py +++ b/tools/docs/docload.py @@ -19,62 +19,62 @@ re_titlematch = re.compile('([^<]+)', re.IGNORECASE) ## Load a single page def load_doc_file(filename, f): - tidyopts = dict(drop_proprietary_attributes=1, - alt_text='', - hide_comments=1, - output_xhtml=1, - show_body_only=1, - clean=1, - char_encoding='utf8', - indent='auto', - ) + tidyopts = dict(drop_proprietary_attributes=1, + alt_text='', + hide_comments=1, + output_xhtml=1, + show_body_only=1, + clean=1, + char_encoding='utf8', + indent='auto', + ) - # Postgres 10 started using xml toolchain and now produces docmentation in utf8. So we need - # to figure out which version it is. - rawcontents = f.read() - if rawcontents.startswith(' 0: - tidyopts['indent'] = 'no' + # PostgreSQL prior to 11 used an older toolchain to build the docs, which does not support + # indented HTML. So turn it off on those, but keep it on the newer versions where it works, + # because it makes things a lot easier to debug. 
+ if float(ver) < 11 and float(ver) > 0: + tidyopts['indent'] = 'no' - contents = unicode(rawcontents, encoding) + contents = unicode(rawcontents, encoding) - tm = re_titlematch.search(contents) - if tm: - title = tm.group(1) - else: - title = "" - if not quiet: print "--- file: %s (%s) ---" % (filename, title) + tm = re_titlematch.search(contents) + if tm: + title = tm.group(1) + else: + title = "" + if not quiet: print "--- file: %s (%s) ---" % (filename, title) - s = tidy.parseString(contents.encode('utf-8'), **tidyopts) - curs.execute("INSERT INTO docs (file, version, title, content) VALUES (%(f)s, %(v)s, %(t)s, %(c)s)",{ - 'f': filename, - 'v': ver, - 't': title, - 'c': str(s), - }) - global pagecount - pagecount += 1 + s = tidy.parseString(contents.encode('utf-8'), **tidyopts) + curs.execute("INSERT INTO docs (file, version, title, content) VALUES (%(f)s, %(v)s, %(t)s, %(c)s)",{ + 'f': filename, + 'v': ver, + 't': title, + 'c': str(s), + }) + global pagecount + pagecount += 1 ## Main execution parser = OptionParser(usage="usage: %prog [options] ") parser.add_option("-q", "--quiet", action="store_true", dest="quiet", - help="Run quietly") + help="Run quietly") (options, args) = parser.parse_args() if len(args) != 2: - parser.print_usage() - sys.exit(1) + parser.print_usage() + sys.exit(1) quiet = options.quiet ver = sys.argv[1] @@ -84,8 +84,8 @@ config = ConfigParser() config.read(os.path.join(os.path.abspath(os.path.dirname(__file__)), 'docload.ini')) if not os.path.isfile(tarfilename): - print "File %s not found" % tarfilename - sys.exit(1) + print "File %s not found" % tarfilename + sys.exit(1) tf = tarfile.open(tarfilename) @@ -96,8 +96,8 @@ curs = connection.cursor() curs.execute("SELECT current FROM core_version WHERE tree=%(v)s", {'v': ver}) r = curs.fetchall() if len(r) != 1: - print "Version %s not found in the system, cannot load!" % ver - sys.exit(1) + print "Version %s not found in the system, cannot load!" % ver + sys.exit(1) iscurrent = r[0][0] @@ -109,18 +109,18 @@ curs.execute("DELETE FROM docs WHERE version=%(v)s", {'v': ver}) re_htmlfile = re.compile('[^/]*/doc/src/sgml/html/.*') re_tarfile = re.compile('[^/]*/doc/postgres.tar.gz$') for member in tf: - if re_htmlfile.match(member.name): - load_doc_file(os.path.basename(member.name), tf.extractfile(member)) - if re_tarfile.match(member.name): - f = tf.extractfile(member) - inner_tar = tarfile.open(fileobj=f) - for inner_member in inner_tar: - # Some old versions have index.html as a symlink - so let's - # just ignore all symlinks to be on the safe side. - if inner_member.issym(): continue + if re_htmlfile.match(member.name): + load_doc_file(os.path.basename(member.name), tf.extractfile(member)) + if re_tarfile.match(member.name): + f = tf.extractfile(member) + inner_tar = tarfile.open(fileobj=f) + for inner_member in inner_tar: + # Some old versions have index.html as a symlink - so let's + # just ignore all symlinks to be on the safe side. 
+ if inner_member.issym(): continue - if inner_member.name.endswith('.html') or inner_member.name.endswith('.htm'): - load_doc_file(inner_member.name, inner_tar.extractfile(inner_member)) + if inner_member.name.endswith('.html') or inner_member.name.endswith('.htm'): + load_doc_file(inner_member.name, inner_tar.extractfile(inner_member)) tf.close() # Update the docs loaded timestamp @@ -128,12 +128,12 @@ curs.execute("UPDATE core_version SET docsloaded=CURRENT_TIMESTAMP WHERE tree=%( # Issue varnish purge for all docs of this version if ver == "0": - # Special handling of developer docs... - ver = "devel" + # Special handling of developer docs... + ver = "devel" curs.execute("SELECT varnish_purge('^/docs/' || %(v)s || '/')", {'v': ver}) if iscurrent: - curs.execute("SELECT varnish_purge('^/docs/current/')") + curs.execute("SELECT varnish_purge('^/docs/current/')") connection.commit() connection.close() diff --git a/tools/ftp/spider_ftp.py b/tools/ftp/spider_ftp.py index d601cfc0..abaef4fc 100755 --- a/tools/ftp/spider_ftp.py +++ b/tools/ftp/spider_ftp.py @@ -20,71 +20,71 @@ exclude_roots = ['/repos', ] allnodes = {} def read_file(fn): - f = codecs.open(fn, 'r', encoding='utf-8', errors='replace') - t = f.read() - f.close() - return t + f = codecs.open(fn, 'r', encoding='utf-8', errors='replace') + t = f.read() + f.close() + return t def parse_directory(dirname, rootlen): - mynode = {} - for f in os.listdir(dirname): - if f.startswith(".") and not f == ".message": continue - if f == "sync_timestamp": continue + mynode = {} + for f in os.listdir(dirname): + if f.startswith(".") and not f == ".message": continue + if f == "sync_timestamp": continue - fn = os.path.join(dirname, f) - if os.path.isdir(fn): - # Can be a directory itself, or a symbolic link to a directory - if os.path.islink(fn): - # This is a symbolic link - mynode[f] = { - 't': 'l', - 'd': os.readlink(fn).strip("/"), - } - else: - # This is a subdirectory, recurse into it, unless it happens - # to be on our exclude list. - if not fn[rootlen:] in exclude_roots: - parse_directory(fn, rootlen) - mynode[f] = { - 't': 'd', - } - else: - # This a file - stat = os.stat(fn) - mynode[f] = { - 't': 'f', - 's': stat.st_size, - 'd': datetime.fromtimestamp(stat.st_mtime), - } - if f == "README" or f == "CURRENT_MAINTAINER" or f == ".message": - mynode[f]['c'] = read_file(fn) + fn = os.path.join(dirname, f) + if os.path.isdir(fn): + # Can be a directory itself, or a symbolic link to a directory + if os.path.islink(fn): + # This is a symbolic link + mynode[f] = { + 't': 'l', + 'd': os.readlink(fn).strip("/"), + } + else: + # This is a subdirectory, recurse into it, unless it happens + # to be on our exclude list. + if not fn[rootlen:] in exclude_roots: + parse_directory(fn, rootlen) + mynode[f] = { + 't': 'd', + } + else: + # This a file + stat = os.stat(fn) + mynode[f] = { + 't': 'f', + 's': stat.st_size, + 'd': datetime.fromtimestamp(stat.st_mtime), + } + if f == "README" or f == "CURRENT_MAINTAINER" or f == ".message": + mynode[f]['c'] = read_file(fn) - allnodes[dirname[rootlen:].strip("/")] = mynode + allnodes[dirname[rootlen:].strip("/")] = mynode def Usage(): - print "Usage: spider_ftp.py " - print "" - print "If starts with http[s]://, the file will be uploaded" - print "to that URL instead of written to the filesystem." - sys.exit(1) + print "Usage: spider_ftp.py " + print "" + print "If starts with http[s]://, the file will be uploaded" + print "to that URL instead of written to the filesystem." 
+ sys.exit(1) if len(sys.argv) != 3: Usage() parse_directory(sys.argv[1], len(sys.argv[1])) if sys.argv[2].startswith("http://") or sys.argv[2].startswith("https://"): - o = urllib2.build_opener(urllib2.HTTPHandler) - r = urllib2.Request(sys.argv[2], data=pickle.dumps(allnodes)) - r.add_header('Content-type', 'application/octet-stream') - r.add_header('Host', 'www.postgresql.org') - r.get_method = lambda: 'PUT' - u = o.open(r) - x = u.read() - if x != "NOT CHANGED" and x != "OK": - print "Failed to upload: %s" % x - sys.exit(1) + o = urllib2.build_opener(urllib2.HTTPHandler) + r = urllib2.Request(sys.argv[2], data=pickle.dumps(allnodes)) + r.add_header('Content-type', 'application/octet-stream') + r.add_header('Host', 'www.postgresql.org') + r.get_method = lambda: 'PUT' + u = o.open(r) + x = u.read() + if x != "NOT CHANGED" and x != "OK": + print "Failed to upload: %s" % x + sys.exit(1) else: - f = open(sys.argv[2] + ".tmp", "wb") - pickle.dump(allnodes, f) - f.close() - os.rename(sys.argv[2] + ".tmp", sys.argv[2]) + f = open(sys.argv[2] + ".tmp", "wb") + pickle.dump(allnodes, f) + f.close() + os.rename(sys.argv[2] + ".tmp", sys.argv[2]) diff --git a/tools/ftp/spider_yum.py b/tools/ftp/spider_yum.py index cdfb8201..bc688d93 100755 --- a/tools/ftp/spider_yum.py +++ b/tools/ftp/spider_yum.py @@ -9,104 +9,104 @@ from decimal import Decimal from tempfile import NamedTemporaryFile platform_names = { - 'redhat': 'Red Hat Enterprise Linux {0}', - 'centos': 'CentOS {0}', - 'sl': 'Scientific Linux {0}', - 'fedora': 'Fedora {0}', - 'oraclelinux': 'Oracle Enterprise Linux {0}', - 'ami201503-': 'Amazon Linux AMI201503 {0}', + 'redhat': 'Red Hat Enterprise Linux {0}', + 'centos': 'CentOS {0}', + 'sl': 'Scientific Linux {0}', + 'fedora': 'Fedora {0}', + 'oraclelinux': 'Oracle Enterprise Linux {0}', + 'ami201503-': 'Amazon Linux AMI201503 {0}', } platform_sort = { - 'redhat': 1, - 'centos': 2, - 'sl': 3, - 'fedora': 4, - 'oraclelinux': 5, - 'ami201503-': 6, + 'redhat': 1, + 'centos': 2, + 'sl': 3, + 'fedora': 4, + 'oraclelinux': 5, + 'ami201503-': 6, } archs = ['x86_64', 'i386', 'i686', 'ppc64le'] def generate_platform(dirname, familyprefix, ver, installer, systemd): - for f in platform_names.keys(): - yield ('%s-%s' % (f, ver), { - 't': platform_names[f].format(ver), - 'p': os.path.join(dirname, '{0}-{1}'.format(familyprefix, ver)), - 'f': f, - 'i': installer, - 'd': systemd, - 's': platform_sort[f]*1000-ver, - 'found': False, - }) + for f in platform_names.keys(): + yield ('%s-%s' % (f, ver), { + 't': platform_names[f].format(ver), + 'p': os.path.join(dirname, '{0}-{1}'.format(familyprefix, ver)), + 'f': f, + 'i': installer, + 'd': systemd, + 's': platform_sort[f]*1000-ver, + 'found': False, + }) def get_redhat_systemd(ver): - return (ver >= 7) + return (ver >= 7) platforms = {} for v in range(5, 7+1): - platforms.update(dict(generate_platform('redhat', 'rhel', v, 'yum', get_redhat_systemd(v)))) + platforms.update(dict(generate_platform('redhat', 'rhel', v, 'yum', get_redhat_systemd(v)))) for v in range(24, 30+1): - platforms.update(dict(generate_platform('fedora', 'fedora', v, 'dnf', True))) + platforms.update(dict(generate_platform('fedora', 'fedora', v, 'dnf', True))) re_reporpm = re.compile('^pgdg-([a-z0-9-]+)([0-9]{2})-[^-]+-(\d+)\.noarch\.rpm$') re_versiondirs = re.compile(r'^\d+(\.\d+)?$') if __name__ == "__main__": - parser = argparse.ArgumentParser(description="Spider repo RPMs") - parser.add_argument('yumroot', type=str, help='YUM root path') - parser.add_argument('target', type=str, 
help='Target URL or filename') + parser = argparse.ArgumentParser(description="Spider repo RPMs") + parser.add_argument('yumroot', type=str, help='YUM root path') + parser.add_argument('target', type=str, help='Target URL or filename') - args = parser.parse_args() + args = parser.parse_args() - versions = sorted([v for v in os.listdir(args.yumroot) if re_versiondirs.match(v)], key=Decimal, reverse=True) - reporpms = {} - for v in versions: - reporpms[v] = {} - vroot = os.path.join(args.yumroot, v) - for dirpath, dirnames, filenames in os.walk(vroot): - rmatches = filter(None, (re_reporpm.match(f) for f in sorted(filenames, reverse=True))) + versions = sorted([v for v in os.listdir(args.yumroot) if re_versiondirs.match(v)], key=Decimal, reverse=True) + reporpms = {} + for v in versions: + reporpms[v] = {} + vroot = os.path.join(args.yumroot, v) + for dirpath, dirnames, filenames in os.walk(vroot): + rmatches = filter(None, (re_reporpm.match(f) for f in sorted(filenames, reverse=True))) - if rmatches: - familypath = os.path.join(*dirpath.split('/')[-2:]) - (familypath, arch) = familypath.rsplit('-', 1) + if rmatches: + familypath = os.path.join(*dirpath.split('/')[-2:]) + (familypath, arch) = familypath.rsplit('-', 1) - for r in rmatches: - shortdist, shortver, ver = r.groups(1) + for r in rmatches: + shortdist, shortver, ver = r.groups(1) - found = False - for p, pinfo in platforms.items(): - if pinfo['p'] == familypath and pinfo['f'] == shortdist: - if not reporpms[v].has_key(p): - reporpms[v][p] = {} - reporpms[v][p][arch] = max(ver, reporpms[v][p].get(arch, 0)) - platforms[p]['found'] = True - break - else: - # DEBUG -# print "%s (%s) not found in platform list" % (familypath, shortdist) - pass + found = False + for p, pinfo in platforms.items(): + if pinfo['p'] == familypath and pinfo['f'] == shortdist: + if not reporpms[v].has_key(p): + reporpms[v][p] = {} + reporpms[v][p][arch] = max(ver, reporpms[v][p].get(arch, 0)) + platforms[p]['found'] = True + break + else: + # DEBUG +# print "%s (%s) not found in platform list" % (familypath, shortdist) + pass - # Filter all platforms that are not used - platforms = {k:v for k,v in platforms.iteritems() if v['found']} - for k,v in platforms.iteritems(): - del v['found'] + # Filter all platforms that are not used + platforms = {k:v for k,v in platforms.iteritems() if v['found']} + for k,v in platforms.iteritems(): + del v['found'] - j = json.dumps({'platforms': platforms, 'reporpms': reporpms}) + j = json.dumps({'platforms': platforms, 'reporpms': reporpms}) - if args.target.startswith('http://') or args.target.startswith('https://'): - o = urllib2.build_opener(urllib2.HTTPHandler) - r = urllib2.Request(sys.argv[2], data=j) - r.add_header('Content-type', 'application/json') - r.add_header('Host', 'www.postgresql.org') - r.get_method = lambda: 'PUT' - u = o.open(r) - x = u.read() - if x != "NOT CHANGED" and x != "OK": - print "Failed to upload: %s" % x - sys.exit(1) - else: - with NamedTemporaryFile(dir=os.path.dirname(os.path.abspath(args.target))) as f: - f.write(j) - f.flush() - if os.path.isfile(args.target): - os.unlink(args.target) - os.link(f.name, args.target) + if args.target.startswith('http://') or args.target.startswith('https://'): + o = urllib2.build_opener(urllib2.HTTPHandler) + r = urllib2.Request(sys.argv[2], data=j) + r.add_header('Content-type', 'application/json') + r.add_header('Host', 'www.postgresql.org') + r.get_method = lambda: 'PUT' + u = o.open(r) + x = u.read() + if x != "NOT CHANGED" and x != "OK": + print "Failed to 
upload: %s" % x + sys.exit(1) + else: + with NamedTemporaryFile(dir=os.path.dirname(os.path.abspath(args.target))) as f: + f.write(j) + f.flush() + if os.path.isfile(args.target): + os.unlink(args.target) + os.link(f.name, args.target) diff --git a/tools/localhtmlvalidate/localhtmlvalidate.py b/tools/localhtmlvalidate/localhtmlvalidate.py index f27e6487..ff08ae48 100755 --- a/tools/localhtmlvalidate/localhtmlvalidate.py +++ b/tools/localhtmlvalidate/localhtmlvalidate.py @@ -22,75 +22,75 @@ import HTMLParser BOUNDARY="-=--=foobar-=--=" def encode_multipart_formdata(fields, files): - L = [] - for (key, value) in fields: - L.append('--' + BOUNDARY) - L.append('Content-Disposition: form-data; name="%s"' % key) - L.append('') - L.append(value) - for (key, filename, value) in files: - L.append('--' + BOUNDARY) - L.append('Content-Disposition: form-data; name="%s"; filename="%s"' % (key, filename)) - L.append('Content-Type: text/html') - L.append('') - L.append(value) - L.append('--' + BOUNDARY + '--') - L.append('') - body = "\r\n".join(L) - return body + L = [] + for (key, value) in fields: + L.append('--' + BOUNDARY) + L.append('Content-Disposition: form-data; name="%s"' % key) + L.append('') + L.append(value) + for (key, filename, value) in files: + L.append('--' + BOUNDARY) + L.append('Content-Disposition: form-data; name="%s"; filename="%s"' % (key, filename)) + L.append('Content-Type: text/html') + L.append('') + L.append(value) + L.append('--' + BOUNDARY + '--') + L.append('') + body = "\r\n".join(L) + return body if __name__=="__main__": - if len(sys.argv) != 2: - print "Usage: localhtmlvalidate.py " - sys.exit(1) + if len(sys.argv) != 2: + print "Usage: localhtmlvalidate.py " + sys.exit(1) - contents = urllib.urlopen(sys.argv[1]).read() + contents = urllib.urlopen(sys.argv[1]).read() - # Try to figure out where the actual contents start :) - try: - firstline = contents.splitlines().index('
…')
- except ValueError:
- firstline = 0
+ # Try to figure out where the actual contents start :)
+ try:
+ firstline = contents.splitlines().index('…')
+ except ValueError:
+ firstline = 0

- # Generate a form body
- body = encode_multipart_formdata([
- ('charset', 'utf-8'),
- ('doctype', 'inline'),
- ('group', '0'),
- ('verbose', '1'),
- ],
- [('uploaded_file', 'test.html', contents)])
+ # Generate a form body
+ body = encode_multipart_formdata([
+ ('charset', 'utf-8'),
+ ('doctype', 'inline'),
+ ('group', '0'),
+ ('verbose', '1'),
+ ],
+ [('uploaded_file', 'test.html', contents)])

- # Now submit it to the w3c validator
- h = httplib.HTTP("validator.w3.org")
- h.putrequest("POST", "/check")
- h.putheader("User-Agent: localcheck-tester/0.0")
- h.putheader("content-type", "multipart/form-data; boundary=%s" % BOUNDARY)
- h.putheader("content-length", str(len(body)))
- h.endheaders()
- h.send(body)
- errcode, errmsg, headers = h.getreply()
- rbody = h.getfile().read()
- if headers['x-w3c-validator-status'] == 'Valid':
- print "Page validates!"
- sys.exit(0)
- elif headers['x-w3c-validator-status'] == 'Invalid':
- print "Invalid!"
- print "Errors: %s" % headers['x-w3c-validator-errors']
- print "Warnings: %s" % headers['x-w3c-validator-warnings']
- hp = HTMLParser.HTMLParser()
- for m in re.findall('….*?…', rbody, re.DOTALL):
- r = re.search('Line (\d+).*(.*?)', m, re.DOTALL)
- print "Line %s (should be around %s): %s" % (r.group(1), int(r.group(1)) - firstline, hp.unescape(r.group(2)))
+ # Now submit it to the w3c validator
+ h = httplib.HTTP("validator.w3.org")
+ h.putrequest("POST", "/check")
+ h.putheader("User-Agent: localcheck-tester/0.0")
+ h.putheader("content-type", "multipart/form-data; boundary=%s" % BOUNDARY)
+ h.putheader("content-length", str(len(body)))
+ h.endheaders()
+ h.send(body)
+ errcode, errmsg, headers = h.getreply()
+ rbody = h.getfile().read()
+ if headers['x-w3c-validator-status'] == 'Valid':
+ print "Page validates!"
+ sys.exit(0)
+ elif headers['x-w3c-validator-status'] == 'Invalid':
+ print "Invalid!"
+ print "Errors: %s" % headers['x-w3c-validator-errors']
+ print "Warnings: %s" % headers['x-w3c-validator-warnings']
+ hp = HTMLParser.HTMLParser()
+ for m in re.findall('….*?…', rbody, re.DOTALL):
+ r = re.search('Line (\d+).*(.*?)', m, re.DOTALL)
+ print "Line %s (should be around %s): %s" % (r.group(1), int(r.group(1)) - firstline, hp.unescape(r.group(2)))

- r2 = re.search('(.*?)(.*?)(.*?)', unicode(m, 'utf8'), re.DOTALL)
- if r2:
- s = u"%s%s%s" % r2.groups()
- print "Source: %s" % hp.unescape(s).encode('utf-8')
- print ""
- else:
- print "Unknown status: %s" % headers['x-w3c-validator-status']
- print headers
- sys.exit(1)
-
-
+ r2 = re.search('(.*?)(.*?)(.*?)', unicode(m, 'utf8'), re.DOTALL)
+ if r2:
+ s = u"%s%s%s" % r2.groups()
+ print "Source: %s" % hp.unescape(s).encode('utf-8')
+ print ""
+ else:
+ print "Unknown status: %s" % headers['x-w3c-validator-status']
+ print headers
+ sys.exit(1)
+
+
diff --git a/tools/purgehook/purgehook.py b/tools/purgehook/purgehook.py
index 36d259f3..bf2e9f9e 100755
--- a/tools/purgehook/purgehook.py
+++ b/tools/purgehook/purgehook.py
@@ -14,27 +14,27 @@ import psycopg2
# Templates that we don't want to ban automatically
BANNED_TEMPLATES=(
- 'base/base.html',
+ 'base/base.html',
)

if __name__ == "__main__":
- config = ConfigParser()
- config.read(os.path.join(os.path.abspath(os.path.dirname(__file__)), 'purgehook.ini'))
- conn = psycopg2.connect(config.get('db', 'dsn'))
- curs = conn.cursor()
+ config = ConfigParser()
+ config.read(os.path.join(os.path.abspath(os.path.dirname(__file__)), 'purgehook.ini'))
+ conn = psycopg2.connect(config.get('db', 'dsn'))
+ curs = conn.cursor()

- for l in sys.stdin:
- if l.startswith('templates/'):
- tmpl = l[len('templates/'):].strip()
- if not tmpl in BANNED_TEMPLATES:
- curs.execute("SELECT varnish_purge_xkey(%(key)s)", {
- 'key': 'pgwt_{0}'.format(hashlib.md5(tmpl).hexdigest()),
- })
- elif l.startswith('media/'):
- # For media we can't xkey, but the URL is exact so we can
- # use a classic single-url purge.
- curs.execute("SELECT varnish_purge('^/' || %(u)s || '$')", {
- 'u': l.strip(),
- })
- conn.commit()
- conn.close()
+ for l in sys.stdin:
+ if l.startswith('templates/'):
+ tmpl = l[len('templates/'):].strip()
+ if not tmpl in BANNED_TEMPLATES:
+ curs.execute("SELECT varnish_purge_xkey(%(key)s)", {
+ 'key': 'pgwt_{0}'.format(hashlib.md5(tmpl).hexdigest()),
+ })
+ elif l.startswith('media/'):
+ # For media we can't xkey, but the URL is exact so we can
+ # use a classic single-url purge.
+ curs.execute("SELECT varnish_purge('^/' || %(u)s || '$')", {
+ 'u': l.strip(),
+ })
+ conn.commit()
+ conn.close()
diff --git a/tools/search/crawler/lib/archives.py b/tools/search/crawler/lib/archives.py
index d566264c..7a2014ab 100644
--- a/tools/search/crawler/lib/archives.py
+++ b/tools/search/crawler/lib/archives.py
@@ -9,159 +9,159 @@ from lib.log import log
from lib.parsers import ArchivesParser

class MultiListCrawler(object):
- def __init__(self, lists, conn, status_interval=30, commit_interval=500):
- self.lists = lists
- self.conn = conn
- self.status_interval = status_interval
- self.commit_interval = commit_interval
+ def __init__(self, lists, conn, status_interval=30, commit_interval=500):
+ self.lists = lists
+ self.conn = conn
+ self.status_interval = status_interval
+ self.commit_interval = commit_interval

- self.queue = Queue()
- self.counter = 0
- self.counterlock = threading.RLock()
- self.stopevent = threading.Event()
+ self.queue = Queue()
+ self.counter = 0
+ self.counterlock = threading.RLock()
+ self.stopevent = threading.Event()

- def crawl(self, full=False, month=None):
- # Each thread can independently run on one month, so we can get
- # a reasonable spread.
Therefor, submit them as separate jobs - # to the queue. - for listid, listname in self.lists: - if full: - # Generate a sequence of everything to index - for year in range(1997, datetime.datetime.now().year+1): - for month in range(1,13): - self.queue.put((listid, listname, year, month, -1)) - elif month: - # Do one specific month - pieces = month.split("-") - if len(pieces) != 2: - print "Month format is -, cannot parse '%s'" % month - sys.exit(1) - try: - pieces = [int(x) for x in pieces] - except: - print "Month format is -, cannot convert '%s' to integers" % month - sys.exit(1) - self.queue.put((listid, listname, pieces[0], pieces[1], -1)) - else: - # In incremental scan, we check the current month and the - # previous one, but only for new messages. - curs = self.conn.cursor() - curr = datetime.date.today() - if curr.month == 1: - prev = datetime.date(curr.year-1, 12, 1) - else: - prev = datetime.date(curr.year, curr.month-1, 1) + def crawl(self, full=False, month=None): + # Each thread can independently run on one month, so we can get + # a reasonable spread. Therefor, submit them as separate jobs + # to the queue. + for listid, listname in self.lists: + if full: + # Generate a sequence of everything to index + for year in range(1997, datetime.datetime.now().year+1): + for month in range(1,13): + self.queue.put((listid, listname, year, month, -1)) + elif month: + # Do one specific month + pieces = month.split("-") + if len(pieces) != 2: + print "Month format is -, cannot parse '%s'" % month + sys.exit(1) + try: + pieces = [int(x) for x in pieces] + except: + print "Month format is -, cannot convert '%s' to integers" % month + sys.exit(1) + self.queue.put((listid, listname, pieces[0], pieces[1], -1)) + else: + # In incremental scan, we check the current month and the + # previous one, but only for new messages. + curs = self.conn.cursor() + curr = datetime.date.today() + if curr.month == 1: + prev = datetime.date(curr.year-1, 12, 1) + else: + prev = datetime.date(curr.year, curr.month-1, 1) - for d in curr, prev: - # Figure out what the highest indexed page in this - # month is. - curs.execute("SELECT max(msgnum) FROM messages WHERE list=%(list)s AND year=%(year)s AND month=%(month)s", { - 'list': listid, - 'year': d.year, - 'month': d.month, - }) - x = curs.fetchall() - if x[0][0] != None: - maxmsg = x[0][0] - else: - maxmsg = -1 - self.queue.put((listid, listname, d.year, d.month, maxmsg)) + for d in curr, prev: + # Figure out what the highest indexed page in this + # month is. + curs.execute("SELECT max(msgnum) FROM messages WHERE list=%(list)s AND year=%(year)s AND month=%(month)s", { + 'list': listid, + 'year': d.year, + 'month': d.month, + }) + x = curs.fetchall() + if x[0][0] != None: + maxmsg = x[0][0] + else: + maxmsg = -1 + self.queue.put((listid, listname, d.year, d.month, maxmsg)) - for x in range(5): - t = threading.Thread(name="Indexer %s" % x, - target = lambda: self.crawl_from_queue()) - t.daemon= True - t.start() + for x in range(5): + t = threading.Thread(name="Indexer %s" % x, + target = lambda: self.crawl_from_queue()) + t.daemon= True + t.start() - t = threading.Thread(name="statusthread", target = lambda: self.status_thread()) - t.daemon = True - t.start() + t = threading.Thread(name="statusthread", target = lambda: self.status_thread()) + t.daemon = True + t.start() - # XXX: need to find a way to deal with all threads crashed and - # not done here yet! 
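# One possible answer to the XXX above (a sketch only, not part of the patch):
# rather than blocking forever in Queue.join(), poll the queue and bail out
# once every worker has died, so a fully crashed pool cannot hang the crawl.
# "workers" is a hypothetical list of the threading.Thread objects started
# above; reading Queue.unfinished_tasks without holding its lock is only
# approximate, which is good enough for a liveness check.
import time

def join_or_abort(queue, workers, poll_interval=1.0):
    while queue.unfinished_tasks:
        if not any(t.is_alive() for t in workers):
            return False  # all workers dead, work left in queue
        time.sleep(poll_interval)
    return True  # queue drained normally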
- self.queue.join() - self.stopevent.set() + # XXX: need to find a way to deal with all threads crashed and + # not done here yet! + self.queue.join() + self.stopevent.set() - return self.counter + return self.counter - def status_thread(self): - lastcommit = 0 - starttime = time.time() - while not self.stopevent.is_set(): - self.stopevent.wait(self.status_interval) - nowtime = time.time() - with self.counterlock: - log("Indexed %s messages so far (%s active threads, %s months still queued, %.1f msg/sec)" % ( - self.counter, - threading.active_count() - 2 , # main thread + status thread - self.queue.qsize(), - self.counter / (nowtime - starttime), - )) - # Commit every 500 messages - if self.counter - lastcommit > self.commit_interval: - lastcommit = self.counter - self.conn.commit() + def status_thread(self): + lastcommit = 0 + starttime = time.time() + while not self.stopevent.is_set(): + self.stopevent.wait(self.status_interval) + nowtime = time.time() + with self.counterlock: + log("Indexed %s messages so far (%s active threads, %s months still queued, %.1f msg/sec)" % ( + self.counter, + threading.active_count() - 2 , # main thread + status thread + self.queue.qsize(), + self.counter / (nowtime - starttime), + )) + # Commit every 500 messages + if self.counter - lastcommit > self.commit_interval: + lastcommit = self.counter + self.conn.commit() - def crawl_from_queue(self): - while not self.stopevent.is_set(): - (listid, listname, year, month, maxmsg) = self.queue.get() - self.crawl_month(listid, listname, year, month, maxmsg) - self.queue.task_done() + def crawl_from_queue(self): + while not self.stopevent.is_set(): + (listid, listname, year, month, maxmsg) = self.queue.get() + self.crawl_month(listid, listname, year, month, maxmsg) + self.queue.task_done() - def crawl_month(self, listid, listname, year, month, maxmsg): - currentmsg = maxmsg - while True: - currentmsg += 1 - try: - if not self.crawl_single_message(listid, listname, year, month, currentmsg): - break - except Exception, e: - log("Exception when crawling %s/%s/%s/%s - %s" % ( - listname, year, month, currentmsg, e)) - # Continue on to try the next message + def crawl_month(self, listid, listname, year, month, maxmsg): + currentmsg = maxmsg + while True: + currentmsg += 1 + try: + if not self.crawl_single_message(listid, listname, year, month, currentmsg): + break + except Exception, e: + log("Exception when crawling %s/%s/%s/%s - %s" % ( + listname, year, month, currentmsg, e)) + # Continue on to try the next message - def crawl_single_message(self, listid, listname, year, month, msgnum): - curs = self.conn.cursor() - h = httplib.HTTPConnection(host="archives.postgresql.org", - port=80, - strict=True, - timeout=10) - url = "/%s/%04d-%02d/msg%05d.php" % ( - listname, - year, - month, - msgnum) - h.putrequest("GET", url) - h.putheader("User-agent", "pgsearch/0.2") - h.putheader("Connection", "close") - h.endheaders() - resp = h.getresponse() - txt = resp.read() - h.close() + def crawl_single_message(self, listid, listname, year, month, msgnum): + curs = self.conn.cursor() + h = httplib.HTTPConnection(host="archives.postgresql.org", + port=80, + strict=True, + timeout=10) + url = "/%s/%04d-%02d/msg%05d.php" % ( + listname, + year, + month, + msgnum) + h.putrequest("GET", url) + h.putheader("User-agent", "pgsearch/0.2") + h.putheader("Connection", "close") + h.endheaders() + resp = h.getresponse() + txt = resp.read() + h.close() - if resp.status == 404: - # Past the end of the month - return False - elif resp.status != 200: 
- raise Exception("%s/%s/%s/%s returned status %s" % (listname, year, month, msgnum, resp.status)) + if resp.status == 404: + # Past the end of the month + return False + elif resp.status != 200: + raise Exception("%s/%s/%s/%s returned status %s" % (listname, year, month, msgnum, resp.status)) - # Else we have the message! - p = ArchivesParser() - if not p.parse(txt): - log("Failed to parse %s/%s/%s/%s" % (listname, year, month, msgnum)) - # We return true to move on to the next message anyway! - return True - curs.execute("INSERT INTO messages (list, year, month, msgnum, date, subject, author, txt, fti) VALUES (%(listid)s, %(year)s, %(month)s, %(msgnum)s, %(date)s, %(subject)s, %(author)s, %(txt)s, setweight(to_tsvector('pg', %(subject)s), 'A') || to_tsvector('pg', %(txt)s))", { - 'listid': listid, - 'year': year, - 'month': month, - 'msgnum': msgnum, - 'date': p.date, - 'subject': p.subject[:127], - 'author': p.author[:127], - 'txt': p.body, - }) - with self.counterlock: - self.counter += 1 + # Else we have the message! + p = ArchivesParser() + if not p.parse(txt): + log("Failed to parse %s/%s/%s/%s" % (listname, year, month, msgnum)) + # We return true to move on to the next message anyway! + return True + curs.execute("INSERT INTO messages (list, year, month, msgnum, date, subject, author, txt, fti) VALUES (%(listid)s, %(year)s, %(month)s, %(msgnum)s, %(date)s, %(subject)s, %(author)s, %(txt)s, setweight(to_tsvector('pg', %(subject)s), 'A') || to_tsvector('pg', %(txt)s))", { + 'listid': listid, + 'year': year, + 'month': month, + 'msgnum': msgnum, + 'date': p.date, + 'subject': p.subject[:127], + 'author': p.author[:127], + 'txt': p.body, + }) + with self.counterlock: + self.counter += 1 - return True + return True diff --git a/tools/search/crawler/lib/basecrawler.py b/tools/search/crawler/lib/basecrawler.py index e0a02b07..2154e0b1 100644 --- a/tools/search/crawler/lib/basecrawler.py +++ b/tools/search/crawler/lib/basecrawler.py @@ -12,248 +12,248 @@ from lib.log import log from lib.parsers import GenericHtmlParser, lossy_unicode class BaseSiteCrawler(object): - def __init__(self, hostname, dbconn, siteid, serverip=None, https=False): - self.hostname = hostname - self.dbconn = dbconn - self.siteid = siteid - self.serverip = serverip - self.https = https - self.pages_crawled = {} - self.pages_new = 0 - self.pages_updated = 0 - self.pages_deleted = 0 - self.status_interval = 5 + def __init__(self, hostname, dbconn, siteid, serverip=None, https=False): + self.hostname = hostname + self.dbconn = dbconn + self.siteid = siteid + self.serverip = serverip + self.https = https + self.pages_crawled = {} + self.pages_new = 0 + self.pages_updated = 0 + self.pages_deleted = 0 + self.status_interval = 5 - curs = dbconn.cursor() - curs.execute("SELECT suburl, lastscanned FROM webpages WHERE site=%(id)s AND lastscanned IS NOT NULL", {'id': siteid}) - self.scantimes = dict(curs.fetchall()) - self.queue = Queue() - self.counterlock = threading.RLock() - self.stopevent = threading.Event() + curs = dbconn.cursor() + curs.execute("SELECT suburl, lastscanned FROM webpages WHERE site=%(id)s AND lastscanned IS NOT NULL", {'id': siteid}) + self.scantimes = dict(curs.fetchall()) + self.queue = Queue() + self.counterlock = threading.RLock() + self.stopevent = threading.Event() - def crawl(self): - self.init_crawl() + def crawl(self): + self.init_crawl() - # Fire off worker threads - for x in range(5): - t = threading.Thread(name="Indexer %s" % x, - target = lambda: self.crawl_from_queue()) - t.daemon = True - 
t.start() + # Fire off worker threads + for x in range(5): + t = threading.Thread(name="Indexer %s" % x, + target = lambda: self.crawl_from_queue()) + t.daemon = True + t.start() - t = threading.Thread(name="statusthread", target = lambda: self.status_thread()) - t.daemon = True - t.start() + t = threading.Thread(name="statusthread", target = lambda: self.status_thread()) + t.daemon = True + t.start() - # XXX: need to find a way to deal with all threads crashed and - # not done here yet! - self.queue.join() - self.stopevent.set() + # XXX: need to find a way to deal with all threads crashed and + # not done here yet! + self.queue.join() + self.stopevent.set() - # Remove all pages that we didn't crawl - curs = self.dbconn.cursor() - curs.execute("DELETE FROM webpages WHERE site=%(site)s AND NOT suburl=ANY(%(urls)s)", { - 'site': self.siteid, - 'urls': self.pages_crawled.keys(), - }) - if curs.rowcount: - log("Deleted %s pages no longer accessible" % curs.rowcount) - self.pages_deleted += curs.rowcount + # Remove all pages that we didn't crawl + curs = self.dbconn.cursor() + curs.execute("DELETE FROM webpages WHERE site=%(site)s AND NOT suburl=ANY(%(urls)s)", { + 'site': self.siteid, + 'urls': self.pages_crawled.keys(), + }) + if curs.rowcount: + log("Deleted %s pages no longer accessible" % curs.rowcount) + self.pages_deleted += curs.rowcount - self.dbconn.commit() - log("Considered %s pages, wrote %s updated and %s new, deleted %s." % (len(self.pages_crawled), self.pages_updated, self.pages_new, self.pages_deleted)) + self.dbconn.commit() + log("Considered %s pages, wrote %s updated and %s new, deleted %s." % (len(self.pages_crawled), self.pages_updated, self.pages_new, self.pages_deleted)) - def status_thread(self): - starttime = time.time() - while not self.stopevent.is_set(): - self.stopevent.wait(self.status_interval) - nowtime = time.time() - with self.counterlock: - log("Considered %s pages, wrote %s upd, %s new, %s del (%s threads, %s in queue, %.1f pages/sec)" % ( - len(self.pages_crawled), - self.pages_updated, - self.pages_new, - self.pages_deleted, - threading.active_count() - 2, - self.queue.qsize(), - len(self.pages_crawled) / (nowtime - starttime), - )) + def status_thread(self): + starttime = time.time() + while not self.stopevent.is_set(): + self.stopevent.wait(self.status_interval) + nowtime = time.time() + with self.counterlock: + log("Considered %s pages, wrote %s upd, %s new, %s del (%s threads, %s in queue, %.1f pages/sec)" % ( + len(self.pages_crawled), + self.pages_updated, + self.pages_new, + self.pages_deleted, + threading.active_count() - 2, + self.queue.qsize(), + len(self.pages_crawled) / (nowtime - starttime), + )) - def crawl_from_queue(self): - while not self.stopevent.is_set(): - (url, relprio, internal) = self.queue.get() - try: - self.crawl_page(url, relprio, internal) - except Exception, e: - log("Exception crawling '%s': %s" % (url, e)) - self.queue.task_done() + def crawl_from_queue(self): + while not self.stopevent.is_set(): + (url, relprio, internal) = self.queue.get() + try: + self.crawl_page(url, relprio, internal) + except Exception, e: + log("Exception crawling '%s': %s" % (url, e)) + self.queue.task_done() - def exclude_url(self, url): - return False + def exclude_url(self, url): + return False - def crawl_page(self, url, relprio, internal): - if self.pages_crawled.has_key(url) or self.pages_crawled.has_key(url+"/"): - return + def crawl_page(self, url, relprio, internal): + if self.pages_crawled.has_key(url) or 
self.pages_crawled.has_key(url+"/"): + return - if self.exclude_url(url): - return + if self.exclude_url(url): + return - self.pages_crawled[url] = 1 - (result, pagedata, lastmod) = self.fetch_page(url) + self.pages_crawled[url] = 1 + (result, pagedata, lastmod) = self.fetch_page(url) - if result == 0: - if pagedata == None: - # Result ok but no data, means that the page was not modified. - # Thus we can happily consider ourselves done here. - return - else: - # Page failed to load or was a redirect, so remove from database - curs = self.dbconn.cursor() - curs.execute("DELETE FROM webpages WHERE site=%(id)s AND suburl=%(url)s", { - 'id': self.siteid, - 'url': url, - }) - with self.counterlock: - self.pages_deleted += curs.rowcount + if result == 0: + if pagedata == None: + # Result ok but no data, means that the page was not modified. + # Thus we can happily consider ourselves done here. + return + else: + # Page failed to load or was a redirect, so remove from database + curs = self.dbconn.cursor() + curs.execute("DELETE FROM webpages WHERE site=%(id)s AND suburl=%(url)s", { + 'id': self.siteid, + 'url': url, + }) + with self.counterlock: + self.pages_deleted += curs.rowcount - if result == 1: - # Page was a redirect, so crawl into that page if we haven't - # already done so. - self.queue_url(pagedata) - return + if result == 1: + # Page was a redirect, so crawl into that page if we haven't + # already done so. + self.queue_url(pagedata) + return - # Try to convert pagedata to a unicode string - pagedata = lossy_unicode(pagedata) - try: - self.page = self.parse_html(pagedata) - except Exception, e: - log("Failed to parse HTML for %s" % url) - log(e) - return + # Try to convert pagedata to a unicode string + pagedata = lossy_unicode(pagedata) + try: + self.page = self.parse_html(pagedata) + except Exception, e: + log("Failed to parse HTML for %s" % url) + log(e) + return - self.save_page(url, lastmod, relprio, internal) - self.post_process_page(url) + self.save_page(url, lastmod, relprio, internal) + self.post_process_page(url) - def save_page(self, url, lastmod, relprio, internal): - if relprio == 0.0: - relprio = 0.5 - params = { - 'title': self.page.title[:128], - 'txt': self.page.gettext(), - 'lastmod': lastmod, - 'site': self.siteid, - 'url': url, - 'relprio': relprio, - 'internal': internal, - } - curs = self.dbconn.cursor() - curs.execute("UPDATE webpages SET title=%(title)s, txt=%(txt)s, fti=setweight(to_tsvector('public.pg', %(title)s), 'A') || to_tsvector('public.pg', %(txt)s), lastscanned=%(lastmod)s, relprio=%(relprio)s, isinternal=%(internal)s WHERE site=%(site)s AND suburl=%(url)s", params) - if curs.rowcount != 1: - curs.execute("INSERT INTO webpages (site, suburl, title, txt, fti, lastscanned, relprio, isinternal) VALUES (%(site)s, %(url)s, %(title)s, %(txt)s, setweight(to_tsvector('public.pg', %(title)s), 'A') || to_tsvector('public.pg', %(txt)s), %(lastmod)s, %(relprio)s, %(internal)s)", params) - with self.counterlock: - self.pages_new += 1 - else: - with self.counterlock: - self.pages_updated += 1 + def save_page(self, url, lastmod, relprio, internal): + if relprio == 0.0: + relprio = 0.5 + params = { + 'title': self.page.title[:128], + 'txt': self.page.gettext(), + 'lastmod': lastmod, + 'site': self.siteid, + 'url': url, + 'relprio': relprio, + 'internal': internal, + } + curs = self.dbconn.cursor() + curs.execute("UPDATE webpages SET title=%(title)s, txt=%(txt)s, fti=setweight(to_tsvector('public.pg', %(title)s), 'A') || to_tsvector('public.pg', %(txt)s), 
lastscanned=%(lastmod)s, relprio=%(relprio)s, isinternal=%(internal)s WHERE site=%(site)s AND suburl=%(url)s", params) + if curs.rowcount != 1: + curs.execute("INSERT INTO webpages (site, suburl, title, txt, fti, lastscanned, relprio, isinternal) VALUES (%(site)s, %(url)s, %(title)s, %(txt)s, setweight(to_tsvector('public.pg', %(title)s), 'A') || to_tsvector('public.pg', %(txt)s), %(lastmod)s, %(relprio)s, %(internal)s)", params) + with self.counterlock: + self.pages_new += 1 + else: + with self.counterlock: + self.pages_updated += 1 - ACCEPTED_CONTENTTYPES = ("text/html", "text/plain", ) - def accept_contenttype(self, contenttype): - # Split apart if there is a "; charset=" in it - if contenttype.find(";"): - contenttype = contenttype.split(';',2)[0] - return contenttype in self.ACCEPTED_CONTENTTYPES + ACCEPTED_CONTENTTYPES = ("text/html", "text/plain", ) + def accept_contenttype(self, contenttype): + # Split apart if there is a "; charset=" in it + if contenttype.find(";"): + contenttype = contenttype.split(';',2)[0] + return contenttype in self.ACCEPTED_CONTENTTYPES - def fetch_page(self, url): - try: - # Unfortunatley, persistent connections seem quite unreliable, - # so create a new one for each page. - if self.serverip: - if not self.https: - h = httplib.HTTPConnection(host=self.serverip, port=80, strict=True, timeout=10) - else: - h = httplib.HTTPSConnection(host=self.serverip, port=443, strict=True, timeout=10, context=ssl._create_unverified_context()) - h.putrequest("GET", url, skip_host=1) - h.putheader("Host", self.hostname) - else: - if not self.https: - h = httplib.HTTPConnection(host=self.hostname, port=80, strict=True, timeout=10) - else: - h = httplib.HTTPSConnection(host=self.hostname, port=443, strict=True, timeout=10, context=ssl._create_unverified_context()) - h.putrequest("GET", url) - h.putheader("User-agent","pgsearch/0.2") - h.putheader("Connection","close") - if self.scantimes.has_key(url): - h.putheader("If-Modified-Since", formatdate(time.mktime(self.scantimes[url].timetuple()))) - h.endheaders() - resp = h.getresponse() + def fetch_page(self, url): + try: + # Unfortunatley, persistent connections seem quite unreliable, + # so create a new one for each page. + if self.serverip: + if not self.https: + h = httplib.HTTPConnection(host=self.serverip, port=80, strict=True, timeout=10) + else: + h = httplib.HTTPSConnection(host=self.serverip, port=443, strict=True, timeout=10, context=ssl._create_unverified_context()) + h.putrequest("GET", url, skip_host=1) + h.putheader("Host", self.hostname) + else: + if not self.https: + h = httplib.HTTPConnection(host=self.hostname, port=80, strict=True, timeout=10) + else: + h = httplib.HTTPSConnection(host=self.hostname, port=443, strict=True, timeout=10, context=ssl._create_unverified_context()) + h.putrequest("GET", url) + h.putheader("User-agent","pgsearch/0.2") + h.putheader("Connection","close") + if self.scantimes.has_key(url): + h.putheader("If-Modified-Since", formatdate(time.mktime(self.scantimes[url].timetuple()))) + h.endheaders() + resp = h.getresponse() - if resp.status == 200: - if not self.accept_contenttype(resp.getheader("content-type")): - # Content-type we're not interested in - return (2, None, None) - return (0, resp.read(), self.get_date(resp.getheader("last-modified"))) - elif resp.status == 304: - # Not modified, so no need to reprocess, but also don't - # give an error message for it... - return (0, None, None) - elif resp.status == 301: - # A redirect... 
So try again with the redirected-to URL - # We send this through our link resolver to deal with both - # absolute and relative URLs - if resp.getheader('location', '') == '': - log("Url %s returned empty redirect" % url) - return (2, None, None) + if resp.status == 200: + if not self.accept_contenttype(resp.getheader("content-type")): + # Content-type we're not interested in + return (2, None, None) + return (0, resp.read(), self.get_date(resp.getheader("last-modified"))) + elif resp.status == 304: + # Not modified, so no need to reprocess, but also don't + # give an error message for it... + return (0, None, None) + elif resp.status == 301: + # A redirect... So try again with the redirected-to URL + # We send this through our link resolver to deal with both + # absolute and relative URLs + if resp.getheader('location', '') == '': + log("Url %s returned empty redirect" % url) + return (2, None, None) - for tgt in self.resolve_links([resp.getheader('location', '')], url): - return (1, tgt, None) - # No redirect at all found, becaue it was invalid? - return (2, None, None) - else: - #print "Url %s returned status %s" % (url, resp.status) - pass - except Exception, e: - log("Exception when loading url %s: %s" % (url, e)) - return (2, None, None) + for tgt in self.resolve_links([resp.getheader('location', '')], url): + return (1, tgt, None) + # No redirect at all found, becaue it was invalid? + return (2, None, None) + else: + #print "Url %s returned status %s" % (url, resp.status) + pass + except Exception, e: + log("Exception when loading url %s: %s" % (url, e)) + return (2, None, None) - def get_date(self, date): - d = parsedate(date) - if d: - return datetime.datetime.fromtimestamp(time.mktime(d)) - return datetime.datetime.now() + def get_date(self, date): + d = parsedate(date) + if d: + return datetime.datetime.fromtimestamp(time.mktime(d)) + return datetime.datetime.now() - def parse_html(self, page): - if page == None: - return None + def parse_html(self, page): + if page == None: + return None - p = GenericHtmlParser() - p.feed(page) - return p + p = GenericHtmlParser() + p.feed(page) + return p - def resolve_links(self, links, pageurl): - for x in links: - p = urlparse.urlsplit(x) - if p.scheme in ("http", "https"): - if p.netloc != self.hostname: - # Remote link - continue - # Turn this into a host-relative url - p = ('', '', p.path, p.query, '') + def resolve_links(self, links, pageurl): + for x in links: + p = urlparse.urlsplit(x) + if p.scheme in ("http", "https"): + if p.netloc != self.hostname: + # Remote link + continue + # Turn this into a host-relative url + p = ('', '', p.path, p.query, '') - if p[4] != "" or p[3] != "": - # Remove fragments (part of the url past #) - p = (p[0], p[1], p[2], '', '') + if p[4] != "" or p[3] != "": + # Remove fragments (part of the url past #) + p = (p[0], p[1], p[2], '', '') - if p[0] == "": - if p[2] == "": - # Nothing in the path, so it's a pure fragment url - continue + if p[0] == "": + if p[2] == "": + # Nothing in the path, so it's a pure fragment url + continue - if p[2][0] == "/": - # Absolute link on this host, so just return it - yield urlparse.urlunsplit(p) - else: - # Relative link - yield urlparse.urljoin(pageurl, urlparse.urlunsplit(p)) - else: - # Ignore unknown url schemes like mailto - pass + if p[2][0] == "/": + # Absolute link on this host, so just return it + yield urlparse.urlunsplit(p) + else: + # Relative link + yield urlparse.urljoin(pageurl, urlparse.urlunsplit(p)) + else: + # Ignore unknown url schemes like mailto + 
pass diff --git a/tools/search/crawler/lib/genericsite.py b/tools/search/crawler/lib/genericsite.py index b74bb469..718b1f0d 100644 --- a/tools/search/crawler/lib/genericsite.py +++ b/tools/search/crawler/lib/genericsite.py @@ -4,49 +4,49 @@ from basecrawler import BaseSiteCrawler from parsers import RobotsParser class GenericSiteCrawler(BaseSiteCrawler): - def __init__(self, hostname, dbconn, siteid, https=False): - super(GenericSiteCrawler, self).__init__(hostname, dbconn, siteid, https=https) + def __init__(self, hostname, dbconn, siteid, https=False): + super(GenericSiteCrawler, self).__init__(hostname, dbconn, siteid, https=https) - def init_crawl(self): - # Load robots.txt - self.robots = RobotsParser("http://%s/robots.txt" % self.hostname) + def init_crawl(self): + # Load robots.txt + self.robots = RobotsParser("http://%s/robots.txt" % self.hostname) - # We need to seed the crawler with every URL we've already seen, since - # we don't recrawl the contents if they haven't changed. - allpages = self.scantimes.keys() + # We need to seed the crawler with every URL we've already seen, since + # we don't recrawl the contents if they haven't changed. + allpages = self.scantimes.keys() - # Figure out if there are any excludes to deal with (beyond the - # robots.txt ones) - curs = self.dbconn.cursor() - curs.execute("SELECT suburlre FROM site_excludes WHERE site=%(site)s", { - 'site': self.siteid, - }) - self.extra_excludes = [re.compile(x) for x, in curs.fetchall()] + # Figure out if there are any excludes to deal with (beyond the + # robots.txt ones) + curs = self.dbconn.cursor() + curs.execute("SELECT suburlre FROM site_excludes WHERE site=%(site)s", { + 'site': self.siteid, + }) + self.extra_excludes = [re.compile(x) for x, in curs.fetchall()] - # We *always* crawl the root page, of course - self.queue.put(("/", 0.5, False)) + # We *always* crawl the root page, of course + self.queue.put(("/", 0.5, False)) - # Now do all the other pages - for x in allpages: - self.queue.put((x, 0.5, False)) + # Now do all the other pages + for x in allpages: + self.queue.put((x, 0.5, False)) - def exclude_url(self, url): - if ".." in url: - return True - if self.robots and self.robots.block_url(url): - return True - for r in self.extra_excludes: - if r.search(url): - return True - return False + def exclude_url(self, url): + if ".." 
in url: + return True + if self.robots and self.robots.block_url(url): + return True + for r in self.extra_excludes: + if r.search(url): + return True + return False - def queue_url(self, url): - self.queue.put((url.strip(), 0.5, False)) + def queue_url(self, url): + self.queue.put((url.strip(), 0.5, False)) - def post_process_page(self, url): - for l in self.resolve_links(self.page.links, url): - if self.pages_crawled.has_key(l) or self.pages_crawled.has_key(l+"/"): - continue - if self.exclude_url(l): - continue - self.queue_url(l) + def post_process_page(self, url): + for l in self.resolve_links(self.page.links, url): + if self.pages_crawled.has_key(l) or self.pages_crawled.has_key(l+"/"): + continue + if self.exclude_url(l): + continue + self.queue_url(l) diff --git a/tools/search/crawler/lib/log.py b/tools/search/crawler/lib/log.py index 8c147899..ce566034 100644 --- a/tools/search/crawler/lib/log.py +++ b/tools/search/crawler/lib/log.py @@ -2,5 +2,5 @@ # more here in the future :) import datetime def log(msg): - print "%s: %s" % (datetime.datetime.now(), msg) + print "%s: %s" % (datetime.datetime.now(), msg) diff --git a/tools/search/crawler/lib/parsers.py b/tools/search/crawler/lib/parsers.py index b1ad9c53..89f0ff12 100644 --- a/tools/search/crawler/lib/parsers.py +++ b/tools/search/crawler/lib/parsers.py @@ -9,162 +9,162 @@ from HTMLParser import HTMLParser from lib.log import log class GenericHtmlParser(HTMLParser): - def __init__(self): - HTMLParser.__init__(self) - self.lasttag = None - self.title = "" - self.pagedata = StringIO() - self.links = [] - self.inbody = False + def __init__(self): + HTMLParser.__init__(self) + self.lasttag = None + self.title = "" + self.pagedata = StringIO() + self.links = [] + self.inbody = False - def handle_starttag(self, tag, attrs): - self.lasttag = tag - if tag == "body": - self.inbody = True - if tag == "a": - for a,v in attrs: - if a == "href": - self.links.append(v) + def handle_starttag(self, tag, attrs): + self.lasttag = tag + if tag == "body": + self.inbody = True + if tag == "a": + for a,v in attrs: + if a == "href": + self.links.append(v) - def handle_endtag(self, tag): - if tag == "body": - self.inbody = False + def handle_endtag(self, tag): + if tag == "body": + self.inbody = False - DATA_IGNORE_TAGS = ("script",) - def handle_data(self, data): - d = data.strip() - if len(d) < 2: - return + DATA_IGNORE_TAGS = ("script",) + def handle_data(self, data): + d = data.strip() + if len(d) < 2: + return - if self.lasttag == "title": - self.title += d - return + if self.lasttag == "title": + self.title += d + return - # Never store text found in the HEAD - if not self.inbody: - return + # Never store text found in the HEAD + if not self.inbody: + return - # Ignore specific tags, like SCRIPT - if self.lasttag in self.DATA_IGNORE_TAGS: - return + # Ignore specific tags, like SCRIPT + if self.lasttag in self.DATA_IGNORE_TAGS: + return - self.pagedata.write(d) - self.pagedata.write("\n") + self.pagedata.write(d) + self.pagedata.write("\n") - def gettext(self): - self.pagedata.seek(0) - return self.pagedata.read() + def gettext(self): + self.pagedata.seek(0) + return self.pagedata.read() class ArchivesParser(object): - rematcher = re.compile(".*.*.*(.*)", re.DOTALL) - hp = HTMLParser() - def __init__(self): - self.subject = None - self.author = None - self.date = None - self.body = None + rematcher = re.compile(".*.*.*(.*)", re.DOTALL) + hp = HTMLParser() + def __init__(self): + self.subject = None + self.author = None + self.date = None + 
self.body = None - def parse(self, contents): - contents = lossy_unicode(contents) - match = self.rematcher.search(contents) - if not match: - return False - self.subject = self.hp.unescape(match.group(1)) - self.author = self.almost_rot13(self.hp.unescape(match.group(2))) - if not self.parse_date(self.hp.unescape(match.group(3))): - return False - self.body = self.hp.unescape(match.group(4)) - return True + def parse(self, contents): + contents = lossy_unicode(contents) + match = self.rematcher.search(contents) + if not match: + return False + self.subject = self.hp.unescape(match.group(1)) + self.author = self.almost_rot13(self.hp.unescape(match.group(2))) + if not self.parse_date(self.hp.unescape(match.group(3))): + return False + self.body = self.hp.unescape(match.group(4)) + return True - _date_multi_re = re.compile(' \((\w+\s\w+|)\)$') - _date_trailing_envelope = re.compile('\s+\(envelope.*\)$') - def parse_date(self, d): - # For some reason, we have dates that look like this: - # http://archives.postgresql.org/pgsql-bugs/1999-05/msg00018.php - # Looks like an mhonarc bug, but let's just remove that trailing - # stuff here to be sure... - if self._date_trailing_envelope.search(d): - d = self._date_trailing_envelope.sub('', d) + _date_multi_re = re.compile(' \((\w+\s\w+|)\)$') + _date_trailing_envelope = re.compile('\s+\(envelope.*\)$') + def parse_date(self, d): + # For some reason, we have dates that look like this: + # http://archives.postgresql.org/pgsql-bugs/1999-05/msg00018.php + # Looks like an mhonarc bug, but let's just remove that trailing + # stuff here to be sure... + if self._date_trailing_envelope.search(d): + d = self._date_trailing_envelope.sub('', d) - # We have a number of dates in the format - # " +0200 (MET DST)" - # or similar. The problem coming from the space within the - # parenthesis, or if the contents of the parenthesis is - # completely empty - if self._date_multi_re.search(d): - d = self._date_multi_re.sub('', d) - # Isn't it wonderful with a string with a trailing quote but no - # leading quote? MUA's are weird... - if d.endswith('"') and not d.startswith('"'): - d = d[:-1] + # We have a number of dates in the format + # " +0200 (MET DST)" + # or similar. The problem coming from the space within the + # parenthesis, or if the contents of the parenthesis is + # completely empty + if self._date_multi_re.search(d): + d = self._date_multi_re.sub('', d) + # Isn't it wonderful with a string with a trailing quote but no + # leading quote? MUA's are weird... + if d.endswith('"') and not d.startswith('"'): + d = d[:-1] - # We also have "known incorrect timezone specs". - if d.endswith('MST7MDT'): - d = d[:-4] - elif d.endswith('METDST'): - d = d[:-3] - elif d.endswith('"MET'): - d = d[:-4] + "MET" + # We also have "known incorrect timezone specs". + if d.endswith('MST7MDT'): + d = d[:-4] + elif d.endswith('METDST'): + d = d[:-3] + elif d.endswith('"MET'): + d = d[:-4] + "MET" - try: - self.date = dateutil.parser.parse(d) - except ValueError: - log("Failed to parse date '%s'" % d) - return False + try: + self.date = dateutil.parser.parse(d) + except ValueError: + log("Failed to parse date '%s'" % d) + return False - if self.date.utcoffset(): - # We have some messages with completely incorrect utc offsets, - # so we need to reject those too - if self.date.utcoffset() > timedelta(hours=12) or self.date.utcoffset() < timedelta(hours=-12): - log("Failed to parse date %s', timezone offset out of range." 
% d) - return False + if self.date.utcoffset(): + # We have some messages with completely incorrect utc offsets, + # so we need to reject those too + if self.date.utcoffset() > timedelta(hours=12) or self.date.utcoffset() < timedelta(hours=-12): + log("Failed to parse date '%s', timezone offset out of range." % d) + return False - return True + return True - # Semi-hacked rot13, because the one used by mhonarc is broken. - # So we copy the brokenness here. - # This code is from MHonArc/ewhutil.pl, mrot13() - _arot13_trans = dict(zip(map(ord, - u'@ABCDEFGHIJKLMNOPQRSTUVWXYZ[abcdefghijklmnopqrstuvwxyz'), - u'NOPQRSTUVWXYZ[@ABCDEFGHIJKLMnopqrstuvwxyzabcdefghijklm')) - def almost_rot13(self, s): - return unicode(s).translate(self._arot13_trans) + # Semi-hacked rot13, because the one used by mhonarc is broken. + # So we copy the brokenness here. + # This code is from MHonArc/ewhutil.pl, mrot13() + _arot13_trans = dict(zip(map(ord, + u'@ABCDEFGHIJKLMNOPQRSTUVWXYZ[abcdefghijklmnopqrstuvwxyz'), + u'NOPQRSTUVWXYZ[@ABCDEFGHIJKLMnopqrstuvwxyzabcdefghijklm')) + def almost_rot13(self, s): + return unicode(s).translate(self._arot13_trans) class RobotsParser(object): - def __init__(self, url): - try: - u = urllib.urlopen(url) - txt = u.read() - u.close() - self.disallows = [] - activeagent = False - for l in txt.splitlines(): - if l.lower().startswith("user-agent: ") and len(l) > 12: - if l[12] == "*" or l[12:20] == "pgsearch": - activeagent = True - else: - activeagent = False - if activeagent and l.lower().startswith("disallow: "): - self.disallows.append(l[10:]) - except Exception: - self.disallows = [] + def __init__(self, url): + try: + u = urllib.urlopen(url) + txt = u.read() + u.close() + self.disallows = [] + activeagent = False + for l in txt.splitlines(): + if l.lower().startswith("user-agent: ") and len(l) > 12: + if l[12] == "*" or l[12:20] == "pgsearch": + activeagent = True + else: + activeagent = False + if activeagent and l.lower().startswith("disallow: "): + self.disallows.append(l[10:]) + except Exception: + self.disallows = [] - def block_url(self, url): - # Assumes url comes in as relative - for d in self.disallows: - if url.startswith(d): - return True - return False + def block_url(self, url): + # Assumes url comes in as relative + for d in self.disallows: + if url.startswith(d): + return True + return False # Convert a string to unicode, try utf8 first, then latin1, then give # up and do a best-effort utf8.
def lossy_unicode(s): - try: - return unicode(s, 'utf8') - except UnicodeDecodeError: - try: - return unicode(s, 'latin1') - except UnicodeDecodeError: - return unicode(s, 'utf8', 'replace') + try: + return unicode(s, 'utf8') + except UnicodeDecodeError: + try: + return unicode(s, 'latin1') + except UnicodeDecodeError: + return unicode(s, 'utf8', 'replace') diff --git a/tools/search/crawler/lib/sitemapsite.py b/tools/search/crawler/lib/sitemapsite.py index 439bfbf3..25d6fb72 100644 --- a/tools/search/crawler/lib/sitemapsite.py +++ b/tools/search/crawler/lib/sitemapsite.py @@ -6,97 +6,97 @@ from lib.log import log from lib.basecrawler import BaseSiteCrawler class SitemapParser(object): - def __init__(self): - self.urls = [] + def __init__(self): + self.urls = [] - def parse(self, f, internal=False): - self.parser = xml.parsers.expat.ParserCreate() - self.currenturl = "" - self.currentprio = 0 - self.currentlastmod = None - self.geturl = False - self.getprio = False - self.getlastmod = False - self.currstr = "" - self.internal = False - self.parser.StartElementHandler = lambda name,attrs: self.processelement(name,attrs) - self.parser.EndElementHandler = lambda name: self.processendelement(name) - self.parser.CharacterDataHandler = lambda data: self.processcharacterdata(data) - self.internal = internal + def parse(self, f, internal=False): + self.parser = xml.parsers.expat.ParserCreate() + self.currenturl = "" + self.currentprio = 0 + self.currentlastmod = None + self.geturl = False + self.getprio = False + self.getlastmod = False + self.currstr = "" + self.internal = False + self.parser.StartElementHandler = lambda name,attrs: self.processelement(name,attrs) + self.parser.EndElementHandler = lambda name: self.processendelement(name) + self.parser.CharacterDataHandler = lambda data: self.processcharacterdata(data) + self.internal = internal - self.parser.ParseFile(f) + self.parser.ParseFile(f) - def processelement(self, name, attrs): - if name == "url": - self.currenturl = "" - self.currentprio = 0 - self.currentlastmod = None - elif name == "loc": - self.geturl = True - self.currstr = "" - elif name == "priority": - self.getprio = True - self.currstr = "" - elif name == "lastmod": - self.getlastmod = True - self.currstr = "" + def processelement(self, name, attrs): + if name == "url": + self.currenturl = "" + self.currentprio = 0 + self.currentlastmod = None + elif name == "loc": + self.geturl = True + self.currstr = "" + elif name == "priority": + self.getprio = True + self.currstr = "" + elif name == "lastmod": + self.getlastmod = True + self.currstr = "" - def processendelement(self, name): - if name == "loc": - self.geturl = False - self.currenturl = self.currstr - elif name == "priority": - self.getprio = False - self.currentprio = float(self.currstr) - elif name == "lastmod": - self.getlastmod = False - self.currentlastmod = dateutil.parser.parse(self.currstr) - elif name == "url": - self.urls.append((self.currenturl, self.currentprio, self.currentlastmod, self.internal)) + def processendelement(self, name): + if name == "loc": + self.geturl = False + self.currenturl = self.currstr + elif name == "priority": + self.getprio = False + self.currentprio = float(self.currstr) + elif name == "lastmod": + self.getlastmod = False + self.currentlastmod = dateutil.parser.parse(self.currstr) + elif name == "url": + self.urls.append((self.currenturl, self.currentprio, self.currentlastmod, self.internal)) - def processcharacterdata(self, data): - if self.geturl or self.getprio or self.getlastmod: - 
self.currstr += data + def processcharacterdata(self, data): + if self.geturl or self.getprio or self.getlastmod: + self.currstr += data class SitemapSiteCrawler(BaseSiteCrawler): - def __init__(self, hostname, dbconn, siteid, serverip, https=False): - super(SitemapSiteCrawler, self).__init__(hostname, dbconn, siteid, serverip, https) + def __init__(self, hostname, dbconn, siteid, serverip, https=False): + super(SitemapSiteCrawler, self).__init__(hostname, dbconn, siteid, serverip, https) - def init_crawl(self): - # Fetch the sitemap. We ignore robots.txt in this case, and - # assume it's always under /sitemap.xml - u = urllib.urlopen("https://%s/sitemap.xml" % self.hostname) - p = SitemapParser() - p.parse(u) - u.close() + def init_crawl(self): + # Fetch the sitemap. We ignore robots.txt in this case, and + # assume it's always under /sitemap.xml + u = urllib.urlopen("https://%s/sitemap.xml" % self.hostname) + p = SitemapParser() + p.parse(u) + u.close() - # Attempt to fetch a sitemap_internal.xml. This is used to index - # pages on our internal search engine that we don't want on - # Google. They should also be excluded from default search - # results (unless searching with a specific suburl) - u = urllib.urlopen("https://%s/sitemap_internal.xml" % self.hostname) - if u.getcode() == 200: - p.parse(u, True) - u.close() + # Attempt to fetch a sitemap_internal.xml. This is used to index + # pages on our internal search engine that we don't want on + # Google. They should also be excluded from default search + # results (unless searching with a specific suburl) + u = urllib.urlopen("https://%s/sitemap_internal.xml" % self.hostname) + if u.getcode() == 200: + p.parse(u, True) + u.close() - for url, prio, lastmod, internal in p.urls: - # Advance 8 characters - length of https://. - url = url[len(self.hostname)+8:] - if lastmod: - if self.scantimes.has_key(url): - if lastmod < self.scantimes[url]: - # Not modified since last scan, so don't reload - # Stick it in the list of pages we've scanned though, - # to make sure we don't remove it... - self.pages_crawled[url] = 1 - continue - self.queue.put((url, prio, internal)) + for url, prio, lastmod, internal in p.urls: + # Advance 8 characters - length of https://. + url = url[len(self.hostname)+8:] + if lastmod: + if self.scantimes.has_key(url): + if lastmod < self.scantimes[url]: + # Not modified since last scan, so don't reload + # Stick it in the list of pages we've scanned though, + # to make sure we don't remove it... + self.pages_crawled[url] = 1 + continue + self.queue.put((url, prio, internal)) - log("About to crawl %s pages from sitemap" % self.queue.qsize()) + log("About to crawl %s pages from sitemap" % self.queue.qsize()) - # Stub functions used when crawling, ignored here - def queue_url(self, url): - pass + # Stub functions used when crawling, ignored here + def queue_url(self, url): + pass - def post_process_page(self, url): - pass + def post_process_page(self, url): + pass diff --git a/tools/search/crawler/lib/threadwrapper.py b/tools/search/crawler/lib/threadwrapper.py index 4b39ac9e..b70571dd 100644 --- a/tools/search/crawler/lib/threadwrapper.py +++ b/tools/search/crawler/lib/threadwrapper.py @@ -7,16 +7,16 @@ from multiprocessing import Process # NOTE! Database connections and similar objects must be instantiated # in the subprocess, and not in the master, to be fully safe!
def threadwrapper(func, *args): - p = Process(target=func, args=args) - p.start() + p = Process(target=func, args=args) + p.start() - # Wait for the child to exit, or if an interrupt signal is delivered, - # forcibly terminate the child. - try: - p.join() - except KeyboardInterrupt, e: - print "Keyboard interrupt, terminating child process!" - p.terminate() - except Exception, e: - print "Exception %s, terminating child process!" % e - p.terminate() + # Wait for the child to exit, or if an interrupt signal is delivered, + # forcibly terminate the child. + try: + p.join() + except KeyboardInterrupt, e: + print "Keyboard interrupt, terminating child process!" + p.terminate() + except Exception, e: + print "Exception %s, terminating child process!" % e + p.terminate() diff --git a/tools/search/crawler/listcrawler.py b/tools/search/crawler/listcrawler.py index 0fb2054a..ea11bec2 100755 --- a/tools/search/crawler/listcrawler.py +++ b/tools/search/crawler/listcrawler.py @@ -11,50 +11,50 @@ import sys import time def doit(opt): - cp = ConfigParser() - cp.read("search.ini") - psycopg2.extensions.register_type(psycopg2.extensions.UNICODE) - conn = psycopg2.connect(cp.get("search","db")) + cp = ConfigParser() + cp.read("search.ini") + psycopg2.extensions.register_type(psycopg2.extensions.UNICODE) + conn = psycopg2.connect(cp.get("search","db")) - curs = conn.cursor() + curs = conn.cursor() - if opt.list: - # Multiple lists can be specified with a comma separator (no spaces) - curs.execute("SELECT id,name FROM lists WHERE name=ANY(%(names)s)", { - 'names': opt.list.split(','), - }) - else: - curs.execute("SELECT id,name FROM lists WHERE active ORDER BY id") + if opt.list: + # Multiple lists can be specified with a comma separator (no spaces) + curs.execute("SELECT id,name FROM lists WHERE name=ANY(%(names)s)", { + 'names': opt.list.split(','), + }) + else: + curs.execute("SELECT id,name FROM lists WHERE active ORDER BY id") - listinfo = [(id,name) for id,name in curs.fetchall()] - c = MultiListCrawler(listinfo, conn, opt.status_interval, opt.commit_interval) - n = c.crawl(opt.full, opt.month) + listinfo = [(id,name) for id,name in curs.fetchall()] + c = MultiListCrawler(listinfo, conn, opt.status_interval, opt.commit_interval) + n = c.crawl(opt.full, opt.month) - # Update total counts - curs.execute("WITH t AS (SELECT list,count(*) AS c FROM messages GROUP BY list) UPDATE lists SET pagecount=t.c FROM t WHERE id=t.list") - # Indicate when we crawled - curs.execute("UPDATE lastcrawl SET lastcrawl=CURRENT_TIMESTAMP") - conn.commit() + # Update total counts + curs.execute("WITH t AS (SELECT list,count(*) AS c FROM messages GROUP BY list) UPDATE lists SET pagecount=t.c FROM t WHERE id=t.list") + # Indicate when we crawled + curs.execute("UPDATE lastcrawl SET lastcrawl=CURRENT_TIMESTAMP") + conn.commit() - log("Indexed %s messages" % n) - time.sleep(1) + log("Indexed %s messages" % n) + time.sleep(1) if __name__=="__main__": - parser = OptionParser() - parser.add_option("-l", "--list", dest='list', help="Crawl only this list") - parser.add_option("-m", "--month", dest='month', help="Crawl only this month") - parser.add_option("-f", "--full", dest='full', action="store_true", help="Make a full crawl") - parser.add_option("-t", "--status-interval", dest='status_interval', help="Seconds between status updates") - parser.add_option("-c", "--commit-interval", dest='commit_interval', help="Messages between each commit") + parser = OptionParser() + parser.add_option("-l", "--list", dest='list', help="Crawl only 
this list") + parser.add_option("-m", "--month", dest='month', help="Crawl only this month") + parser.add_option("-f", "--full", dest='full', action="store_true", help="Make a full crawl") + parser.add_option("-t", "--status-interval", dest='status_interval', help="Seconds between status updates") + parser.add_option("-c", "--commit-interval", dest='commit_interval', help="Messages between each commit") - (opt, args) = parser.parse_args() + (opt, args) = parser.parse_args() - if opt.full and opt.month: - print "Can't use both full and specific month!" - sys.exit(1) + if opt.full and opt.month: + print "Can't use both full and specific month!" + sys.exit(1) - # assign default values - opt.status_interval = opt.status_interval and int(opt.status_interval) or 30 - opt.commit_interval = opt.commit_interval and int(opt.commit_interval) or 500 + # assign default values + opt.status_interval = opt.status_interval and int(opt.status_interval) or 30 + opt.commit_interval = opt.commit_interval and int(opt.commit_interval) or 500 - threadwrapper(doit, opt) + threadwrapper(doit, opt) diff --git a/tools/search/crawler/listsync.py b/tools/search/crawler/listsync.py index 773d7bb4..c5bc72c6 100755 --- a/tools/search/crawler/listsync.py +++ b/tools/search/crawler/listsync.py @@ -8,42 +8,42 @@ import urllib import simplejson as json if __name__=="__main__": - cp = ConfigParser() - cp.read("search.ini") - psycopg2.extensions.register_type(psycopg2.extensions.UNICODE) - conn = psycopg2.connect(cp.get("search","db")) - curs = conn.cursor() + cp = ConfigParser() + cp.read("search.ini") + psycopg2.extensions.register_type(psycopg2.extensions.UNICODE) + conn = psycopg2.connect(cp.get("search","db")) + curs = conn.cursor() - u = urllib.urlopen("http://%s/community/lists/listinfo/" % cp.get("search", "web")) - obj = json.load(u) - u.close() + u = urllib.urlopen("http://%s/community/lists/listinfo/" % cp.get("search", "web")) + obj = json.load(u) + u.close() - # We don't care about the groups here, just the lists! - curs.execute("SELECT id, name, grp, active FROM lists") - lists = curs.fetchall() - for id, name, groupid, active in lists: - thislist = [x for x in obj['lists'] if x['id'] == id] - if len(thislist) == 0: - log("List %s should be removed, do that manually!" % name) - else: - # Compare contents of list - l = thislist[0] - if l['name'] != name: - log("Renaming list %s -> %s" % (name, l['name'])) - curs.execute("UPDATE lists SET name=%(name)s WHERE id=%(id)s", l) + # We don't care about the groups here, just the lists! + curs.execute("SELECT id, name, grp, active FROM lists") + lists = curs.fetchall() + for id, name, groupid, active in lists: + thislist = [x for x in obj['lists'] if x['id'] == id] + if len(thislist) == 0: + log("List %s should be removed, do that manually!" 
% name) + else: + # Compare contents of list + l = thislist[0] + if l['name'] != name: + log("Renaming list %s -> %s" % (name, l['name'])) + curs.execute("UPDATE lists SET name=%(name)s WHERE id=%(id)s", l) - if thislist[0]['active'] != active: - log("Changing active flag for %s to %s" % (l['name'], l['active'])) - curs.execute("UPDATE lists SET active=%(active)s WHERE id=%(id)s", l) - if thislist[0]['groupid'] != groupid: - log("Changing group for %s to %s" % (l['name'], l['groupid'])) - curs.execute("UPDATE lists SET grp=%(groupid)s WHERE id=%(id)s", l) + if thislist[0]['active'] != active: + log("Changing active flag for %s to %s" % (l['name'], l['active'])) + curs.execute("UPDATE lists SET active=%(active)s WHERE id=%(id)s", l) + if thislist[0]['groupid'] != groupid: + log("Changing group for %s to %s" % (l['name'], l['groupid'])) + curs.execute("UPDATE lists SET grp=%(groupid)s WHERE id=%(id)s", l) - for l in obj['lists']: - thislist = [x for x in lists if x[0] == l['id']] - if len(thislist) == 0: - log("Adding list %s" % l['name']) - curs.execute("INSERT INTO lists (id, name, grp, active, pagecount) VALUES (%(id)s, %(name)s, %(groupid)s, %(active)s, 0)", - l) + for l in obj['lists']: + thislist = [x for x in lists if x[0] == l['id']] + if len(thislist) == 0: + log("Adding list %s" % l['name']) + curs.execute("INSERT INTO lists (id, name, grp, active, pagecount) VALUES (%(id)s, %(name)s, %(groupid)s, %(active)s, 0)", + l) - conn.commit() + conn.commit() diff --git a/tools/search/crawler/webcrawler.py b/tools/search/crawler/webcrawler.py index 504bf8eb..2f895f17 100755 --- a/tools/search/crawler/webcrawler.py +++ b/tools/search/crawler/webcrawler.py @@ -11,31 +11,31 @@ import psycopg2 import time def doit(): - psycopg2.extensions.register_type(psycopg2.extensions.UNICODE) - conn = psycopg2.connect(cp.get("search","db")) + psycopg2.extensions.register_type(psycopg2.extensions.UNICODE) + conn = psycopg2.connect(cp.get("search","db")) - curs = conn.cursor() + curs = conn.cursor() - # Start by indexing the main website - log("Starting indexing of main website") - SitemapSiteCrawler("www.postgresql.org", conn, 1, cp.get("search", "frontendip"), True).crawl() - conn.commit() + # Start by indexing the main website + log("Starting indexing of main website") + SitemapSiteCrawler("www.postgresql.org", conn, 1, cp.get("search", "frontendip"), True).crawl() + conn.commit() - # Skip id=1, which is the main site.. - curs.execute("SELECT id, hostname, https FROM sites WHERE id>1") - for siteid, hostname, https in curs.fetchall(): - log("Starting indexing of %s" % hostname) - GenericSiteCrawler(hostname, conn, siteid, https).crawl() - conn.commit() + # Skip id=1, which is the main site.. 
+ curs.execute("SELECT id, hostname, https FROM sites WHERE id>1") + for siteid, hostname, https in curs.fetchall(): + log("Starting indexing of %s" % hostname) + GenericSiteCrawler(hostname, conn, siteid, https).crawl() + conn.commit() - curs.execute("WITH t AS (SELECT site,count(*) AS c FROM webpages GROUP BY site) UPDATE sites SET pagecount=t.c FROM t WHERE id=t.site") - conn.commit() + curs.execute("WITH t AS (SELECT site,count(*) AS c FROM webpages GROUP BY site) UPDATE sites SET pagecount=t.c FROM t WHERE id=t.site") + conn.commit() - time.sleep(1) + time.sleep(1) if __name__=="__main__": - cp = ConfigParser() - cp.read("search.ini") + cp = ConfigParser() + cp.read("search.ini") - threadwrapper(doit) + threadwrapper(doit) diff --git a/tools/varnishqueue/nagios_check.py b/tools/varnishqueue/nagios_check.py index 5871c427..b9fa622e 100755 --- a/tools/varnishqueue/nagios_check.py +++ b/tools/varnishqueue/nagios_check.py @@ -10,30 +10,30 @@ WARNING_THRESHOLD=timedelta(minutes=5) CRITICAL_THRESHOLD=timedelta(minutes=15) if __name__ == "__main__": - if len(sys.argv) != 2: - print "Usage: nagios_check.py <dsn>" - sys.exit(1) + if len(sys.argv) != 2: + print "Usage: nagios_check.py <dsn>" + sys.exit(1) - conn = psycopg2.connect(sys.argv[1]) - curs = conn.cursor() + conn = psycopg2.connect(sys.argv[1]) + curs = conn.cursor() - # Get the oldest entry that has not been completed, if any - curs.execute("SELECT COALESCE(max(now()-added), '0') FROM varnishqueue.queue WHERE completed IS NULL") - rows = curs.fetchall() - conn.close() + # Get the oldest entry that has not been completed, if any + curs.execute("SELECT COALESCE(max(now()-added), '0') FROM varnishqueue.queue WHERE completed IS NULL") + rows = curs.fetchall() + conn.close() - if len(rows) == 0: - print "OK, queue is empty" - sys.exit(0) + if len(rows) == 0: + print "OK, queue is empty" + sys.exit(0) - age = rows[0][0] + age = rows[0][0] - if age < WARNING_THRESHOLD: - print "OK, queue age is %s" % age - sys.exit(0) - elif age < CRITICAL_THRESHOLD: - print "WARNING, queue age is %s" % age - sys.exit(1) - else: - print "CRITICAL, queue age is %s" % age - sys.exit(2) + if age < WARNING_THRESHOLD: + print "OK, queue age is %s" % age + sys.exit(0) + elif age < CRITICAL_THRESHOLD: + print "WARNING, queue age is %s" % age + sys.exit(1) + else: + print "CRITICAL, queue age is %s" % age + sys.exit(2) diff --git a/tools/varnishqueue/varnish_queue.py b/tools/varnishqueue/varnish_queue.py index 1e66ea57..c66d87dc 100755 --- a/tools/varnishqueue/varnish_queue.py +++ b/tools/varnishqueue/varnish_queue.py @@ -16,135 +16,135 @@ import psycopg2 from setproctitle import setproctitle def do_purge(consumername, headers): - try: - conn = httplib.HTTPSConnection('%s.postgresql.org' % consumername) - conn.request("GET", "/varnish-purge-url", '', headers) - resp = conn.getresponse() - conn.close() - if resp.status == 200: - return True - logging.warning("Varnish purge on %s returned status %s (%s)" % (consumername, resp.status, resp.reason)) - return False - except Exception, ex: - logging.error("Exception purging on %s: %s" % (consumername, ex)) - return False - return True + try: + conn = httplib.HTTPSConnection('%s.postgresql.org' % consumername) + conn.request("GET", "/varnish-purge-url", '', headers) + resp = conn.getresponse() + conn.close() + if resp.status == 200: + return True + logging.warning("Varnish purge on %s returned status %s (%s)" % (consumername, resp.status, resp.reason)) + return False + except Exception, ex: + logging.error("Exception purging on %s: 
%s" % (consumername, ex)) + return False + return True def worker(consumerid, consumername, dsn): - logging.info("Starting worker for %s" % consumername) - setproctitle("varnish_queue - worker for %s" % consumername) + logging.info("Starting worker for %s" % consumername) + setproctitle("varnish_queue - worker for %s" % consumername) - conn = psycopg2.connect(dsn) - curs = conn.cursor() - curs.execute("LISTEN varnishqueue") - conn.commit() + conn = psycopg2.connect(dsn) + curs = conn.cursor() + curs.execute("LISTEN varnishqueue") + conn.commit() - while True: - # See if there is something to pick up off the queue - curs.execute("SELECT id, mode, expr FROM varnishqueue.queue WHERE consumerid=%(consumerid)s AND completed IS NULL FOR UPDATE", { - 'consumerid': consumerid, - }) - res = curs.fetchall() + while True: + # See if there is something to pick up off the queue + curs.execute("SELECT id, mode, expr FROM varnishqueue.queue WHERE consumerid=%(consumerid)s AND completed IS NULL FOR UPDATE", { + 'consumerid': consumerid, + }) + res = curs.fetchall() - failed = False + failed = False - if len(res): - idlist = [] - for r in res: - # Do something with this entry... - if r[1] == 'P': - logging.info("Purging url %s on %s" % (r[2], consumername)) - if not do_purge(consumername, {'X-Purge-URL': r[2]}): - # Failed, but we will try again, so don't add to list of removals - failed = True - continue - elif r[1] == 'X': - logging.info("Purging expression %s on %s" % (r[2], consumername)) - if not do_purge(consumername, {'X-Purge-Expr': r[2]}): - failed = True - continue - elif r[1] == 'K': - logging.info("Purging xkey %s on %s" % (r[2], consumername)) - if not do_purge(consumername, {'X-Purge-Xkey': r[2]}): - failed = True - continue - else: - logging.warning("Unknown purge type %s on %s, ignoring." % (r[1], consumername)) + if len(res): + idlist = [] + for r in res: + # Do something with this entry... + if r[1] == 'P': + logging.info("Purging url %s on %s" % (r[2], consumername)) + if not do_purge(consumername, {'X-Purge-URL': r[2]}): + # Failed, but we will try again, so don't add to list of removals + failed = True + continue + elif r[1] == 'X': + logging.info("Purging expression %s on %s" % (r[2], consumername)) + if not do_purge(consumername, {'X-Purge-Expr': r[2]}): + failed = True + continue + elif r[1] == 'K': + logging.info("Purging xkey %s on %s" % (r[2], consumername)) + if not do_purge(consumername, {'X-Purge-Xkey': r[2]}): + failed = True + continue + else: + logging.warning("Unknown purge type %s on %s, ignoring." 
% (r[1], consumername)) - # Schedule for removal - idlist.append(r[0]) + # Schedule for removal + idlist.append(r[0]) - # Then remove from queue - curs.execute("UPDATE varnishqueue.queue SET completed=CURRENT_TIMESTAMP WHERE id=ANY(%(idlist)s)", { - 'idlist': idlist - }) - conn.commit() - if failed: - time.sleep(5) - else: - # Nothing, so roll back the transaction and wait - conn.rollback() + # Then remove from queue + curs.execute("UPDATE varnishqueue.queue SET completed=CURRENT_TIMESTAMP WHERE id=ANY(%(idlist)s)", { + 'idlist': idlist + }) + conn.commit() + if failed: + time.sleep(5) + else: + # Nothing, so roll back the transaction and wait + conn.rollback() - select.select([conn],[],[],5*60) - conn.poll() - while conn.notifies: - conn.notifies.pop() - # Loop back up and process the full queue + select.select([conn],[],[],5*60) + conn.poll() + while conn.notifies: + conn.notifies.pop() + # Loop back up and process the full queue def housekeeper(dsn): - logging.info("Starting housekeeper") - setproctitle("varnish_queue - housekeeper") - conn = psycopg2.connect(dsn) - curs = conn.cursor() + logging.info("Starting housekeeper") + setproctitle("varnish_queue - housekeeper") + conn = psycopg2.connect(dsn) + curs = conn.cursor() - while True: - curs.execute("DELETE FROM varnishqueue.queue WHERE completed IS NOT NULL") - if curs.rowcount > 0: - conn.commit() - else: - conn.rollback() - time.sleep(5*60) + while True: + curs.execute("DELETE FROM varnishqueue.queue WHERE completed IS NOT NULL") + if curs.rowcount > 0: + conn.commit() + else: + conn.rollback() + time.sleep(5*60) if __name__ == "__main__": - if len(sys.argv) != 2: - print "Usage: varnish_queue.py <dsn>" - sys.exit(1) + if len(sys.argv) != 2: + print "Usage: varnish_queue.py <dsn>" + sys.exit(1) - logging.basicConfig(format='%(asctime)s:%(levelname)s:%(message)s', level=logging.INFO) + logging.basicConfig(format='%(asctime)s:%(levelname)s:%(message)s', level=logging.INFO) - conn = psycopg2.connect(sys.argv[1]) + conn = psycopg2.connect(sys.argv[1]) - curs = conn.cursor() - curs.execute("SELECT consumerid, consumer FROM varnishqueue.consumers") - consumers = curs.fetchall() - conn.close() + curs = conn.cursor() + curs.execute("SELECT consumerid, consumer FROM varnishqueue.consumers") + consumers = curs.fetchall() + conn.close() - # Now spawn a worker for each - processes = [] - for consumerid, consumername in consumers: - p = multiprocessing.Process(target=worker, args=(consumerid, consumername, sys.argv[1])) - p.start() - processes.append(p) + # Now spawn a worker for each + processes = [] + for consumerid, consumername in consumers: + p = multiprocessing.Process(target=worker, args=(consumerid, consumername, sys.argv[1])) + p.start() + processes.append(p) - # Start a housekeeping process as well - p = multiprocessing.Process(target=housekeeper, args=(sys.argv[1],)) - p.start() - processes.append(p) + # Start a housekeeping process as well + p = multiprocessing.Process(target=housekeeper, args=(sys.argv[1],)) + p.start() + processes.append(p) - # They should never die, but if they do, commit suicide and - # restart everything.
- while True: - processes[0].join(timeout=120) - for p in processes: - if not p.is_alive(): - logging.warning("Child process died, killing all and exiting") - for p2 in processes: - try: - p2.terminate() - except: - pass - logging.error("Children killed, exiting") - sys.exit(1) - # If all processes are alive, loop back up and try again + while True: + processes[0].join(timeout=120) + for p in processes: + if not p.is_alive(): + logging.warning("Child process died, killing all and exiting") + for p2 in processes: + try: + p2.terminate() + except: + pass + logging.error("Children killed, exiting") + sys.exit(1) + # If all processes are alive, loop back up and try again
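
Since the whole point of this change is to be whitespace-only, it is easy to double-check after applying: git diff -w against the parent commit should produce no output, and no .py file in the tree should contain a tab character any more. The helper below is a minimal sketch of such a check (hypothetical, not shipped with this patch; written in the same Python 2 the tree uses). It is deliberately strict and flags a tab anywhere in a file, including inside string literals:

#!/usr/bin/env python
# check_tabs.py - hypothetical helper, not part of this patch.
# Walk a checkout and report every *.py file that still contains
# a tab character, so a tabs-to-spaces conversion can be verified.
import os
import sys

def find_tabs(root):
    hits = []
    for dirpath, dirnames, filenames in os.walk(root):
        # Don't descend into git metadata
        if '.git' in dirnames:
            dirnames.remove('.git')
        for f in filenames:
            if not f.endswith('.py'):
                continue
            path = os.path.join(dirpath, f)
            with open(path) as fp:
                for lineno, line in enumerate(fp, 1):
                    if '\t' in line:
                        hits.append((path, lineno))
    return hits

if __name__ == "__main__":
    root = sys.argv[1] if len(sys.argv) > 1 else '.'
    hits = find_tabs(root)
    for path, lineno in hits:
        print "%s:%d: tab character" % (path, lineno)
    sys.exit(1 if hits else 0)

Running it over pgweb/ and tools/ on a converted checkout should print nothing and exit 0.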