diff --git a/pgweb/account/admin.py b/pgweb/account/admin.py index 6127db3c..3c1771c0 100644 --- a/pgweb/account/admin.py +++ b/pgweb/account/admin.py @@ -8,6 +8,7 @@ import base64 from models import CommunityAuthSite, CommunityAuthOrg + class CommunityAuthSiteAdminForm(forms.ModelForm): class Meta: model = CommunityAuthSite @@ -24,9 +25,11 @@ class CommunityAuthSiteAdminForm(forms.ModelForm): raise forms.ValidationError("Crypto key must be 16, 24 or 32 bytes before being base64-encoded") return self.cleaned_data['cryptkey'] + class CommunityAuthSiteAdmin(admin.ModelAdmin): form = CommunityAuthSiteAdminForm + class PGUserChangeForm(UserChangeForm): """just like UserChangeForm, butremoves "username" requirement""" def __init__(self, *args, **kwargs): @@ -38,6 +41,7 @@ class PGUserChangeForm(UserChangeForm): if self.fields.get('username'): del self.fields['username'] + class PGUserAdmin(UserAdmin): """overrides default Django user admin""" form = PGUserChangeForm @@ -48,7 +52,8 @@ class PGUserAdmin(UserAdmin): return self.readonly_fields + ('username',) return self.readonly_fields + admin.site.register(CommunityAuthSite, CommunityAuthSiteAdmin) admin.site.register(CommunityAuthOrg) -admin.site.unregister(User) # have to unregister default User Admin... -admin.site.register(User, PGUserAdmin) # ...in order to add overrides +admin.site.unregister(User) # have to unregister default User Admin... +admin.site.register(User, PGUserAdmin) # ...in order to add overrides diff --git a/pgweb/account/forms.py b/pgweb/account/forms.py index d7b67d32..a1652c71 100644 --- a/pgweb/account/forms.py +++ b/pgweb/account/forms.py @@ -12,6 +12,7 @@ from recaptcha import ReCaptchaField import logging log = logging.getLogger(__name__) + def _clean_username(username): username = username.lower() @@ -23,6 +24,7 @@ def _clean_username(username): return username raise forms.ValidationError("This username is already in use") + # Override some error handling only in the default authentication form class PgwebAuthenticationForm(AuthenticationForm): def clean(self): @@ -38,6 +40,7 @@ class PgwebAuthenticationForm(AuthenticationForm): return self.cleaned_data raise e + class CommunityAuthConsentForm(forms.Form): consent = forms.BooleanField(help_text='Consent to sharing this data') next = forms.CharField(widget=forms.widgets.HiddenInput()) @@ -48,6 +51,7 @@ class CommunityAuthConsentForm(forms.Form): self.fields['consent'].label = 'Consent to sharing data with {0}'.format(self.orgname) + class SignupForm(forms.Form): username = forms.CharField(max_length=30) first_name = forms.CharField(max_length=30) @@ -84,6 +88,7 @@ class SignupForm(forms.Form): return email raise forms.ValidationError("A user with this email address is already registered") + class SignupOauthForm(forms.Form): username = forms.CharField(max_length=30) first_name = forms.CharField(max_length=30, required=False) @@ -106,25 +111,30 @@ class SignupOauthForm(forms.Form): def clean_email(self): return self.cleaned_data['email'].lower() + class UserProfileForm(forms.ModelForm): class Meta: model = UserProfile exclude = ('user',) + class UserForm(forms.ModelForm): def __init__(self, *args, **kwargs): super(UserForm, self).__init__(*args, **kwargs) self.fields['first_name'].required = True self.fields['last_name'].required = True + class Meta: model = User fields = ('first_name', 'last_name', ) + class ContributorForm(forms.ModelForm): class Meta: model = Contributor exclude = ('ctype', 'lastname', 'firstname', 'user', ) + class ChangeEmailForm(forms.Form): 
email = forms.EmailField() email2 = forms.EmailField(label="Repeat email") @@ -156,5 +166,6 @@ class ChangeEmailForm(forms.Form): raise forms.ValidationError("Email addresses don't match") return email2 + class PgwebPasswordResetForm(forms.Form): email = forms.EmailField() diff --git a/pgweb/account/models.py b/pgweb/account/models.py index 0c4de48b..8db9c5f7 100644 --- a/pgweb/account/models.py +++ b/pgweb/account/models.py @@ -1,6 +1,7 @@ from django.db import models from django.contrib.auth.models import User + class CommunityAuthOrg(models.Model): orgname = models.CharField(max_length=100, null=False, blank=False, help_text="Name of the organisation") @@ -9,6 +10,7 @@ class CommunityAuthOrg(models.Model): def __unicode__(self): return self.orgname + class CommunityAuthSite(models.Model): name = models.CharField(max_length=100, null=False, blank=False, help_text="Note that the value in this field is shown on the login page, so make sure it's user-friendly!") @@ -23,6 +25,7 @@ class CommunityAuthSite(models.Model): def __unicode__(self): return self.name + class CommunityAuthConsent(models.Model): user = models.ForeignKey(User, null=False, blank=False) org = models.ForeignKey(CommunityAuthOrg, null=False, blank=False) @@ -31,6 +34,7 @@ class CommunityAuthConsent(models.Model): class Meta: unique_together = (('user', 'org'), ) + class EmailChangeToken(models.Model): user = models.OneToOneField(User, null=False, blank=False) email = models.EmailField(max_length=75, null=False, blank=False) diff --git a/pgweb/account/oauthclient.py b/pgweb/account/oauthclient.py index 27c79848..432a5188 100644 --- a/pgweb/account/oauthclient.py +++ b/pgweb/account/oauthclient.py @@ -14,6 +14,7 @@ log = logging.getLogger(__name__) class OAuthException(Exception): pass + # # Generic OAuth login for multiple providers # @@ -97,10 +98,13 @@ def oauth_login_google(request): 'google', 'https://accounts.google.com/o/oauth2/v2/auth', 'https://accounts.google.com/o/oauth2/token', - ['https://www.googleapis.com/auth/userinfo.email', - 'https://www.googleapis.com/auth/userinfo.profile'], + [ + 'https://www.googleapis.com/auth/userinfo.email', + 'https://www.googleapis.com/auth/userinfo.profile' + ], _google_auth_data) + # # Github login # Registration: https://github.com/settings/developers @@ -119,7 +123,7 @@ def oauth_login_github(request): else: # Some github accounts have no name on them, so we can just # let the user fill it out manually in that case. 
- n = ['',''] + n = ['', ''] # Email is at a separate endpoint r = oa.get('https://api.github.com/user/emails').json() for e in r: @@ -139,6 +143,7 @@ def oauth_login_github(request): ['user:email', ], _github_auth_data) + # # Facebook login # Registration: https://developers.facebook.com/apps @@ -181,7 +186,7 @@ def oauth_login_microsoft(request): 'microsoft', 'https://login.live.com/oauth20_authorize.srf', 'https://login.live.com/oauth20_token.srf', - ['wl.basic', 'wl.emails' ], + ['wl.basic', 'wl.emails', ], _microsoft_auth_data) diff --git a/pgweb/account/recaptcha.py b/pgweb/account/recaptcha.py index 9b667955..e86bd2a8 100644 --- a/pgweb/account/recaptcha.py +++ b/pgweb/account/recaptcha.py @@ -14,6 +14,7 @@ import json import logging log = logging.getLogger(__name__) + class ReCaptchaWidget(forms.widgets.Widget): def render(self, name, value, attrs=None): if settings.NOCAPTCHA: diff --git a/pgweb/account/views.py b/pgweb/account/views.py index f210013e..a3f37e64 100644 --- a/pgweb/account/views.py +++ b/pgweb/account/views.py @@ -44,7 +44,8 @@ log = logging.getLogger(__name__) # The value we store in user.password for oauth logins. This is # a value that must not match any hashers. -OAUTH_PASSWORD_STORE='oauth_signin_account_no_password' +OAUTH_PASSWORD_STORE = 'oauth_signin_account_no_password' + @login_required def home(request): @@ -61,6 +62,7 @@ def home(request): 'profservs': myprofservs, }) + objtypes = { 'news': { 'title': 'News Article', @@ -85,6 +87,7 @@ objtypes = { }, } + @login_required @transaction.atomic def profile(request): @@ -128,11 +131,12 @@ def profile(request): contribform = ContributorForm(instance=contrib) return render_pgweb(request, 'account', 'account/userprofileform.html', { - 'userform': userform, - 'profileform': profileform, - 'contribform': contribform, - 'can_change_email': can_change_email, - }) + 'userform': userform, + 'profileform': profileform, + 'contribform': contribform, + 'can_change_email': can_change_email, + }) + @login_required @transaction.atomic @@ -158,12 +162,13 @@ def change_email(request): token=generate_random_token()) token.save() - send_template_mail(settings.ACCOUNTS_NOREPLY_FROM, - form.cleaned_data['email'], - 'Your postgresql.org community account', - 'account/email_change_email.txt', - { 'token': token , 'user': request.user, } - ) + send_template_mail( + settings.ACCOUNTS_NOREPLY_FROM, + form.cleaned_data['email'], + 'Your postgresql.org community account', + 'account/email_change_email.txt', + {'token': token, 'user': request.user, } + ) return HttpResponseRedirect('done/') else: form = ChangeEmailForm(request.user) @@ -171,7 +176,8 @@ def change_email(request): return render_pgweb(request, 'account', 'account/emailchangeform.html', { 'form': form, 'token': token, - }) + }) + @login_required @transaction.atomic @@ -193,7 +199,8 @@ def confirm_change_email(request, tokenhash): return render_pgweb(request, 'account', 'account/emailchangecompleted.html', { 'token': tokenhash, 'success': token and True or False, - }) + }) + @login_required def listobjects(request, objtype): @@ -211,24 +218,28 @@ def listobjects(request, objtype): 'suburl': objtype, }) + @login_required def orglist(request): orgs = Organisation.objects.filter(approved=True) return render_pgweb(request, 'account', 'account/orglist.html', { - 'orgs': orgs, + 'orgs': orgs, }) + def login(request): return authviews.login(request, template_name='account/login.html', authentication_form=PgwebAuthenticationForm, extra_context={ - 'oauth_providers': [(k,v) for k,v in 
sorted(settings.OAUTH.items())], + 'oauth_providers': [(k, v) for k, v in sorted(settings.OAUTH.items())], }) + def logout(request): return authviews.logout_then_login(request, login_url='/') + def changepwd(request): if hasattr(request.user, 'password') and request.user.password == OAUTH_PASSWORD_STORE: return HttpServerError(request, "This account cannot change password as it's connected to a third party login site.") @@ -238,6 +249,7 @@ def changepwd(request): template_name='account/password_change.html', post_change_redirect='/account/changepwd/done/') + def resetpwd(request): # Basic django password reset feature is completely broken. For example, it does not support # resetting passwords for users with "old hashes", which means they have no way to ever @@ -255,32 +267,36 @@ def resetpwd(request): if form.is_valid(): log.info("Initiating password set from {0} for {1}".format(get_client_ip(request), form.cleaned_data['email'])) token = default_token_generator.make_token(u) - send_template_mail(settings.ACCOUNTS_NOREPLY_FROM, - form.cleaned_data['email'], - 'Password reset for your postgresql.org account', - 'account/password_reset_email.txt', - { - 'user': u, - 'uid': urlsafe_base64_encode(force_bytes(u.pk)), - 'token': token, - }, + send_template_mail( + settings.ACCOUNTS_NOREPLY_FROM, + form.cleaned_data['email'], + 'Password reset for your postgresql.org account', + 'account/password_reset_email.txt', + { + 'user': u, + 'uid': urlsafe_base64_encode(force_bytes(u.pk)), + 'token': token, + }, ) return HttpResponseRedirect('/account/reset/done/') else: form = PgwebPasswordResetForm() return render_pgweb(request, 'account', 'account/password_reset.html', { - 'form': form, + 'form': form, }) + def change_done(request): log.info("Password change done from {0}".format(get_client_ip(request))) return authviews.password_change_done(request, template_name='account/password_change_done.html') + def reset_done(request): log.info("Password reset done from {0}".format(get_client_ip(request))) return authviews.password_reset_done(request, template_name='account/password_reset_done.html') + def reset_confirm(request, uidb64, token): log.info("Confirming password reset for uidb {0}, token {1} from {2}".format(uidb64, token, get_client_ip(request))) return authviews.password_reset_confirm(request, @@ -289,10 +305,12 @@ def reset_confirm(request, uidb64, token): template_name='account/password_reset_confirm.html', post_reset_redirect='/account/reset/complete/') + def reset_complete(request): log.info("Password reset completed for user from {0}".format(get_client_ip(request))) return authviews.password_reset_complete(request, template_name='account/password_reset_complete.html') + @script_sources('https://www.google.com/recaptcha/') @script_sources('https://www.gstatic.com/recaptcha/') @frame_sources('https://www.google.com/') @@ -326,7 +344,7 @@ def signup(request): form.cleaned_data['email'], 'Your new postgresql.org community account', 'account/new_account_email.txt', - { 'uid': urlsafe_base64_encode(force_bytes(user.id)), 'token': token, 'user': user} + {'uid': urlsafe_base64_encode(force_bytes(user.id)), 'token': token, 'user': user} ) return HttpResponseRedirect('/account/signup/complete/') @@ -334,16 +352,16 @@ def signup(request): form = SignupForm(get_client_ip(request)) return render_pgweb(request, 'account', 'base/form.html', { - 'form': form, - 'formitemtype': 'Account', - 'form_intro': """ + 'form': form, + 'formitemtype': 'Account', + 'form_intro': """ To sign up for a free community 
account, enter your preferred userid and email address. Note that a community account is only needed if you want to submit information - all content is available for reading without an account. """, - 'savebutton': 'Sign up', - 'operation': 'New', - 'recaptcha': True, + 'savebutton': 'Sign up', + 'operation': 'New', + 'recaptcha': True, }) @@ -429,12 +447,12 @@ def signup_oauth(request): 'operation': 'New account', 'savebutton': 'Sign up for new account', 'recaptcha': True, - }) + }) + #### -## Community authentication endpoint +# Community authentication endpoint #### - def communityauth(request, siteid): # Get whatever site the user is trying to log in to. site = get_object_or_404(CommunityAuthSite, pk=siteid) @@ -476,23 +494,24 @@ def communityauth(request, siteid): nexturl = request.POST['next'] else: nexturl = '/account/auth/%s/%s' % (siteid, urldata) - return authviews.login(request, template_name='account/login.html', - authentication_form=PgwebAuthenticationForm, - extra_context={ - 'sitename': site.name, - 'next': nexturl, - 'oauth_providers': [(k,v) for k,v in sorted(settings.OAUTH.items())], - }, - ) + return authviews.login( + request, template_name='account/login.html', + authentication_form=PgwebAuthenticationForm, + extra_context={ + 'sitename': site.name, + 'next': nexturl, + 'oauth_providers': [(k, v) for k, v in sorted(settings.OAUTH.items())], + }, + ) # When we reach this point, the user *has* already been authenticated. # The request variable "su" *may* contain a suburl and should in that # case be passed along to the site we're authenticating for. And of # course, we fill a structure with information about the user. - if request.user.first_name=='' or request.user.last_name=='' or request.user.email=='': + if request.user.first_name == '' or request.user.last_name == '' or request.user.email == '': return render_pgweb(request, 'account', 'account/communityauth_noinfo.html', { - }) + }) # Check for cooloff period if site.cooloff_hours > 0: @@ -501,7 +520,7 @@ def communityauth(request, siteid): request.user.username, site.name)) return render_pgweb(request, 'account', 'account/communityauth_cooloff.html', { 'site': site, - }) + }) if site.org.require_consent: if not CommunityAuthConsent.objects.filter(org=site.org, user=request.user).exists(): @@ -513,7 +532,7 @@ def communityauth(request, siteid): 'f': request.user.first_name.encode('utf-8'), 'l': request.user.last_name.encode('utf-8'), 'e': request.user.email.encode('utf-8'), - } + } if d: info['d'] = d.encode('utf-8') elif su: @@ -525,16 +544,16 @@ def communityauth(request, siteid): # Encrypt it with the shared key (and IV!) 
r = Random.new() - iv = r.read(16) # Always 16 bytes for AES + iv = r.read(16) # Always 16 bytes for AES encryptor = AES.new(base64.b64decode(site.cryptkey), AES.MODE_CBC, iv) - cipher = encryptor.encrypt(s + ' ' * (16-(len(s) % 16))) #Pad to even 16 bytes + cipher = encryptor.encrypt(s + ' ' * (16 - (len(s) % 16))) # Pad to even 16 bytes # Generate redirect return HttpResponseRedirect("%s?i=%s&d=%s" % ( - site.redirecturl, - base64.b64encode(iv, "-_"), - base64.b64encode(cipher, "-_"), - )) + site.redirecturl, + base64.b64encode(iv, "-_"), + base64.b64encode(cipher, "-_"), + )) def communityauth_logout(request, siteid): @@ -547,6 +566,7 @@ def communityauth_logout(request, siteid): # Redirect user back to the specified suburl return HttpResponseRedirect("%s?s=logout" % site.redirecturl) + @login_required def communityauth_consent(request, siteid): org = get_object_or_404(CommunityAuthSite, id=siteid).org @@ -554,7 +574,7 @@ def communityauth_consent(request, siteid): form = CommunityAuthConsentForm(org.orgname, data=request.POST) if form.is_valid(): CommunityAuthConsent.objects.get_or_create(user=request.user, org=org, - defaults={'consentgiven':datetime.now()}, + defaults={'consentgiven': datetime.now()}, ) return HttpResponseRedirect(form.cleaned_data['next']) else: @@ -571,9 +591,9 @@ def communityauth_consent(request, siteid): def _encrypt_site_response(site, s): # Encrypt it with the shared key (and IV!) r = Random.new() - iv = r.read(16) # Always 16 bytes for AES + iv = r.read(16) # Always 16 bytes for AES encryptor = AES.new(base64.b64decode(site.cryptkey), AES.MODE_CBC, iv) - cipher = encryptor.encrypt(s + ' ' * (16-(len(s) % 16))) #Pad to even 16 bytes + cipher = encryptor.encrypt(s + ' ' * (16 - (len(s) % 16))) # Pad to even 16 bytes # Base64-encode the response, just to be consistent return "%s&%s" % ( @@ -581,6 +601,7 @@ def _encrypt_site_response(site, s): base64.b64encode(cipher, '-_'), ) + def communityauth_search(request, siteid): # Perform a search for users. The response will be encrypted with the site # key to prevent abuse, therefor we need the site. @@ -605,6 +626,7 @@ def communityauth_search(request, siteid): return HttpResponse(_encrypt_site_response(site, j)) + def communityauth_getkeys(request, siteid, since=None): # Get any updated ssh keys for community accounts. 
# The response will be encrypted with the site key to prevent abuse, diff --git a/pgweb/contributors/admin.py b/pgweb/contributors/admin.py index 2f0ec4f2..e8a3107e 100644 --- a/pgweb/contributors/admin.py +++ b/pgweb/contributors/admin.py @@ -7,6 +7,7 @@ from pgweb.core.lookups import UserLookup from models import Contributor, ContributorType + class ContributorAdminForm(forms.ModelForm): class Meta: model = Contributor @@ -20,8 +21,10 @@ class ContributorAdminForm(forms.ModelForm): self.fields['user'].widget.can_add_related = False self.fields['user'].widget.can_change_related = False + class ContributorAdmin(admin.ModelAdmin): form = ContributorAdminForm + admin.site.register(ContributorType) admin.site.register(Contributor, ContributorAdmin) diff --git a/pgweb/contributors/models.py b/pgweb/contributors/models.py index 43cd3bae..f75f8c85 100644 --- a/pgweb/contributors/models.py +++ b/pgweb/contributors/models.py @@ -1,6 +1,7 @@ from django.db import models from django.contrib.auth.models import User + class ContributorType(models.Model): typename = models.CharField(max_length=32, null=False, blank=False) sortorder = models.IntegerField(null=False, default=100) @@ -16,6 +17,7 @@ class ContributorType(models.Model): class Meta: ordering = ('sortorder',) + class Contributor(models.Model): ctype = models.ForeignKey(ContributorType) lastname = models.CharField(max_length=100, null=False, blank=False) @@ -27,7 +29,7 @@ class Contributor(models.Model): contribution = models.TextField(null=True, blank=True) user = models.ForeignKey(User, null=True, blank=True) - send_notification=True + send_notification = True purge_urls = ('/community/contributors/', ) def __unicode__(self): diff --git a/pgweb/contributors/views.py b/pgweb/contributors/views.py index 0be074a4..da068aa5 100644 --- a/pgweb/contributors/views.py +++ b/pgweb/contributors/views.py @@ -2,6 +2,7 @@ from pgweb.util.contexts import render_pgweb from models import ContributorType + def completelist(request): contributortypes = list(ContributorType.objects.all()) return render_pgweb(request, 'community', 'contributors/list.html', { diff --git a/pgweb/core/admin.py b/pgweb/core/admin.py index b03f1de0..0bfc57da 100644 --- a/pgweb/core/admin.py +++ b/pgweb/core/admin.py @@ -9,6 +9,7 @@ from pgweb.core.models import ModerationNotification from pgweb.core.lookups import UserLookup + class OrganisationAdminForm(forms.ModelForm): class Meta: model = Organisation @@ -23,6 +24,7 @@ class OrganisationAdminForm(forms.ModelForm): self.fields['managers'].widget.can_change_related = False self.fields['managers'].widget.can_delete_related = False + class OrganisationAdmin(admin.ModelAdmin): form = OrganisationAdminForm list_display = ('name', 'approved', 'lastconfirmed',) @@ -30,13 +32,14 @@ class OrganisationAdmin(admin.ModelAdmin): ordering = ('name', ) search_fields = ('name', ) + class VersionAdmin(admin.ModelAdmin): list_display = ('versionstring', 'reldate', 'supported', 'current', ) + admin.site.register(Version, VersionAdmin) admin.site.register(OrganisationType) admin.site.register(Organisation, OrganisationAdmin) admin.site.register(ImportedRSSFeed) admin.site.register(ImportedRSSItem) admin.site.register(ModerationNotification) - diff --git a/pgweb/core/feeds.py b/pgweb/core/feeds.py index 45a0ee67..99b268a9 100644 --- a/pgweb/core/feeds.py +++ b/pgweb/core/feeds.py @@ -4,6 +4,7 @@ from models import Version from datetime import datetime, time + class VersionFeed(Feed): title = "PostgreSQL latest versions" link = 
"https://www.postgresql.org/" @@ -19,4 +20,4 @@ class VersionFeed(Feed): return "https://www.postgresql.org/docs/%s/%s" % (obj.numtree, obj.relnotes) def item_pubdate(self, obj): - return datetime.combine(obj.reldate,time.min) + return datetime.combine(obj.reldate, time.min) diff --git a/pgweb/core/forms.py b/pgweb/core/forms.py index 80e41876..ac0716c4 100644 --- a/pgweb/core/forms.py +++ b/pgweb/core/forms.py @@ -4,6 +4,7 @@ from django.forms import ValidationError from models import Organisation from django.contrib.auth.models import User + class OrganisationForm(forms.ModelForm): remove_manager = forms.ModelMultipleChoiceField(required=False, queryset=None, label="Current manager(s)", help_text="Select one or more managers to remove") add_manager = forms.EmailField(required=False) @@ -54,6 +55,7 @@ class OrganisationForm(forms.ModelForm): def apply_submitter(self, model, User): model.managers.add(User) + class MergeOrgsForm(forms.Form): merge_into = forms.ModelChoiceField(queryset=Organisation.objects.all()) merge_from = forms.ModelChoiceField(queryset=Organisation.objects.all()) diff --git a/pgweb/core/lookups.py b/pgweb/core/lookups.py index 3f64cb2e..3d42ffe5 100644 --- a/pgweb/core/lookups.py +++ b/pgweb/core/lookups.py @@ -22,4 +22,5 @@ class UserLookup(ModelLookup): # Display for choice listings return u"%s (%s)" % (item.username, item.get_full_name()) + registry.register(UserLookup) diff --git a/pgweb/core/management/commands/cleanup_old_records.py b/pgweb/core/management/commands/cleanup_old_records.py index 4fda8693..27d3bc13 100644 --- a/pgweb/core/management/commands/cleanup_old_records.py +++ b/pgweb/core/management/commands/cleanup_old_records.py @@ -17,6 +17,7 @@ from datetime import datetime, timedelta from pgweb.account.models import EmailChangeToken + class Command(BaseCommand): help = 'Cleanup old records' @@ -32,4 +33,4 @@ class Command(BaseCommand): # Clean up old email change tokens with transaction.atomic(): - EmailChangeToken.objects.filter(sentat__lt=datetime.now()-timedelta(hours=24)).delete() + EmailChangeToken.objects.filter(sentat__lt=datetime.now() - timedelta(hours=24)).delete() diff --git a/pgweb/core/management/commands/fetch_rss_feeds.py b/pgweb/core/management/commands/fetch_rss_feeds.py index 44c6ee7c..39bfc9c7 100644 --- a/pgweb/core/management/commands/fetch_rss_feeds.py +++ b/pgweb/core/management/commands/fetch_rss_feeds.py @@ -13,6 +13,7 @@ from datetime import datetime from pgweb.core.models import ImportedRSSFeed, ImportedRSSItem + class Command(BaseCommand): help = 'Fetch RSS feeds' @@ -27,7 +28,7 @@ class Command(BaseCommand): if not hasattr(feed, 'status'): # bozo_excpetion can seemingly be set when there is no error as well, # so make sure we only check if we didn't get a status. 
- if hasattr(feed,'bozo_exception'): + if hasattr(feed, 'bozo_exception'): raise Exception('Feed load error %s' % feed.bozo_exception) raise Exception('Feed load error with no exception!') if feed.status != 200: @@ -38,10 +39,11 @@ class Command(BaseCommand): try: item = ImportedRSSItem.objects.get(feed=importfeed, url=entry.link) except ImportedRSSItem.DoesNotExist: - item = ImportedRSSItem(feed=importfeed, - title=entry.title[:100], - url=entry.link, - posttime=datetime(*(entry.published_parsed[0:6])), + item = ImportedRSSItem( + feed=importfeed, + title=entry.title[:100], + url=entry.link, + posttime=datetime(*(entry.published_parsed[0:6])), ) item.save() fetchedsomething = True diff --git a/pgweb/core/management/commands/moderation_report.py b/pgweb/core/management/commands/moderation_report.py index cdc3f28a..c1be3ffb 100644 --- a/pgweb/core/management/commands/moderation_report.py +++ b/pgweb/core/management/commands/moderation_report.py @@ -14,6 +14,7 @@ from datetime import datetime from pgweb.util.moderation import get_all_pending_moderations from pgweb.util.misc import send_template_mail + class Command(BaseCommand): help = 'Send moderation report' @@ -28,4 +29,4 @@ class Command(BaseCommand): "core/moderation_report.txt", { 'items': counts, - }) + }) diff --git a/pgweb/core/management/commands/sessioninfo.py b/pgweb/core/management/commands/sessioninfo.py index aa5aaa3e..0b8e2fdb 100644 --- a/pgweb/core/management/commands/sessioninfo.py +++ b/pgweb/core/management/commands/sessioninfo.py @@ -6,6 +6,7 @@ from django.core.management.base import BaseCommand, CommandError from django.contrib.sessions.models import Session from django.contrib.auth.models import User + class Command(BaseCommand): help = 'Dump interesting information about a session' @@ -34,9 +35,8 @@ class Command(BaseCommand): session.pop(k, None) if session: print " -- Other session values --" - for k,v in session.items(): - print u"{0:20} {1}".format(k,v) + for k, v in session.items(): + print u"{0:20} {1}".format(k, v) except Session.DoesNotExist: raise CommandError('Session not found') - diff --git a/pgweb/core/migrations/0001_initial.py b/pgweb/core/migrations/0001_initial.py index 65fb2bab..2f493d30 100644 --- a/pgweb/core/migrations/0001_initial.py +++ b/pgweb/core/migrations/0001_initial.py @@ -6,6 +6,7 @@ from django.conf import settings import pgweb.core.models + class Migration(migrations.Migration): dependencies = [ diff --git a/pgweb/core/models.py b/pgweb/core/models.py index d1d74bd9..93a8b8f9 100644 --- a/pgweb/core/models.py +++ b/pgweb/core/models.py @@ -10,9 +10,10 @@ TESTING_CHOICES = ( (1, 'Release candidate'), (2, 'Beta'), (3, 'Alpha'), - ) +) TESTING_SHORTSTRING = ('', 'rc', 'beta', 'alpha') + class Version(models.Model): tree = models.DecimalField(max_digits=3, decimal_places=1, null=False, blank=False, unique=True) latestminor = models.IntegerField(null=False, blank=False, default=0, help_text="For testing versions, latestminor means latest beta/rc number. For other releases, it's the latest minor release number in the tree.") @@ -63,7 +64,7 @@ class Version(models.Model): for p in previous: if not p == self: p.current = False - p.save() # primary key check avoids recursion + p.save() # primary key check avoids recursion # Now that we've made any previously current ones non-current, we are # free to save this one. 
@@ -95,6 +96,7 @@ class Country(models.Model): def __unicode__(self): return self.name + class Language(models.Model): # Import data from http://www.loc.gov/standards/iso639-2/ISO-639-2_utf-8.txt # (yes, there is a UTF16 BOM in the UTF8 file) @@ -111,12 +113,14 @@ class Language(models.Model): def __unicode__(self): return self.name + class OrganisationType(models.Model): typename = models.CharField(max_length=32, null=False, blank=False) def __unicode__(self): return self.typename + class Organisation(models.Model): name = models.CharField(max_length=100, null=False, blank=False, unique=True) approved = models.BooleanField(null=False, default=False) @@ -151,6 +155,7 @@ class ImportedRSSFeed(models.Model): def __unicode__(self): return self.internalname + class ImportedRSSItem(models.Model): feed = models.ForeignKey(ImportedRSSFeed) title = models.CharField(max_length=100, null=False, blank=False) @@ -167,6 +172,8 @@ class ImportedRSSItem(models.Model): # From man sshd, except for ssh-dss _valid_keytypes = ['ecdsa-sha2-nistp256', 'ecdsa-sha2-nistp384', 'ecdsa-sha2-nistp521', 'ssh-ed25519', 'ssh-rsa'] + + # Options, keytype, key, comment. But we don't support options. def validate_sshkey(key): lines = key.splitlines() @@ -185,12 +192,14 @@ def validate_sshkey(key): except: raise ValidationError("Incorrect base64 encoded key!") + # Extra attributes for users (if they have them) class UserProfile(models.Model): user = models.OneToOneField(User, null=False, blank=False, primary_key=True) - sshkey = models.TextField(null=False, blank=True, verbose_name="SSH key", help_text= "Paste one or more public keys in OpenSSH format, one per line.", validators=[validate_sshkey, ]) + sshkey = models.TextField(null=False, blank=True, verbose_name="SSH key", help_text="Paste one or more public keys in OpenSSH format, one per line.", validators=[validate_sshkey, ]) lastmodified = models.DateTimeField(null=False, blank=False, auto_now=True) + # Notifications sent for any moderated content. # Yes, we uglify it by storing the type of object as a string, so we don't # end up with a bazillion fields being foreign keys. Ugly, but works. 
@@ -198,7 +207,7 @@ class ModerationNotification(models.Model): objectid = models.IntegerField(null=False, blank=False, db_index=True) objecttype = models.CharField(null=False, blank=False, max_length=100) text = models.TextField(null=False, blank=False) - author = models.CharField(null=False, blank=False, max_length=100) + author = models.CharField(null=False, blank=False, max_length=100) date = models.DateTimeField(null=False, blank=False, auto_now=True) def __unicode__(self): diff --git a/pgweb/core/struct.py b/pgweb/core/struct.py index 2829d1a5..e2b65a6a 100644 --- a/pgweb/core/struct.py +++ b/pgweb/core/struct.py @@ -1,5 +1,6 @@ import os + def get_struct(): yield ('', None) yield ('community/', None) diff --git a/pgweb/core/templatetags/pgfilters.py b/pgweb/core/templatetags/pgfilters.py index 2d8cce99..a166c6ff 100644 --- a/pgweb/core/templatetags/pgfilters.py +++ b/pgweb/core/templatetags/pgfilters.py @@ -5,10 +5,12 @@ import json register = template.Library() + @register.filter(name='class_name') def class_name(ob): return ob.__class__.__name__ + @register.filter(is_safe=True) def field_class(value, arg): if 'class' in value.field.widget.attrs: @@ -17,45 +19,54 @@ def field_class(value, arg): c = arg return value.as_widget(attrs={"class": c}) + @register.filter(name='hidemail') @stringfilter def hidemail(value): return value.replace('@', ' at ') + @register.filter(is_safe=True) def ischeckbox(obj): return obj.field.widget.__class__.__name__ in ["CheckboxInput", "CheckboxSelectMultiple"] and not getattr(obj.field, 'regular_field', False) + @register.filter(is_safe=True) def ismultiplecheckboxes(obj): return obj.field.widget.__class__.__name__ == "CheckboxSelectMultiple" and not getattr(obj.field, 'regular_field', False) + @register.filter(is_safe=True) def isrequired_error(obj): if obj.errors and obj.errors[0] == u"This field is required.": return True return False + @register.filter(is_safe=True) def label_class(value, arg): return value.label_tag(attrs={'class': arg}) + @register.filter() def planet_author(obj): # takes a ImportedRSSItem object from a Planet feed and extracts the author # information from the title return obj.title.split(':')[0] + @register.filter() def planet_title(obj): # takes a ImportedRSSItem object from a Planet feed and extracts the info # specific to the title of the Planet entry return ":".join(obj.title.split(':')[1:]) + @register.filter(name='dictlookup') def dictlookup(value, key): return value.get(key, None) + @register.filter(name='json') def tojson(value): return json.dumps(value) diff --git a/pgweb/core/views.py b/pgweb/core/views.py index 509a52bc..f307dd60 100644 --- a/pgweb/core/views.py +++ b/pgweb/core/views.py @@ -37,6 +37,7 @@ from pgweb.survey.models import Survey from models import Organisation from forms import OrganisationForm, MergeOrgsForm + # Front page view @cache(minutes=10) def home(request): @@ -68,6 +69,7 @@ def home(request): 'planet': planet, }) + # About page view (contains information about PostgreSQL + random quotes) @cache(minutes=10) def about(request): @@ -77,6 +79,7 @@ def about(request): 'quotes': quotes, }) + # Community main page (contains surveys and potentially more) def community(request): s = Survey.objects.filter(current=True) @@ -90,13 +93,17 @@ def community(request): 'planet': planet, }) + # List of supported versions def versions(request): return render_pgweb(request, 'support', 'support/versioning.html', { - 'versions': Version.objects.filter(tree__gt=0).filter(testing=0), + 'versions': 
Version.objects.filter(tree__gt=0).filter(testing=0), }) + re_staticfilenames = re.compile("^[0-9A-Z/_-]+$", re.IGNORECASE) + + # Generic fallback view for static pages def fallback(request, url): if url.find('..') > -1: @@ -116,13 +123,14 @@ def fallback(request, url): # Guestimate the nav section by looking at the URL and taking the first # piece of it. try: - navsect = url.split('/',2)[0] + navsect = url.split('/', 2)[0] except: navsect = '' c = PGWebContextProcessor(request) c.update({'navmenu': get_nav_menu(navsect)}) return HttpResponse(t.render(c)) + # Edit-forms for core objects @login_required def organisationform(request, itemid): @@ -132,6 +140,7 @@ def organisationform(request, itemid): return simple_form(Organisation, itemid, request, OrganisationForm, redirect='/account/edit/organisations/') + # robots.txt def robots(request): return HttpResponse("""User-agent: * @@ -154,7 +163,7 @@ def _make_sitemap(pagelist): x.startElement('urlset', {'xmlns': 'http://www.sitemaps.org/schemas/sitemap/0.9'}) pages = 0 for p in pagelist: - pages+=1 + pages += 1 x.startElement('url', {}) x.add_xml_element('loc', 'https://www.postgresql.org/%s' % urllib.quote(p[0])) if len(p) > 1 and p[1]: @@ -166,11 +175,13 @@ def _make_sitemap(pagelist): x.endDocument() return resp + # Sitemap (XML format) @cache(hours=6) def sitemap(request): return _make_sitemap(get_all_pages_struct()) + # Internal sitemap (only for our own search engine) # Note! Still served up to anybody who wants it, so don't # put anything secret in it... @@ -178,17 +189,19 @@ def sitemap(request): def sitemap_internal(request): return _make_sitemap(get_all_pages_struct(method='get_internal_struct')) + # dynamic CSS serving, meaning we merge a number of different CSS into a # single one, making sure it turns into a single http response. We do this # dynamically, since the output will be cached. _dynamic_cssmap = { 'base': ['media/css/main.css', - 'media/css/normalize.css',], + 'media/css/normalize.css', ], 'docs': ['media/css/global.css', 'media/css/table.css', 'media/css/text.css', 'media/css/docs.css'], - } +} + @cache(hours=6) def dynamic_css(request, css): @@ -228,41 +241,46 @@ def dynamic_css(request, css): return resp + @nocache def csrf_failure(request, reason=''): resp = render(request, 'errors/csrf_failure.html', { - 'reason': reason, - }) - resp.status_code = 403 # Forbidden + 'reason': reason, + }) + resp.status_code = 403 # Forbidden return resp + # Basic information about the connection @cache(seconds=30) def system_information(request): - return render(request,'core/system_information.html', { - 'server': os.uname()[1], - 'cache_server': request.META['REMOTE_ADDR'] or None, - 'client_ip': get_client_ip(request), - 'django_version': django.get_version(), + return render(request, 'core/system_information.html', { + 'server': os.uname()[1], + 'cache_server': request.META['REMOTE_ADDR'] or None, + 'client_ip': get_client_ip(request), + 'django_version': django.get_version(), }) + # Sync timestamp for automirror. Keep it around for 30 seconds # Basically just a check that we can access the backend still... 
@cache(seconds=30) def sync_timestamp(request): s = datetime.now().strftime("%Y-%m-%d %H:%M:%S\n") - r = HttpResponse(s, content_type='text/plain') + r = HttpResponse(s, content_type='text/plain') r['Content-Length'] = len(s) return r + # List of all unapproved objects, for the special admin page @login_required @user_passes_test(lambda u: u.is_staff) @user_passes_test(lambda u: u.groups.filter(name='pgweb moderators').exists()) def admin_pending(request): return render(request, 'core/admin_pending.html', { - 'app_list': get_all_pending_moderations(), - }) + 'app_list': get_all_pending_moderations(), + }) + # Purge objects from varnish, for the admin pages @login_required @@ -297,8 +315,9 @@ def admin_purge(request): latest = curs.fetchall() return render(request, 'core/admin_purge.html', { - 'latest_purges': latest, - }) + 'latest_purges': latest, + }) + @csrf_exempt def api_varnish_purge(request): @@ -313,6 +332,7 @@ def api_varnish_purge(request): curs.execute("SELECT varnish_purge_expr(%s)", (expr, )) return HttpResponse("Purged %s entries\n" % n) + # Merge two organisations @login_required @user_passes_test(lambda u: u.is_superuser) @@ -346,5 +366,5 @@ def admin_mergeorg(request): form = MergeOrgsForm() return render(request, 'core/admin_mergeorg.html', { - 'form': form, + 'form': form, }) diff --git a/pgweb/docs/forms.py b/pgweb/docs/forms.py index c26695ff..96df3a7d 100644 --- a/pgweb/docs/forms.py +++ b/pgweb/docs/forms.py @@ -1,8 +1,9 @@ from django import forms + class DocCommentForm(forms.Form): name = forms.CharField(max_length=100, required=True, label='Your Name') email = forms.EmailField(max_length=100, required=True, label='Your Email') shortdesc = forms.CharField(max_length=100, required=True, label="Subject") details = forms.CharField(required=True, widget=forms.Textarea, - label="What is your comment?") + label="What is your comment?") diff --git a/pgweb/docs/models.py b/pgweb/docs/models.py index 7a522147..2e612417 100644 --- a/pgweb/docs/models.py +++ b/pgweb/docs/models.py @@ -1,6 +1,7 @@ from django.db import models from pgweb.core.models import Version + class DocPage(models.Model): id = models.AutoField(null=False, primary_key=True) file = models.CharField(max_length=64, null=False, blank=False) @@ -20,6 +21,7 @@ class DocPage(models.Model): # Index file first, because we want to list versions by file unique_together = [('file', 'version')] + class DocPageAlias(models.Model): file1 = models.CharField(max_length=64, null=False, blank=False, unique=True) file2 = models.CharField(max_length=64, null=False, blank=False, unique=True) @@ -30,4 +32,4 @@ class DocPageAlias(models.Model): # XXX: needs a unique functional index as well, see the migration! 
class Meta: db_table = 'docsalias' - verbose_name_plural='Doc page aliases' + verbose_name_plural = 'Doc page aliases' diff --git a/pgweb/docs/struct.py b/pgweb/docs/struct.py index 942d0573..78c3f933 100644 --- a/pgweb/docs/struct.py +++ b/pgweb/docs/struct.py @@ -1,6 +1,7 @@ from django.db import connection from pgweb.core.models import Version + def get_struct(): currentversion = Version.objects.get(current=True) @@ -31,7 +32,7 @@ def get_struct(): version = int(version) yield ('docs/%s/%s' % (version, filename), - testing and 0.1 or docprio, # beta/rc versions always get 0.1 in prio + testing and 0.1 or docprio, # beta/rc versions always get 0.1 in prio loaded) # Also yield the current version urls, with the highest @@ -40,6 +41,7 @@ def get_struct(): yield ('docs/current/%s' % filename, 1.0, loaded) + # For our internal sitemap (used only by our own search engine), # include the devel version of the docs (and only those, since the # other versions are already included) diff --git a/pgweb/docs/views.py b/pgweb/docs/views.py index d0936272..3934e568 100644 --- a/pgweb/docs/views.py +++ b/pgweb/docs/views.py @@ -17,6 +17,7 @@ from pgweb.core.models import Version from models import DocPage from forms import DocCommentForm + @allow_frames @content_sources('style', "'unsafe-inline'") def docpage(request, version, filename): @@ -57,9 +58,9 @@ def docpage(request, version, filename): where=["file=%s OR file IN (SELECT file2 FROM docsalias WHERE file1=%s) OR file IN (SELECT file1 FROM docsalias WHERE file2=%s)"], params=[fullname, fullname, fullname], select={ - 'supported':"COALESCE((SELECT supported FROM core_version v WHERE v.tree=version), 'f')", - 'testing':"COALESCE((SELECT testing FROM core_version v WHERE v.tree=version),0)", - }).order_by('-supported', 'version').only('version', 'file') + 'supported': "COALESCE((SELECT supported FROM core_version v WHERE v.tree=version), 'f')", + 'testing': "COALESCE((SELECT testing FROM core_version v WHERE v.tree=version),0)", + }).order_by('-supported', 'version').only('version', 'file') return render(request, 'docs/docspage.html', { 'page': page, @@ -71,6 +72,7 @@ def docpage(request, version, filename): 'loaddate': loaddate, }) + def docspermanentredirect(request, version, typ, page, *args): """Provides a permanent redirect from the old static/interactive pages to the modern pages that do not have said keywords. 
@@ -80,18 +82,22 @@ def docspermanentredirect(request, version, typ, page, *args): url += page return HttpResponsePermanentRedirect(url) + def docsrootpage(request, version): return docpage(request, version, 'index') + def redirect_root(request, version): return HttpResponsePermanentRedirect("/docs/%s/" % version) + def root(request): - versions = Version.objects.filter(Q(supported=True) | Q(testing__gt=0,tree__gt=0)).order_by('-tree') + versions = Version.objects.filter(Q(supported=True) | Q(testing__gt=0, tree__gt=0)).order_by('-tree') return render_pgweb(request, 'docs', 'docs/index.html', { 'versions': versions, }) + class _VersionPdfWrapper(object): """ A wrapper around a version that knows to look for PDF files, and @@ -110,26 +116,31 @@ class _VersionPdfWrapper(object): self.indexname = 'postgres.html' else: self.indexname = 'index.html' + def __getattr__(self, name): return getattr(self.__version, name) + def _find_pdf(self, pagetype): try: return os.stat('%s/documentation/pdf/%s/postgresql-%s-%s.pdf' % (settings.STATIC_CHECKOUT, self.__version.numtree, self.__version.numtree, pagetype)).st_size except: return 0 + def manuals(request): - versions = Version.objects.filter(Q(supported=True) | Q(testing__gt=0,tree__gt=0)).order_by('-tree') + versions = Version.objects.filter(Q(supported=True) | Q(testing__gt=0, tree__gt=0)).order_by('-tree') return render_pgweb(request, 'docs', 'docs/manuals.html', { 'versions': [_VersionPdfWrapper(v) for v in versions], }) + def manualarchive(request): - versions = Version.objects.filter(testing=0,supported=False,tree__gt=0).order_by('-tree') + versions = Version.objects.filter(testing=0, supported=False, tree__gt=0).order_by('-tree') return render_pgweb(request, 'docs', 'docs/archive.html', { 'versions': [_VersionPdfWrapper(v) for v in versions], }) + @login_required def commentform(request, itemid, version, filename): v = get_object_or_404(Version, tree=version) diff --git a/pgweb/downloads/admin.py b/pgweb/downloads/admin.py index 2449acbd..9b5ed3ef 100644 --- a/pgweb/downloads/admin.py +++ b/pgweb/downloads/admin.py @@ -7,23 +7,27 @@ import re from pgweb.util.admin import PgwebAdmin from models import StackBuilderApp, Category, Product, LicenceType + class ProductAdmin(PgwebAdmin): list_display = ('name', 'org', 'approved', 'lastconfirmed',) list_filter = ('approved',) search_fields = ('name', 'description', ) ordering = ('name', ) + def duplicate_stackbuilderapp(modeladmin, request, queryset): # Duplicate each individual selected object, but turn off # the active flag if it's on. for o in queryset: - o.id = None # Triggers creation of a new object + o.id = None # Triggers creation of a new object o.active = False o.textid = o.textid + "_new" o.save() + duplicate_stackbuilderapp.short_description = "Duplicate application" + class StackBuilderAppAdminForm(forms.ModelForm): class Meta: model = StackBuilderApp @@ -47,11 +51,13 @@ class StackBuilderAppAdminForm(forms.ModelForm): raise ValidationError("Dependency '%s' does not exist!" 
% d) return self.cleaned_data['txtdependencies'] + class StackBuilderAppAdmin(admin.ModelAdmin): list_display = ('textid', 'active', 'name', 'platform', 'version', ) actions = [duplicate_stackbuilderapp, ] form = StackBuilderAppAdminForm + admin.site.register(Category) admin.site.register(LicenceType) admin.site.register(Product, ProductAdmin) diff --git a/pgweb/downloads/forms.py b/pgweb/downloads/forms.py index 150e0b70..2faf63ec 100644 --- a/pgweb/downloads/forms.py +++ b/pgweb/downloads/forms.py @@ -3,13 +3,17 @@ from django import forms from pgweb.core.models import Organisation from models import Product + class ProductForm(forms.ModelForm): form_intro = """Note that in order to register a new product, you must first register an organisation. If you have not done so, use this form.""" + def __init__(self, *args, **kwargs): super(ProductForm, self).__init__(*args, **kwargs) + def filter_by_user(self, user): self.fields['org'].queryset = Organisation.objects.filter(managers=user, approved=True) + class Meta: model = Product exclude = ('lastconfirmed', 'approved', ) diff --git a/pgweb/downloads/models.py b/pgweb/downloads/models.py index 682fdd92..003f0364 100644 --- a/pgweb/downloads/models.py +++ b/pgweb/downloads/models.py @@ -13,6 +13,7 @@ class Category(models.Model): class Meta: ordering = ('catname',) + class LicenceType(models.Model): typename = models.CharField(max_length=100, null=False, blank=False) @@ -22,6 +23,7 @@ class LicenceType(models.Model): class Meta: ordering = ('typename',) + class Product(models.Model): name = models.CharField(max_length=100, null=False, blank=False, unique=True) approved = models.BooleanField(null=False, default=False) @@ -45,17 +47,27 @@ class Product(models.Model): class Meta: ordering = ('name',) + class StackBuilderApp(models.Model): textid = models.CharField(max_length=100, null=False, blank=False) version = models.CharField(max_length=20, null=False, blank=False) platform = models.CharField(max_length=20, null=False, blank=False, - choices= (('windows', 'Windows (32-bit)'), ('windows-x64', 'Windows (64-bit)'), ('osx', 'Mac OS X'), - ('linux', 'Linux (32-bit)'), ('linux-x64', 'Linux (64-bit)')) - ) + choices=( + ('windows', 'Windows (32-bit)'), + ('windows-x64', 'Windows (64-bit)'), + ('osx', 'Mac OS X'), + ('linux', 'Linux (32-bit)'), + ('linux-x64', 'Linux (64-bit)'), + )) secondaryplatform = models.CharField(max_length=20, null=False, blank=True, - choices= (('', 'None'), ('windows', 'Windows (32-bit)'), ('windows-x64', 'Windows (64-bit)'), - ('osx', 'Mac OS X'), ('linux', 'Linux (32-bit)'), ('linux-x64', 'Linux (64-bit)')) - ) + choices=( + ('', 'None'), + ('windows', 'Windows (32-bit)'), + ('windows-x64', 'Windows (64-bit)'), + ('osx', 'Mac OS X'), + ('linux', 'Linux (32-bit)'), + ('linux-x64', 'Linux (64-bit)') + )) name = models.CharField(max_length=500, null=False, blank=False) active = models.BooleanField(null=False, blank=False, default=True) description = models.TextField(null=False, blank=False) @@ -63,10 +75,14 @@ class StackBuilderApp(models.Model): pgversion = models.CharField(max_length=5, null=False, blank=True) edbversion = models.CharField(max_length=5, null=False, blank=True) format = models.CharField(max_length=5, null=False, blank=False, - choices = (('bin', 'Linux .bin'), ('app', 'Mac .app'), - ('pkg', 'Mac .pkg'), ('mpkg', 'Mac .mpkg'), - ('exe', 'Windows .exe'), ('msi', 'Windows .msi')) - ) + choices=( + ('bin', 'Linux .bin'), + ('app', 'Mac .app'), + ('pkg', 'Mac .pkg'), + ('mpkg', 'Mac .mpkg'), + ('exe', 'Windows 
.exe'), + ('msi', 'Windows .msi') + )) installoptions = models.CharField(max_length=500, null=False, blank=True) upgradeoptions = models.CharField(max_length=500, null=False, blank=True) checksum = models.CharField(max_length=32, null=False, blank=False) diff --git a/pgweb/downloads/struct.py b/pgweb/downloads/struct.py index 27a92072..1b3fb8de 100644 --- a/pgweb/downloads/struct.py +++ b/pgweb/downloads/struct.py @@ -1,5 +1,6 @@ from models import Category + def get_struct(): # Products for c in Category.objects.all(): diff --git a/pgweb/downloads/views.py b/pgweb/downloads/views.py index b51f829e..7ccdbb3a 100644 --- a/pgweb/downloads/views.py +++ b/pgweb/downloads/views.py @@ -17,6 +17,7 @@ from pgweb.core.models import Version from models import Category, Product, StackBuilderApp from forms import ProductForm + ####### # FTP browser ####### @@ -30,7 +31,7 @@ def ftpbrowser(request, subpath): raise Http404 subpath = subpath.strip('/') else: - subpath="" + subpath = "" # Pickle up the list of things we need try: @@ -73,19 +74,19 @@ def ftpbrowser(request, subpath): del allnodes # Add all directories - directories = [{'link': k, 'url': k, 'type': 'd'} for k,v in node.items() if v['t'] == 'd'] + directories = [{'link': k, 'url': k, 'type': 'd'} for k, v in node.items() if v['t'] == 'd'] # Add all symlinks (only directories supported) - directories.extend([{'link': k, 'url': v['d'], 'type': 'l'} for k,v in node.items() if v['t'] == 'l']) + directories.extend([{'link': k, 'url': v['d'], 'type': 'l'} for k, v in node.items() if v['t'] == 'l']) # A ittle early sorting wouldn't go amiss, so .. ends up at the top - directories.sort(key = version_sort, reverse=True) + directories.sort(key=version_sort, reverse=True) # Add a link to the parent directory if subpath: - directories.insert(0, {'link':'[Parent Directory]', 'url':'..'}) + directories.insert(0, {'link': '[Parent Directory]', 'url': '..'}) # Fetch files - files = [{'name': k, 'mtime': v['d'], 'size': v['s']} for k,v in node.items() if v['t'] == 'f'] + files = [{'name': k, 'mtime': v['d'], 'size': v['s']} for k, v in node.items() if v['t'] == 'f'] breadcrumbs = [] if subpath: @@ -98,12 +99,12 @@ def ftpbrowser(request, subpath): breadroot = "%s/%s" % (breadroot, pathpiece) else: breadroot = pathpiece - breadcrumbs.append({'name': pathpiece, 'path': breadroot}); + breadcrumbs.append({'name': pathpiece, 'path': breadroot}) # Check if there are any "content files" we should render directly on the webpage - file_readme = (node.has_key('README') and node['README']['t']=='f') and node['README']['c'] or None; - file_message = (node.has_key('.message') and node['.message']['t']=='f') and node['.message']['c'] or None; - file_maintainer = (node.has_key('CURRENT_MAINTAINER') and node['CURRENT_MAINTAINER']['t'] == 'f') and node['CURRENT_MAINTAINER']['c'] or None; + file_readme = (node.has_key('README') and node['README']['t'] == 'f') and node['README']['c'] or None + file_message = (node.has_key('.message') and node['.message']['t'] == 'f') and node['.message']['c'] or None + file_maintainer = (node.has_key('CURRENT_MAINTAINER') and node['CURRENT_MAINTAINER']['t'] == 'f') and node['CURRENT_MAINTAINER']['c'] or None del node @@ -153,6 +154,7 @@ def uploadftp(request): # Finally, indicate to the client that we're happy return HttpResponse("OK", content_type="text/plain") + @csrf_exempt def uploadyum(request): if request.method != 'PUT': @@ -182,6 +184,7 @@ def uploadyum(request): # Finally, indicate to the client that we're happy return 
HttpResponse("OK", content_type="text/plain") + @nocache def mirrorselect(request, path): # Old access to mirrors will just redirect to the main ftp site. @@ -197,7 +200,8 @@ def yum_js(request): return render(request, 'downloads/js/yum.js', { 'json': jsonstr, 'supported_versions': ','.join([str(v.numtree) for v in Version.objects.filter(supported=True)]), - }, content_type='application/json') + }, content_type='application/json') + ####### # Product catalogue @@ -208,20 +212,23 @@ def categorylist(request): 'categories': categories, }) + def productlist(request, catid, junk=None): category = get_object_or_404(Category, pk=catid) - products = Product.objects.select_related('org','licencetype').filter(category=category, approved=True) + products = Product.objects.select_related('org', 'licencetype').filter(category=category, approved=True) return render_pgweb(request, 'download', 'downloads/productlist.html', { 'category': category, 'products': products, 'productcount': len(products), }) + @login_required def productform(request, itemid): return simple_form(Product, itemid, request, ProductForm, redirect='/account/edit/products/') + ####### # Stackbuilder ####### diff --git a/pgweb/events/admin.py b/pgweb/events/admin.py index 2ac51df1..343d378f 100644 --- a/pgweb/events/admin.py +++ b/pgweb/events/admin.py @@ -4,14 +4,18 @@ from django import forms from pgweb.util.admin import PgwebAdmin from models import Event + def approve_event(modeladmin, request, queryset): # We need to do this in a loop even though it's less efficient, # since using queryset.update() will not send the moderation messages. for e in queryset: e.approved = True e.save() + + approve_event.short_description = 'Approve event' + class EventAdminForm(forms.ModelForm): class Meta: model = Event @@ -28,6 +32,7 @@ class EventAdminForm(forms.ModelForm): del cleaned_data['country'] return cleaned_data + class EventAdmin(PgwebAdmin): list_display = ('title', 'org', 'startdate', 'enddate', 'approved',) list_filter = ('approved',) diff --git a/pgweb/events/feeds.py b/pgweb/events/feeds.py index 21aa0d4b..9b8bed48 100644 --- a/pgweb/events/feeds.py +++ b/pgweb/events/feeds.py @@ -4,6 +4,7 @@ from models import Event from datetime import datetime, time + class EventFeed(Feed): title = description = "PostgreSQL events" link = "https://www.postgresql.org/" @@ -18,4 +19,4 @@ class EventFeed(Feed): return "https://www.postgresql.org/about/event/%s/" % obj.id def item_pubdate(self, obj): - return datetime.combine(obj.startdate,time.min) + return datetime.combine(obj.startdate, time.min) diff --git a/pgweb/events/forms.py b/pgweb/events/forms.py index 460c5a76..65227237 100644 --- a/pgweb/events/forms.py +++ b/pgweb/events/forms.py @@ -4,16 +4,19 @@ from django.forms import ValidationError from pgweb.core.models import Organisation from models import Event + class EventForm(forms.ModelForm): toggle_fields = [ { 'name': 'isonline', 'invert': True, - 'fields': ['city', 'state', 'country',] + 'fields': ['city', 'state', 'country', ] }, ] + def __init__(self, *args, **kwargs): super(EventForm, self).__init__(*args, **kwargs) + def filter_by_user(self, user): self.fields['org'].queryset = Organisation.objects.filter(managers=user, approved=True) diff --git a/pgweb/events/models.py b/pgweb/events/models.py index f956d931..fa94c14a 100644 --- a/pgweb/events/models.py +++ b/pgweb/events/models.py @@ -2,6 +2,7 @@ from django.db import models from pgweb.core.models import Country, Language, Organisation + class Event(models.Model): approved = 
models.BooleanField(null=False, blank=False, default=False) @@ -42,13 +43,13 @@ class Event(models.Model): mgrs = self.org.managers.all() if len(mgrs) == 1: if mgrs[0].pk == 0: - return False # Migration organisation + return False # Migration organisation else: - return True # Has an actual organisation + return True # Has an actual organisation elif len(mgrs) > 1: # More than one manager means it must be new return True - return False # Has no organisation at all + return False # Has no organisation at all @property def displaydate(self): @@ -67,4 +68,4 @@ class Event(models.Model): return "%s, %s" % (self.city, self.country) class Meta: - ordering = ('-startdate','-enddate',) + ordering = ('-startdate', '-enddate', ) diff --git a/pgweb/events/struct.py b/pgweb/events/struct.py index e60303dd..c83c635f 100644 --- a/pgweb/events/struct.py +++ b/pgweb/events/struct.py @@ -1,6 +1,7 @@ from datetime import date from models import Event + def get_struct(): now = date.today() @@ -14,4 +15,4 @@ def get_struct(): if yearsold > 4: yearsold = 4 yield ('about/event/%s/' % n.id, - 0.5-(yearsold/10.0)) + 0.5 - (yearsold / 10.0)) diff --git a/pgweb/events/views.py b/pgweb/events/views.py index d0fa7da8..9c4cdb9e 100644 --- a/pgweb/events/views.py +++ b/pgweb/events/views.py @@ -10,31 +10,35 @@ from pgweb.util.helpers import simple_form from models import Event from forms import EventForm + def main(request): community_events = Event.objects.select_related('country').filter(approved=True, badged=True).filter(enddate__gt=date.today()).order_by('enddate', 'startdate',) other_events = Event.objects.select_related('country').filter(approved=True, badged=False).filter(enddate__gt=date.today()).order_by('enddate', 'startdate',) return render_pgweb(request, 'about', 'events/archive.html', { 'title': 'Upcoming Events', 'eventblocks': ( - { 'name': 'Community Events', 'events': community_events, 'link': '',}, - { 'name': 'Other Events', 'events': other_events, 'link': '',}, + {'name': 'Community Events', 'events': community_events, 'link': '', }, + {'name': 'Other Events', 'events': other_events, 'link': '', }, ), }) + def _eventarchive(request, title): # Hardcode to the latest 100 events. Do we need paging too?
events = Event.objects.select_related('country').filter(approved=True).filter(enddate__lte=date.today()).order_by('-enddate', '-startdate',)[:100] return render_pgweb(request, 'about', 'events/archive.html', { - 'title': '%s Archive' % title, - 'archive': True, - 'eventblocks': ( - {'name': title, 'events': events, }, - ), + 'title': '%s Archive' % title, + 'archive': True, + 'eventblocks': ( + {'name': title, 'events': events, }, + ), }) + def archive(request): return _eventarchive(request, 'Event') + def item(request, itemid, throwaway=None): event = get_object_or_404(Event, pk=itemid) if not event.approved: @@ -43,6 +47,7 @@ def item(request, itemid, throwaway=None): 'obj': event, }) + @login_required def form(request, itemid): return simple_form(Event, itemid, request, EventForm, diff --git a/pgweb/featurematrix/admin.py b/pgweb/featurematrix/admin.py index 1581c830..c5c286c3 100644 --- a/pgweb/featurematrix/admin.py +++ b/pgweb/featurematrix/admin.py @@ -2,18 +2,22 @@ from django.contrib import admin from models import Feature, FeatureGroup + class FeatureInline(admin.TabularInline): model = Feature + class FeatureGroupAdmin(admin.ModelAdmin): inlines = [FeatureInline, ] list_display = ('groupname', 'groupsort') ordering = ['groupsort'] + class FeatureAdmin(admin.ModelAdmin): list_display = ('featurename', 'group') list_filter = ('group',) search_fields = ('featurename',) + admin.site.register(FeatureGroup, FeatureGroupAdmin) admin.site.register(Feature, FeatureAdmin) diff --git a/pgweb/featurematrix/models.py b/pgweb/featurematrix/models.py index 9ed6cbd6..033c238f 100644 --- a/pgweb/featurematrix/models.py +++ b/pgweb/featurematrix/models.py @@ -1,12 +1,13 @@ from django.db import models choices_map = { - 0: {'str': 'No', 'class': 'no', 'bgcolor': '#ffdddd'}, - 1: {'str': 'Yes', 'class': 'yes', 'bgcolor': '#ddffdd'}, - 2: {'str': 'Obsolete', 'class': 'obs', 'bgcolor': '#ddddff'}, - 3: {'str': '?', 'class': 'unk', 'bgcolor': '#ffffaa'}, + 0: {'str': 'No', 'class': 'no', 'bgcolor': '#ffdddd'}, + 1: {'str': 'Yes', 'class': 'yes', 'bgcolor': '#ddffdd'}, + 2: {'str': 'Obsolete', 'class': 'obs', 'bgcolor': '#ddddff'}, + 3: {'str': '?', 'class': 'unk', 'bgcolor': '#ffffaa'}, } -choices = [(k, v['str']) for k,v in choices_map.items()] +choices = [(k, v['str']) for k, v in choices_map.items()] + class FeatureGroup(models.Model): groupname = models.CharField(max_length=100, null=False, blank=False) @@ -20,13 +21,14 @@ class FeatureGroup(models.Model): @property def columns(self): # Return a list of all the columns for the matrix - return [b for a,b in versions] + return [b for a, b in versions] + class Feature(models.Model): group = models.ForeignKey(FeatureGroup, null=False, blank=False) featurename = models.CharField(max_length=100, null=False, blank=False) featuredescription = models.TextField(null=False, blank=True) - #WARNING! All fields that start with "v" will be considered versions! + # WARNING! All fields that start with "v" will be considered versions! 
v74 = models.IntegerField(verbose_name="7.4", null=False, blank=False, default=0, choices=choices) v74.visible_default = False v80 = models.IntegerField(verbose_name="8.0", null=False, blank=False, default=0, choices=choices) @@ -53,7 +55,7 @@ class Feature(models.Model): def columns(self): # Get a list of column based on all versions that are visible_default - return [choices_map[getattr(self, a)] for a,b in versions] + return [choices_map[getattr(self, a)] for a, b in versions] @property def featurelink(self): @@ -62,5 +64,6 @@ class Feature(models.Model): else: return 'detail/%s/' % self.id -versions = [(f.name,f.verbose_name) for f in Feature()._meta.fields if f.name.startswith('v') and getattr(f, 'visible_default', True)] + +versions = [(f.name, f.verbose_name) for f in Feature()._meta.fields if f.name.startswith('v') and getattr(f, 'visible_default', True)] versions = sorted(versions, key=lambda f: -float(f[1])) diff --git a/pgweb/featurematrix/views.py b/pgweb/featurematrix/views.py index a50c9fd0..5ad800e3 100644 --- a/pgweb/featurematrix/views.py +++ b/pgweb/featurematrix/views.py @@ -5,6 +5,7 @@ from pgweb.util.contexts import render_pgweb from pgweb.core.models import Version from models import Feature + def root(request): features = Feature.objects.all().select_related().order_by('group__groupsort', 'group__groupname', 'featurename') groups = [] @@ -29,6 +30,7 @@ def root(request): 'versions': versions, }) + def detail(request, featureid): feature = get_object_or_404(Feature, pk=featureid) return render_pgweb(request, 'about', 'featurematrix/featuredetail.html', { diff --git a/pgweb/legacyurl/models.py b/pgweb/legacyurl/models.py index 54f60fe2..0b4331b3 100644 --- a/pgweb/legacyurl/models.py +++ b/pgweb/legacyurl/models.py @@ -1,3 +1,3 @@ -#from django.db import models +# from django.db import models # Create your models here. 
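(Aside for reviewers, not part of the patch: the featurematrix hunks above depend on the introspection trick flagged by the "WARNING! All fields that start with 'v' will be considered versions!" comment -- the matrix's version columns are derived at import time from the model's field names. A minimal standalone sketch of that pattern, using a made-up FakeField class instead of the real Django model, might look like this:)

# Illustrative sketch only (not pgweb code): derive "version columns" from
# attribute names starting with 'v', mirroring the `versions` expression in
# featurematrix/models.py, but without needing a configured Django project.
class FakeField(object):
    def __init__(self, name, verbose_name, visible_default=True):
        self.name = name
        self.verbose_name = verbose_name
        self.visible_default = visible_default

fields = [
    FakeField('featurename', 'feature name'),
    FakeField('v74', '7.4', visible_default=False),  # hidden by default, like v74 above
    FakeField('v96', '9.6'),
    FakeField('v10', '10'),
]

# Keep only 'v*' fields that are visible by default, then sort newest first
# by interpreting the verbose name as a version number.
versions = [(f.name, f.verbose_name) for f in fields
            if f.name.startswith('v') and getattr(f, 'visible_default', True)]
versions = sorted(versions, key=lambda f: -float(f[1]))

print(versions)  # [('v10', '10'), ('v96', '9.6')]

(The visible_default attribute is what lets old branches such as 7.4 stay in the model without appearing in the default matrix view.)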
diff --git a/pgweb/legacyurl/views.py b/pgweb/legacyurl/views.py index 5fda8fca..437f2a20 100644 --- a/pgweb/legacyurl/views.py +++ b/pgweb/legacyurl/views.py @@ -1,5 +1,6 @@ from django.http import HttpResponseRedirect + def mailpref(request, listname): # Just redirect to the homepage of pglister, don't try specific lists return HttpResponseRedirect("https://lists.postgresql.org/") diff --git a/pgweb/lists/forms.py b/pgweb/lists/forms.py index 0afb9065..e79048bf 100644 --- a/pgweb/lists/forms.py +++ b/pgweb/lists/forms.py @@ -1,2 +1 @@ from django import forms - diff --git a/pgweb/lists/management/commands/sync_lists.py b/pgweb/lists/management/commands/sync_lists.py index fe2701a6..1c6e94f1 100644 --- a/pgweb/lists/management/commands/sync_lists.py +++ b/pgweb/lists/management/commands/sync_lists.py @@ -7,6 +7,7 @@ from django.db import connection, transaction from django.conf import settings import requests + class Command(BaseCommand): help = 'Synchronize mailinglists' @@ -15,9 +16,9 @@ class Command(BaseCommand): def handle(self, *args, **options): if settings.ARCHIVES_SEARCH_PLAINTEXT: - proto="http" + proto = "http" else: - proto="https" + proto = "https" r = requests.get('{0}://{1}/listinfo/'.format(proto, settings.ARCHIVES_SEARCH_SERVER)) j = r.json() allgroups = list(set([l['group'] for l in j])) diff --git a/pgweb/lists/models.py b/pgweb/lists/models.py index 0d398c9a..55520e19 100644 --- a/pgweb/lists/models.py +++ b/pgweb/lists/models.py @@ -1,5 +1,6 @@ from django.db import models + class MailingListGroup(models.Model): groupname = models.CharField(max_length=64, null=False, blank=False) sortkey = models.IntegerField(null=False, default=10) @@ -16,6 +17,7 @@ class MailingListGroup(models.Model): class Meta: ordering = ('sortkey', ) + class MailingList(models.Model): group = models.ForeignKey(MailingListGroup, null=False) listname = models.CharField(max_length=64, null=False, blank=False, unique=True) diff --git a/pgweb/lists/views.py b/pgweb/lists/views.py index 62db498f..42531554 100644 --- a/pgweb/lists/views.py +++ b/pgweb/lists/views.py @@ -4,20 +4,21 @@ import json from models import MailingList, MailingListGroup + def listinfo(request): resp = HttpResponse(content_type='application/json') - groupdata = [ { - 'id': g.id, - 'name': g.groupname, - 'sort': g.sortkey, - } for g in MailingListGroup.objects.all()] - listdata = [ { - 'id': l.id, - 'name': l.listname, - 'groupid': l.group_id, - 'active': l.active, - 'shortdesc': l.shortdesc, - 'description': l.description, - } for l in MailingList.objects.all()] + groupdata = [{ + 'id': g.id, + 'name': g.groupname, + 'sort': g.sortkey, + } for g in MailingListGroup.objects.all()] + listdata = [{ + 'id': l.id, + 'name': l.listname, + 'groupid': l.group_id, + 'active': l.active, + 'shortdesc': l.shortdesc, + 'description': l.description, + } for l in MailingList.objects.all()] json.dump({'groups': groupdata, 'lists': listdata}, resp) return resp diff --git a/pgweb/mailqueue/admin.py b/pgweb/mailqueue/admin.py index 801d1163..6529ea18 100644 --- a/pgweb/mailqueue/admin.py +++ b/pgweb/mailqueue/admin.py @@ -4,6 +4,7 @@ from email.parser import Parser from models import QueuedMail + class QueuedMailAdmin(admin.ModelAdmin): model = QueuedMail readonly_fields = ('parsed_content', ) @@ -27,4 +28,5 @@ class QueuedMailAdmin(admin.ModelAdmin): parsed_content.short_description = 'Parsed mail' + admin.site.register(QueuedMail, QueuedMailAdmin) diff --git a/pgweb/mailqueue/management/commands/send_queued_mail.py 
b/pgweb/mailqueue/management/commands/send_queued_mail.py index 5e019fb6..361b19f0 100755 --- a/pgweb/mailqueue/management/commands/send_queued_mail.py +++ b/pgweb/mailqueue/management/commands/send_queued_mail.py @@ -13,6 +13,7 @@ import smtplib from pgweb.mailqueue.models import QueuedMail + class Command(BaseCommand): help = 'Send queued mail' diff --git a/pgweb/mailqueue/models.py b/pgweb/mailqueue/models.py index 10c50f3d..f7018431 100644 --- a/pgweb/mailqueue/models.py +++ b/pgweb/mailqueue/models.py @@ -1,5 +1,6 @@ from django.db import models + class QueuedMail(models.Model): sender = models.EmailField(max_length=100, null=False, blank=False) receiver = models.EmailField(max_length=100, null=False, blank=False) diff --git a/pgweb/mailqueue/util.py b/pgweb/mailqueue/util.py index 71c63350..8b85982e 100644 --- a/pgweb/mailqueue/util.py +++ b/pgweb/mailqueue/util.py @@ -8,11 +8,13 @@ from email.header import Header from models import QueuedMail + def _encoded_email_header(name, email): if name: return formataddr((str(Header(name, 'utf-8')), email)) return email + def send_simple_mail(sender, receiver, subject, msgtxt, attachments=None, usergenerated=False, cc=None, replyto=None, sendername=None, receivername=None, messageid=None): # attachment format, each is a tuple of (name, mimetype,contents) # content should be *binary* and not base64 encoded, since we need to @@ -36,14 +38,13 @@ def send_simple_mail(sender, receiver, subject, msgtxt, attachments=None, userge if attachments: for filename, contenttype, content in attachments: - main,sub = contenttype.split('/') - part = MIMENonMultipart(main,sub) + main, sub = contenttype.split('/') + part = MIMENonMultipart(main, sub) part.set_payload(content) part.add_header('Content-Disposition', 'attachment; filename="%s"' % filename) encoders.encode_base64(part) msg.attach(part) - # Just write it to the queue, so it will be transactionally rolled back QueuedMail(sender=sender, receiver=receiver, fullmsg=msg.as_string(), usergenerated=usergenerated).save() if cc: @@ -52,6 +53,7 @@ def send_simple_mail(sender, receiver, subject, msgtxt, attachments=None, userge # message content to extract cc fields). 
QueuedMail(sender=sender, receiver=cc, fullmsg=msg.as_string(), usergenerated=usergenerated).save() + def send_mail(sender, receiver, fullmsg, usergenerated=False): # Send an email, prepared as the full MIME encoded mail already QueuedMail(sender=sender, receiver=receiver, fullmsg=fullmsg, usergenerated=False).save() diff --git a/pgweb/misc/forms.py b/pgweb/misc/forms.py index b8b46b89..5505d82c 100644 --- a/pgweb/misc/forms.py +++ b/pgweb/misc/forms.py @@ -3,28 +3,30 @@ from django.db.models import Q from pgweb.core.models import Version + class _version_choices(): def __iter__(self): yield ('-1', '** Select version') q = Q(supported=True) | Q(testing__gt=0) for v in Version.objects.filter(q): - for minor in range(v.latestminor,-1,-1): - if not v.testing or minor>0: + for minor in range(v.latestminor, -1, -1): + if not v.testing or minor > 0: # For beta/rc versions, there is no beta0, so exclude it s = v.buildversionstring(minor) - yield (s,s) + yield (s, s) yield ('Unsupported/Unknown', 'Unsupported/Unknown') + class SubmitBugForm(forms.Form): name = forms.CharField(max_length=100, required=True) email = forms.EmailField(max_length=100, required=True) pgversion = forms.CharField(max_length=20, required=True, - label="PostgreSQL version", - widget=forms.Select(choices=_version_choices())) + label="PostgreSQL version", + widget=forms.Select(choices=_version_choices())) os = forms.CharField(max_length=50, required=True, - label="Operating system") + label="Operating system") shortdesc = forms.CharField(max_length=100, required=True, - label="Short description") + label="Short description") details = forms.CharField(required=True, widget=forms.Textarea) def clean_pgversion(self): diff --git a/pgweb/misc/models.py b/pgweb/misc/models.py index 58148e70..90addd9e 100644 --- a/pgweb/misc/models.py +++ b/pgweb/misc/models.py @@ -1,5 +1,6 @@ from django.db import models + class BugIdMap(models.Model): # Explicit id field because we don't want a SERIAL here, since we generate # the actual bug IDs externally. diff --git a/pgweb/misc/views.py b/pgweb/misc/views.py index 9656da81..a0431f9d 100644 --- a/pgweb/misc/views.py +++ b/pgweb/misc/views.py @@ -17,12 +17,14 @@ from pgweb.misc.models import BugIdMap from forms import SubmitBugForm + def _make_bugs_messageid(bugid): return "<{0}-{1}@postgresql.org>".format( bugid, hashlib.md5("{0}-{1}".format(os.getpid(), time.time())).hexdigest()[:16], ) + @login_required def submitbug(request): if request.method == 'POST': @@ -73,17 +75,20 @@ def submitbug(request): 'savebutton': 'Submit and Send Email', }) + @login_required def submitbug_done(request, bugid): return render_pgweb(request, 'support', 'misc/bug_completed.html', { 'bugid': bugid, }) + def bugs_redir(request, bugid): r = get_object_or_404(BugIdMap, id=bugid) return HttpResponseRedirect("{0}/message-id/{1}".format(settings.SITE_ROOT, r.messageid)) + # A crash testing URL. If the file /tmp/crashtest exists, raise a http 500 # error. 
Otherwise, just return a fixed text response def crashtest(request): diff --git a/pgweb/news/admin.py b/pgweb/news/admin.py index 44734b7c..545dc4e0 100644 --- a/pgweb/news/admin.py +++ b/pgweb/news/admin.py @@ -3,6 +3,7 @@ from django.contrib import admin from pgweb.util.admin import PgwebAdmin from models import NewsArticle, NewsTag + class NewsArticleAdmin(PgwebAdmin): list_display = ('title', 'org', 'date', 'approved', ) list_filter = ('approved', ) @@ -17,8 +18,10 @@ class NewsArticleAdmin(PgwebAdmin): } return super(NewsArticleAdmin, self).change_view(request, object_id, extra_context=my_context) + class NewsTagAdmin(PgwebAdmin): list_display = ('urlname', 'name', 'description') + admin.site.register(NewsArticle, NewsArticleAdmin) admin.site.register(NewsTag, NewsTagAdmin) diff --git a/pgweb/news/feeds.py b/pgweb/news/feeds.py index b28ad8c7..aab8585f 100644 --- a/pgweb/news/feeds.py +++ b/pgweb/news/feeds.py @@ -4,6 +4,7 @@ from models import NewsArticle from datetime import datetime, time + class NewsFeed(Feed): title = description = "PostgreSQL news" link = "https://www.postgresql.org/" @@ -24,4 +25,4 @@ class NewsFeed(Feed): return "https://www.postgresql.org/about/news/%s/" % obj.id def item_pubdate(self, obj): - return datetime.combine(obj.date,time.min) + return datetime.combine(obj.date, time.min) diff --git a/pgweb/news/forms.py b/pgweb/news/forms.py index a711cfae..d25ac471 100644 --- a/pgweb/news/forms.py +++ b/pgweb/news/forms.py @@ -4,11 +4,14 @@ from django.forms import ValidationError from pgweb.core.models import Organisation from models import NewsArticle, NewsTag + class NewsArticleForm(forms.ModelForm): def __init__(self, *args, **kwargs): super(NewsArticleForm, self).__init__(*args, **kwargs) + def filter_by_user(self, user): self.fields['org'].queryset = Organisation.objects.filter(managers=user, approved=True) + def clean_date(self): if self.instance.pk and self.instance.approved: if self.cleaned_data['date'] != self.instance.date: diff --git a/pgweb/news/management/commands/twitter_post.py b/pgweb/news/management/commands/twitter_post.py index ce49b24c..5e980fc1 100644 --- a/pgweb/news/management/commands/twitter_post.py +++ b/pgweb/news/management/commands/twitter_post.py @@ -15,6 +15,7 @@ from pgweb.news.models import NewsArticle import requests_oauthlib + class Command(BaseCommand): help = 'Post to twitter' @@ -24,7 +25,7 @@ class Command(BaseCommand): if not curs.fetchall()[0][0]: raise CommandError("Failed to get advisory lock, existing twitter_post process stuck?") - articles = list(NewsArticle.objects.filter(tweeted=False, approved=True, date__gt=datetime.now()-timedelta(days=7)).order_by('date')) + articles = list(NewsArticle.objects.filter(tweeted=False, approved=True, date__gt=datetime.now() - timedelta(days=7)).order_by('date')) if not len(articles): return @@ -35,7 +36,7 @@ class Command(BaseCommand): for a in articles: # We hardcode 30 chars for the URL shortener. And then 10 to cover the intro and spacing. 
- statusstr = u"News: {0} {1}/about/news/{2}/".format(a.title[:140-40], settings.SITE_ROOT, a.id) + statusstr = u"News: {0} {1}/about/news/{2}/".format(a.title[:140 - 40], settings.SITE_ROOT, a.id) r = tw.post('https://api.twitter.com/1.1/statuses/update.json', data={ 'status': statusstr, }) diff --git a/pgweb/news/management/commands/twitter_register.py b/pgweb/news/management/commands/twitter_register.py index 6914d10f..0b911392 100644 --- a/pgweb/news/management/commands/twitter_register.py +++ b/pgweb/news/management/commands/twitter_register.py @@ -9,6 +9,7 @@ from django.conf import settings import requests_oauthlib + class Command(BaseCommand): help = 'Register with twitter oauth' diff --git a/pgweb/news/models.py b/pgweb/news/models.py index d31f64e8..ab6ed148 100644 --- a/pgweb/news/models.py +++ b/pgweb/news/models.py @@ -2,6 +2,7 @@ from django.db import models from datetime import date from pgweb.core.models import Organisation + class NewsTag(models.Model): urlname = models.CharField(max_length=20, null=False, blank=False, unique=True) name = models.CharField(max_length=32, null=False, blank=False) @@ -13,6 +14,7 @@ class NewsTag(models.Model): class Meta: ordering = ('urlname', ) + class NewsArticle(models.Model): org = models.ForeignKey(Organisation, null=False, blank=False, verbose_name="Organisation", help_text="If no organisations are listed, please check the organisation list and contact the organisation manager or webmaster@postgresql.org if none are listed.") approved = models.BooleanField(null=False, blank=False, default=False) diff --git a/pgweb/news/struct.py b/pgweb/news/struct.py index 4c49a196..42a8de57 100644 --- a/pgweb/news/struct.py +++ b/pgweb/news/struct.py @@ -1,9 +1,10 @@ from datetime import date, timedelta from models import NewsArticle + def get_struct(): now = date.today() - fouryearsago = date.today() - timedelta(4*365, 0, 0) + fouryearsago = date.today() - timedelta(4 * 365, 0, 0) # We intentionally don't put /about/newsarchive/ in the sitemap, # since we don't care about getting it indexed. 
@@ -14,4 +15,4 @@ def get_struct(): if yearsold > 4: yearsold = 4 yield ('about/news/%s/' % n.id, - 0.5-(yearsold/10.0)) + 0.5 - (yearsold / 10.0)) diff --git a/pgweb/news/views.py b/pgweb/news/views.py index a055cd17..2c3a2a7c 100644 --- a/pgweb/news/views.py +++ b/pgweb/news/views.py @@ -10,9 +10,10 @@ from forms import NewsArticleForm import json + def archive(request, tag=None, paging=None): if tag: - tag = get_object_or_404(NewsTag,urlname=tag.strip('/')) + tag = get_object_or_404(NewsTag, urlname=tag.strip('/')) news = NewsArticle.objects.filter(approved=True, tags=tag) else: tag = None @@ -23,6 +24,7 @@ def archive(request, tag=None, paging=None): 'newstags': NewsTag.objects.all(), }) + def item(request, itemid, throwaway=None): news = get_object_or_404(NewsArticle, pk=itemid) if not news.approved: @@ -32,6 +34,7 @@ def item(request, itemid, throwaway=None): 'newstags': NewsTag.objects.all(), }) + def taglist_json(request): return HttpResponse(json.dumps({ 'tags': [{'name': t.urlname, 'description': t.description} for t in NewsTag.objects.distinct('urlname')], diff --git a/pgweb/profserv/admin.py b/pgweb/profserv/admin.py index b644146c..722a8451 100644 --- a/pgweb/profserv/admin.py +++ b/pgweb/profserv/admin.py @@ -3,9 +3,11 @@ from django.contrib import admin from pgweb.util.admin import PgwebAdmin from models import ProfessionalService + class ProfessionalServiceAdmin(PgwebAdmin): list_display = ('__unicode__', 'approved',) list_filter = ('approved',) search_fields = ('org__name',) + admin.site.register(ProfessionalService, ProfessionalServiceAdmin) diff --git a/pgweb/profserv/forms.py b/pgweb/profserv/forms.py index 8df2ff40..3ec70a93 100644 --- a/pgweb/profserv/forms.py +++ b/pgweb/profserv/forms.py @@ -3,13 +3,17 @@ from django import forms from pgweb.core.models import Organisation from models import ProfessionalService + class ProfessionalServiceForm(forms.ModelForm): form_intro = """Note that in order to register a new professional service, you must first register an organisation. 
If you have not done so, use this form.""" + def __init__(self, *args, **kwargs): super(ProfessionalServiceForm, self).__init__(*args, **kwargs) + def filter_by_user(self, user): self.fields['org'].queryset = Organisation.objects.filter(managers=user, approved=True) + class Meta: model = ProfessionalService exclude = ('submitter', 'approved', ) diff --git a/pgweb/profserv/models.py b/pgweb/profserv/models.py index 180e1d43..7f5b58db 100644 --- a/pgweb/profserv/models.py +++ b/pgweb/profserv/models.py @@ -2,14 +2,15 @@ from django.db import models from pgweb.core.models import Organisation + class ProfessionalService(models.Model): approved = models.BooleanField(null=False, blank=False, default=False) - org = models.OneToOneField(Organisation, null=False, blank=False, - db_column="organisation_id", - verbose_name="organisation", - help_text="If no organisations are listed, please check the organisation list and contact the organisation manager or webmaster@postgresql.org if none are listed.") - description = models.TextField(null=False,blank=False) + org = models.OneToOneField(Organisation, null=False, blank=False, + db_column="organisation_id", + verbose_name="organisation", + help_text="If no organisations are listed, please check the organisation list and contact the organisation manager or webmaster@postgresql.org if none are listed.") + description = models.TextField(null=False, blank=False) employees = models.CharField(max_length=32, null=True, blank=True) locations = models.CharField(max_length=128, null=True, blank=True) region_africa = models.BooleanField(null=False, default=False, verbose_name="Africa") diff --git a/pgweb/profserv/struct.py b/pgweb/profserv/struct.py index 659753b2..c053d03b 100644 --- a/pgweb/profserv/struct.py +++ b/pgweb/profserv/struct.py @@ -1,5 +1,6 @@ from views import regions + def get_struct(): for key, name in regions: yield ('support/professional_support/%s/' % key, None) diff --git a/pgweb/profserv/views.py b/pgweb/profserv/views.py index ad135d22..e618bee3 100644 --- a/pgweb/profserv/views.py +++ b/pgweb/profserv/views.py @@ -8,18 +8,19 @@ from models import ProfessionalService from forms import ProfessionalServiceForm regions = ( - ('africa','Africa'), - ('asia','Asia'), - ('europe','Europe'), - ('northamerica','North America'), - ('oceania','Oceania'), - ('southamerica','South America'), + ('africa', 'Africa'), + ('asia', 'Asia'), + ('europe', 'Europe'), + ('northamerica', 'North America'), + ('oceania', 'Oceania'), + ('southamerica', 'South America'), ) + def root(request, servtype): - title = servtype=='support' and 'Professional Services' or 'Hosting Providers' - what = servtype=='support' and 'support' or 'hosting' - support = servtype=='support' + title = servtype == 'support' and 'Professional Services' or 'Hosting Providers' + what = servtype == 'support' and 'support' or 'hosting' + support = servtype == 'support' return render_pgweb(request, 'support', 'profserv/root.html', { 'title': title, 'support': support, @@ -29,19 +30,19 @@ def root(request, servtype): def region(request, servtype, regionname): - regname = [n for r,n in regions if r==regionname] + regname = [n for r, n in regions if r == regionname] if not regname: raise Http404 regname = regname[0] - what = servtype=='support' and 'support' or 'hosting' - whatname = servtype=='support' and 'Professional Services' or 'Hosting Providers' + what = servtype == 'support' and 'support' or 'hosting' + whatname = servtype == 'support' and 'Professional Services' or 'Hosting Providers' title 
= "%s - %s" % (whatname, regname) - support = servtype=='support' + support = servtype == 'support' # DB model is a bit funky here, so use the extra-where functionality to filter properly. # Field names are cleaned up earlier, so it's safe against injections. - services = ProfessionalService.objects.select_related('org').filter(approved=True).extra(where=["region_%s AND provides_%s" % (regionname, what),]) + services = ProfessionalService.objects.select_related('org').filter(approved=True).extra(where=["region_%s AND provides_%s" % (regionname, what), ]) return render_pgweb(request, 'support', 'profserv/list.html', { 'title': title, diff --git a/pgweb/pugs/admin.py b/pgweb/pugs/admin.py index 35e13e73..588714cb 100644 --- a/pgweb/pugs/admin.py +++ b/pgweb/pugs/admin.py @@ -3,9 +3,11 @@ from django.contrib import admin from pgweb.util.admin import PgwebAdmin from models import PUG + class PUGAdmin(PgwebAdmin): list_display = ('title', 'approved', ) list_filter = ('approved', ) search_fields = ('title', ) + admin.site.register(PUG, PUGAdmin) diff --git a/pgweb/pugs/models.py b/pgweb/pugs/models.py index 54383f10..acc42e09 100644 --- a/pgweb/pugs/models.py +++ b/pgweb/pugs/models.py @@ -1,5 +1,6 @@ from django.db import models + class PUG(models.Model): """ contains information about a local PostgreSQL user group diff --git a/pgweb/pugs/views.py b/pgweb/pugs/views.py index 167fd1f0..4333e37c 100644 --- a/pgweb/pugs/views.py +++ b/pgweb/pugs/views.py @@ -2,6 +2,7 @@ from pgweb.util.contexts import render_pgweb from models import PUG + def index(request): """ contains list of PUGs, in country/locale alphabetical order diff --git a/pgweb/quotes/admin.py b/pgweb/quotes/admin.py index 39267e17..aa6bb2d5 100644 --- a/pgweb/quotes/admin.py +++ b/pgweb/quotes/admin.py @@ -1,7 +1,9 @@ from django.contrib import admin from models import Quote + class QuoteAdmin(admin.ModelAdmin): list_display = ('quote', 'who', 'org', ) + admin.site.register(Quote, QuoteAdmin) diff --git a/pgweb/quotes/models.py b/pgweb/quotes/models.py index fa690bcf..dbfa59ff 100644 --- a/pgweb/quotes/models.py +++ b/pgweb/quotes/models.py @@ -1,5 +1,6 @@ from django.db import models + class Quote(models.Model): approved = models.BooleanField(null=False, default=False) quote = models.TextField(null=False, blank=False) diff --git a/pgweb/search/models.py b/pgweb/search/models.py index 4ee48cd5..24e16895 100644 --- a/pgweb/search/models.py +++ b/pgweb/search/models.py @@ -1 +1 @@ -#from django.db import models +# from django.db import models diff --git a/pgweb/search/views.py b/pgweb/search/views.py index 07f5ded1..65add875 100644 --- a/pgweb/search/views.py +++ b/pgweb/search/views.py @@ -18,9 +18,10 @@ from pgweb.lists.models import MailingList # it, so we allow development installs to run without it... 
try: import pylibmc - has_memcached=True + has_memcached = True except: - has_memcached=False + has_memcached = False + def generate_pagelinks(pagenum, totalpages, querystring): # Generate a list of links to page through a search result @@ -31,21 +32,21 @@ def generate_pagelinks(pagenum, totalpages, querystring): if pagenum > 1: # Prev link - yield 'Prev' % (querystring, pagenum-1) + yield 'Prev' % (querystring, pagenum - 1) if pagenum > 10: start = pagenum - 10 else: start = 1 - for i in range(start, min(start+20, totalpages + 1)): + for i in range(start, min(start + 20, totalpages + 1)): if i == pagenum: yield "%s" % i else: yield '%s' % (querystring, i, i) - if pagenum != min(start+20, totalpages): - yield 'Next' % (querystring, pagenum+1) + if pagenum != min(start + 20, totalpages): + yield 'Next' % (querystring, pagenum + 1) @csrf_exempt @@ -102,10 +103,10 @@ def search(request): dateval = 365 sortoptions = ( - {'val':'r', 'text': 'Rank', 'selected': not (request.GET.has_key('s') and request.GET['s'] == 'd')}, - {'val':'d', 'text': 'Date', 'selected': request.GET.has_key('s') and request.GET['s'] == 'd'}, - {'val':'i', 'text': 'Reverse date', 'selected': request.GET.has_key('s') and request.GET['s'] == 'i'}, - ) + {'val': 'r', 'text': 'Rank', 'selected': not (request.GET.has_key('s') and request.GET['s'] == 'd')}, + {'val': 'd', 'text': 'Date', 'selected': request.GET.has_key('s') and request.GET['s'] == 'd'}, + {'val': 'i', 'text': 'Reverse date', 'selected': request.GET.has_key('s') and request.GET['s'] == 'i'}, + ) dateoptions = ( {'val': -1, 'text': 'anytime'}, {'val': 1, 'text': 'within last day'}, @@ -113,7 +114,7 @@ def search(request): {'val': 31, 'text': 'within last month'}, {'val': 186, 'text': 'within last 6 months'}, {'val': 365, 'text': 'within last year'}, - ) + ) else: searchlists = False if request.GET.has_key('u'): @@ -130,24 +131,24 @@ def search(request): if not request.GET.has_key('q') or request.GET['q'] == '': if searchlists: return render(request, 'search/listsearch.html', { - 'search_error': "No search term specified.", - 'sortoptions': sortoptions, - 'lists': MailingList.objects.all().order_by("group__sortkey"), - 'listid': listid, - 'dates': dateoptions, - 'dateval': dateval, - }) + 'search_error': "No search term specified.", + 'sortoptions': sortoptions, + 'lists': MailingList.objects.all().order_by("group__sortkey"), + 'listid': listid, + 'dates': dateoptions, + 'dateval': dateval, + }) else: return render(request, 'search/sitesearch.html', { - 'search_error': "No search term specified.", - }) + 'search_error': "No search term specified.", + }) query = request.GET['q'].strip() # Anti-stefan prevention if len(query) > 1000: return render(request, 'search/sitesearch.html', { 'search_error': "Search term too long.", - }) + }) # Is the request being paged? 
if request.GET.has_key('p'): @@ -167,7 +168,7 @@ def search(request): p = { 'q': query.encode('utf-8'), 's': listsort, - } + } if listid: if listid < 0: # This is a list group, we expand that on the web server @@ -180,7 +181,7 @@ def search(request): # If memcached is available, let's try it hits = None if has_memcached: - memc = pylibmc.Client(['127.0.0.1',], binary=True) + memc = pylibmc.Client(['127.0.0.1', ], binary=True) # behavior not supported on pylibmc in squeeze:: behaviors={'tcp_nodelay':True}) try: hits = memc.get(urlstr) @@ -194,23 +195,23 @@ def search(request): else: c = httplib.HTTPSConnection(settings.ARCHIVES_SEARCH_SERVER, strict=True, timeout=5) c.request('POST', '/archives-search/', urlstr, {'Content-type': 'application/x-www-form-urlencoded; charset=utf-8'}) - c.sock.settimeout(20) # Set a 20 second timeout + c.sock.settimeout(20) # Set a 20 second timeout try: r = c.getresponse() except (socket.timeout, ssl.SSLError): return render(request, 'search/listsearch.html', { - 'search_error': 'Timeout when talking to search server. Please try your search again later, or with a more restrictive search terms.', - }) + 'search_error': 'Timeout when talking to search server. Please try your search again later, or with a more restrictive search terms.', + }) if r.status != 200: memc = None return render(request, 'search/listsearch.html', { - 'search_error': 'Error talking to search server: %s' % r.reason, - }) + 'search_error': 'Error talking to search server: %s' % r.reason, + }) hits = json.loads(r.read()) if has_memcached and memc: # Store them in memcached too! But only for 10 minutes... # And always compress it, just because we can - memc.set(urlstr, hits, 60*10, 1) + memc.set(urlstr, hits, 60 * 10, 1) memc = None if isinstance(hits, dict): @@ -226,31 +227,31 @@ def search(request): listid or '', dateval, listsort - ) + ) return render(request, 'search/listsearch.html', { - 'hitcount': totalhits, - 'firsthit': firsthit, - 'lasthit': min(totalhits, firsthit+hitsperpage-1), - 'query': request.GET['q'], - 'pagelinks': " ".join( - generate_pagelinks(pagenum, - totalhits / hitsperpage + 1, - querystr)), - 'hits': [{ - 'date': h['d'], - 'subject': h['s'], - 'author': h['f'], - 'messageid': h['m'], - 'abstract': h['a'], - 'rank': h['r'], - } for h in hits[firsthit-1:firsthit+hitsperpage-1]], - 'sortoptions': sortoptions, - 'lists': MailingList.objects.all().order_by("group__sortkey"), - 'listid': listid, - 'dates': dateoptions, - 'dateval': dateval, - }) + 'hitcount': totalhits, + 'firsthit': firsthit, + 'lasthit': min(totalhits, firsthit + hitsperpage - 1), + 'query': request.GET['q'], + 'pagelinks': " ".join( + generate_pagelinks(pagenum, + totalhits / hitsperpage + 1, + querystr)), + 'hits': [{ + 'date': h['d'], + 'subject': h['s'], + 'author': h['f'], + 'messageid': h['m'], + 'abstract': h['a'], + 'rank': h['r'], + } for h in hits[firsthit - 1:firsthit + hitsperpage - 1]], + 'sortoptions': sortoptions, + 'lists': MailingList.objects.all().order_by("group__sortkey"), + 'listid': listid, + 'dates': dateoptions, + 'dateval': dateval, + }) else: # Website search is still done by making a regular pgsql connection @@ -260,8 +261,8 @@ def search(request): curs = conn.cursor() except: return render(request, 'search/sitesearch.html', { - 'search_error': 'Could not connect to search database.' - }) + 'search_error': 'Could not connect to search database.' + }) # This is kind of a hack, but... 
Some URLs are flagged as internal # and should as such only be included in searches that explicitly @@ -280,11 +281,11 @@ def search(request): 'allsites': allsites, 'suburl': suburl, 'internal': include_internal, - }) + }) except psycopg2.ProgrammingError: return render(request, 'search/sitesearch.html', { - 'search_error': 'Error executing search query.' - }) + 'search_error': 'Error executing search query.' + }) hits = curs.fetchall() conn.close() @@ -300,22 +301,22 @@ def search(request): urllib.quote_plus(query.encode('utf-8')), allsites and "1" or "0", quoted_suburl, - ) + ) return render(request, 'search/sitesearch.html', { - 'suburl': suburl, - 'allsites': allsites, - 'hitcount': totalhits, - 'firsthit': firsthit, - 'lasthit': min(totalhits, firsthit+hitsperpage-1), - 'query': request.GET['q'], - 'pagelinks': " ".join( - generate_pagelinks(pagenum, - totalhits / hitsperpage + 1, - querystr)), - 'hits': [{ - 'title': h[3], - 'url': "%s%s" % (h[1], h[2]), - 'abstract': h[4].replace("[[[[[[", "").replace("]]]]]]",""), - 'rank': h[5]} for h in hits[:-1]], - }) + 'suburl': suburl, + 'allsites': allsites, + 'hitcount': totalhits, + 'firsthit': firsthit, + 'lasthit': min(totalhits, firsthit + hitsperpage - 1), + 'query': request.GET['q'], + 'pagelinks': " ".join( + generate_pagelinks(pagenum, + totalhits / hitsperpage + 1, + querystr)), + 'hits': [{ + 'title': h[3], + 'url': "%s%s" % (h[1], h[2]), + 'abstract': h[4].replace("[[[[[[", "").replace("]]]]]]", ""), + 'rank': h[5]} for h in hits[:-1]], + }) diff --git a/pgweb/security/admin.py b/pgweb/security/admin.py index 977a407b..03228614 100644 --- a/pgweb/security/admin.py +++ b/pgweb/security/admin.py @@ -6,26 +6,30 @@ from pgweb.core.models import Version from pgweb.news.models import NewsArticle from models import SecurityPatch, SecurityPatchVersion + class VersionChoiceField(forms.ModelChoiceField): def label_from_instance(self, obj): return obj.numtree + class SecurityPatchVersionAdminForm(forms.ModelForm): model = SecurityPatchVersion version = VersionChoiceField(queryset=Version.objects.filter(tree__gt=0), required=True) + class SecurityPatchVersionAdmin(admin.TabularInline): model = SecurityPatchVersion extra = 2 form = SecurityPatchVersionAdminForm + class SecurityPatchForm(forms.ModelForm): model = SecurityPatch newspost = forms.ModelChoiceField(queryset=NewsArticle.objects.filter(org=settings.PGDG_ORG_ID), required=False) def clean(self): d = super(SecurityPatchForm, self).clean() - vecs = [v for k,v in d.items() if k.startswith('vector_')] + vecs = [v for k, v in d.items() if k.startswith('vector_')] empty = [v for v in vecs if v == ''] if len(empty) != len(vecs) and len(empty) != 0: for k in d.keys(): @@ -33,6 +37,7 @@ class SecurityPatchForm(forms.ModelForm): self.add_error(k, 'Either specify all vector values or none') return d + class SecurityPatchAdmin(admin.ModelAdmin): form = SecurityPatchForm exclude = ['cvenumber', ] @@ -54,12 +59,15 @@ class SecurityPatchAdmin(admin.ModelAdmin): def make_public(self, request, queryset): self.do_public(queryset, True) + def make_unpublic(self, request, queryset): self.do_public(queryset, False) + def do_public(self, queryset, val): # Intentionally loop and do manually, so we generate change notices for p in queryset.all(): - p.public=val + p.public = val p.save() + admin.site.register(SecurityPatch, SecurityPatchAdmin) diff --git a/pgweb/security/management/commands/update_cve_links.py b/pgweb/security/management/commands/update_cve_links.py index e74c072a..d07641d4 100644 --- 
a/pgweb/security/management/commands/update_cve_links.py +++ b/pgweb/security/management/commands/update_cve_links.py @@ -13,6 +13,7 @@ from pgweb.util.misc import varnish_purge import requests + class Command(BaseCommand): help = 'Update CVE links' diff --git a/pgweb/security/models.py b/pgweb/security/models.py index 34166fca..be317931 100644 --- a/pgweb/security/models.py +++ b/pgweb/security/models.py @@ -8,7 +8,7 @@ from pgweb.news.models import NewsArticle import cvss -vector_choices = {k:list(v.items()) for k,v in cvss.constants3.METRICS_VALUE_NAMES.items()} +vector_choices = {k: list(v.items()) for k, v in cvss.constants3.METRICS_VALUE_NAMES.items()} component_choices = ( ('core server', 'Core server product'), @@ -19,25 +19,29 @@ component_choices = ( ('other', 'Other'), ) + re_cve = re.compile('^(\d{4})-(\d{4,5})$') + + def cve_validator(val): if not re_cve.match(val): raise ValidationError("Enter CVE in format 0000-0000 without the CVE text") + def other_vectors_validator(val): if val != val.upper(): raise ValidationError("Vector must be uppercase") try: for vector in val.split('/'): - k,v = vector.split(':') + k, v = vector.split(':') if not cvss.constants3.METRICS_VALUES.has_key(k): raise ValidationError("Metric {0} is unknown".format(k)) if k in ('AV', 'AC', 'PR', 'UI', 'S', 'C', 'I', 'A'): raise ValidationError("Metric {0} must be specified in the dropdowns".format(k)) if not cvss.constants3.METRICS_VALUES[k].has_key(v): raise ValidationError("Metric {0} has unknown value {1}. Valind ones are: {2}".format( - k,v, + k, v, ", ".join(cvss.constants3.METRICS_VALUES[k].keys()), )) except ValidationError: @@ -45,10 +49,11 @@ def other_vectors_validator(val): except Exception, e: raise ValidationError("Failed to parse vectors: %s" % e) + class SecurityPatch(models.Model): public = models.BooleanField(null=False, blank=False, default=False) newspost = models.ForeignKey(NewsArticle, null=True, blank=True) - cve = models.CharField(max_length=32, null=False, blank=True, validators=[cve_validator,]) + cve = models.CharField(max_length=32, null=False, blank=True, validators=[cve_validator, ]) cve_visible = models.BooleanField(null=False, blank=False, default=False) cvenumber = models.IntegerField(null=False, blank=False, db_index=True) detailslink = models.URLField(null=False, blank=True) @@ -65,7 +70,7 @@ class SecurityPatch(models.Model): vector_c = models.CharField(max_length=1, null=False, blank=True, verbose_name="Confidentiality Impact", choices=vector_choices['C']) vector_i = models.CharField(max_length=1, null=False, blank=True, verbose_name="Integrity Impact", choices=vector_choices['I']) vector_a = models.CharField(max_length=1, null=False, blank=True, verbose_name="Availability Impact", choices=vector_choices['A']) - legacyscore = models.CharField(max_length=1, null=False, blank=True, verbose_name='Legacy score', choices=(('A', 'A'),('B','B'),('C','C'),('D','D'))) + legacyscore = models.CharField(max_length=1, null=False, blank=True, verbose_name='Legacy score', choices=(('A', 'A'), ('B', 'B'), ('C', 'C'), ('D', 'D'))) purge_urls = ('/support/security/', ) @@ -109,8 +114,8 @@ class SecurityPatch(models.Model): verbose_name_plural = 'Security patches' ordering = ('-cvenumber',) + class SecurityPatchVersion(models.Model): patch = models.ForeignKey(SecurityPatch, null=False, blank=False) version = models.ForeignKey(Version, null=False, blank=False) fixed_minor = models.IntegerField(null=False, blank=False) - diff --git a/pgweb/security/views.py b/pgweb/security/views.py index 
eeba3663..5340bafb 100644 --- a/pgweb/security/views.py +++ b/pgweb/security/views.py @@ -5,9 +5,11 @@ from pgweb.util.contexts import render_pgweb from pgweb.core.models import Version from models import SecurityPatch + def GetPatchesList(filt): return SecurityPatch.objects.raw("SELECT p.*, array_agg(CASE WHEN v.tree >= 10 THEN v.tree::int ELSE v.tree END ORDER BY v.tree) AS affected, array_agg(CASE WHEN v.tree >= 10 THEN v.tree::int ELSE v.tree END || '.' || fixed_minor ORDER BY v.tree) AS fixed FROM security_securitypatch p INNER JOIN security_securitypatchversion sv ON p.id=sv.patch_id INNER JOIN core_version v ON v.id=sv.version_id WHERE p.public AND {0} GROUP BY p.id ORDER BY cvenumber DESC".format(filt)) + def _list_patches(request, filt): patches = GetPatchesList(filt) @@ -19,10 +21,12 @@ def _list_patches(request, filt): ), }) + def index(request): # Show all supported versions return _list_patches(request, "v.supported") + def version(request, numtree): version = get_object_or_404(Version, tree=numtree) # It's safe to pass in the value since we get it from the module, not from diff --git a/pgweb/settings.py b/pgweb/settings.py index 68a86f06..1c72af65 100644 --- a/pgweb/settings.py +++ b/pgweb/settings.py @@ -8,12 +8,12 @@ ADMINS = ( MANAGERS = ADMINS -DATABASES={ +DATABASES = { 'default': { 'ENGINE': 'django.db.backends.postgresql_psycopg2', 'NAME': 'pgweb', - } } +} # Local time zone for this installation. Choices can be found here: # http://en.wikipedia.org/wiki/List_of_tz_zones_by_name @@ -58,7 +58,7 @@ MIDDLEWARE_CLASSES = [ 'pgweb.util.middleware.PgMiddleware', ] -CSRF_FAILURE_VIEW='pgweb.core.views.csrf_failure' +CSRF_FAILURE_VIEW = 'pgweb.core.views.csrf_failure' ROOT_URLCONF = 'pgweb.urls' @@ -80,9 +80,9 @@ TEMPLATES = [{ }, }] -LOGIN_URL='/account/login/' -LOGIN_REDIRECT_URL='/account/' -LOGOUT_URL='/account/logout/' +LOGIN_URL = '/account/login/' +LOGIN_REDIRECT_URL = '/account/' +LOGOUT_URL = '/account/logout/' AUTHENTICATION_BACKENDS = ( 'pgweb.util.auth.AuthBackend', @@ -118,48 +118,48 @@ INSTALLED_APPS = [ ] # Default format for date/time (as it changes between machines) -DATETIME_FORMAT="Y-m-d H:i:s" +DATETIME_FORMAT = "Y-m-d H:i:s" # Configure recaptcha. Most details contain keys and are thus handled # in settings_local.py. Override NOCAPTCHA to actually use them. -NOCAPTCHA=True -RECAPTCHA_SITE_KEY="" -RECAPTCHA_SECRET_KEY="" +NOCAPTCHA = True +RECAPTCHA_SITE_KEY = "" +RECAPTCHA_SECRET_KEY = "" ### # Application specific settings, likely overridden in settings_local.py. 
# # In particular, adjust the email addresses ### -SESSION_COOKIE_SECURE=True # Allow our session only over https -SESSION_COOKIE_DOMAIN="www.postgresql.org" # Don't allow access by other postgresql.org sites -SESSION_COOKIE_HTTPONLY=True # Access over http only, no js -CSRF_COOKIE_SECURE=SESSION_COOKIE_SECURE -CSRF_COOKIE_DOMAIN=SESSION_COOKIE_DOMAIN -CSRF_COOKIE_HTTPONLY=SESSION_COOKIE_HTTPONLY +SESSION_COOKIE_SECURE = True # Allow our session only over https +SESSION_COOKIE_DOMAIN = "www.postgresql.org" # Don't allow access by other postgresql.org sites +SESSION_COOKIE_HTTPONLY = True # Access over http only, no js +CSRF_COOKIE_SECURE = SESSION_COOKIE_SECURE +CSRF_COOKIE_DOMAIN = SESSION_COOKIE_DOMAIN +CSRF_COOKIE_HTTPONLY = SESSION_COOKIE_HTTPONLY -SITE_ROOT="http://www.postgresql.org" # Root of working URLs -FTP_PICKLE="/usr/local/pgweb/ftpsite.pickle" # Location of file with current contents from ftp site -YUM_JSON="/usr/local/pgweb/external/yum.json" -STATIC_CHECKOUT="/usr/local/pgweb-static" # Location of a checked out pgweb-static project -NOTIFICATION_EMAIL="someone@example.com" # Address to send notifications *to* -NOTIFICATION_FROM="someone@example.com" # Address to send notifications *from* -ACCOUNTS_NOREPLY_FROM="someone@example.com" # Address to send account messages from -BUGREPORT_EMAIL="someone@example.com" # Address to pgsql-bugs list -BUGREPORT_NOREPLY_EMAIL="someone-noreply@example.com" # Address to no-reply pgsql-bugs address -DOCSREPORT_EMAIL="someone@example.com" # Address to pgsql-docs list -DOCSREPORT_NOREPLY_EMAIL="someone-noreply@example.com" # Address to no-reply pgsql-docs address -FRONTEND_SERVERS=() # A tuple containing the *IP addresses* of all the - # varnish frontend servers in use. -FTP_MASTERS=() # A tuple containing the *IP addresses* of all machines - # trusted to upload ftp structure data -VARNISH_PURGERS=() # Extra servers that can do varnish purges through our queue -DO_ESI=False # Generate ESI tags -ARCHIVES_SEARCH_SERVER="archives.postgresql.org" # Where to post REST request for archives search -ARCHIVES_SEARCH_PLAINTEXT=False # Contact ARCHIVES_SEARCH_SERVER with http instead of https -FRONTEND_SMTP_RELAY="magus.postgresql.org" # Where to relay user generated email -OAUTH={} # OAuth providers and keys -PGDG_ORG_ID=-1 # id of the PGDG organisation entry +SITE_ROOT = "http://www.postgresql.org" # Root of working URLs +FTP_PICKLE = "/usr/local/pgweb/ftpsite.pickle" # Location of file with current contents from ftp site +YUM_JSON = "/usr/local/pgweb/external/yum.json" +STATIC_CHECKOUT = "/usr/local/pgweb-static" # Location of a checked out pgweb-static project +NOTIFICATION_EMAIL = "someone@example.com" # Address to send notifications *to* +NOTIFICATION_FROM = "someone@example.com" # Address to send notifications *from* +ACCOUNTS_NOREPLY_FROM = "someone@example.com" # Address to send account messages from +BUGREPORT_EMAIL = "someone@example.com" # Address to pgsql-bugs list +BUGREPORT_NOREPLY_EMAIL = "someone-noreply@example.com" # Address to no-reply pgsql-bugs address +DOCSREPORT_EMAIL = "someone@example.com" # Address to pgsql-docs list +DOCSREPORT_NOREPLY_EMAIL = "someone-noreply@example.com" # Address to no-reply pgsql-docs address +FRONTEND_SERVERS = () # A tuple containing the *IP addresses* of all the + # varnish frontend servers in use. 
+FTP_MASTERS = () # A tuple containing the *IP addresses* of all machines + # trusted to upload ftp structure data +VARNISH_PURGERS = () # Extra servers that can do varnish purges through our queue +DO_ESI = False # Generate ESI tags +ARCHIVES_SEARCH_SERVER = "archives.postgresql.org" # Where to post REST request for archives search +ARCHIVES_SEARCH_PLAINTEXT = False # Contact ARCHIVES_SEARCH_SERVER with http instead of https +FRONTEND_SMTP_RELAY = "magus.postgresql.org" # Where to relay user generated email +OAUTH = {} # OAuth providers and keys +PGDG_ORG_ID = -1 # id of the PGDG organisation entry # Load local settings overrides from settings_local import * diff --git a/pgweb/sponsors/models.py b/pgweb/sponsors/models.py index e5d48d83..378d789e 100644 --- a/pgweb/sponsors/models.py +++ b/pgweb/sponsors/models.py @@ -2,6 +2,7 @@ from django.db import models from pgweb.core.models import Country + class SponsorType(models.Model): typename = models.CharField(max_length=32, null=False, blank=False) description = models.TextField(null=False, blank=False) @@ -16,6 +17,7 @@ class SponsorType(models.Model): class Meta: ordering = ('sortkey', ) + class Sponsor(models.Model): sponsortype = models.ForeignKey(SponsorType, null=False) name = models.CharField(max_length=128, null=False, blank=False) @@ -31,6 +33,7 @@ class Sponsor(models.Model): class Meta: ordering = ('name', ) + class Server(models.Model): name = models.CharField(max_length=32, null=False, blank=False) sponsors = models.ManyToManyField(Sponsor) diff --git a/pgweb/sponsors/views.py b/pgweb/sponsors/views.py index 15bef9d4..d78356f1 100644 --- a/pgweb/sponsors/views.py +++ b/pgweb/sponsors/views.py @@ -3,13 +3,15 @@ from pgweb.util.decorators import cache from models import Sponsor, Server + @cache(minutes=30) def sponsors(request): - sponsors = Sponsor.objects.select_related().filter(sponsortype__sortkey__gt=0).order_by('sponsortype__sortkey' ,'?') + sponsors = Sponsor.objects.select_related().filter(sponsortype__sortkey__gt=0).order_by('sponsortype__sortkey', '?') return render_pgweb(request, 'about', 'sponsors/sponsors.html', { 'sponsors': sponsors, }) + def servers(request): servers = Server.objects.select_related().all() return render_pgweb(request, 'about', 'sponsors/servers.html', { diff --git a/pgweb/survey/admin.py b/pgweb/survey/admin.py index cdd9c4cd..7d59e313 100644 --- a/pgweb/survey/admin.py +++ b/pgweb/survey/admin.py @@ -1,13 +1,16 @@ from django.contrib import admin from models import Survey, SurveyLock, SurveyAnswer + class SurveyAdmin(admin.ModelAdmin): - list_display = ('question','posted','current',) - ordering = ('-posted',) + list_display = ('question', 'posted', 'current', ) + ordering = ('-posted', ) + class SurveyAnswerAdmin(admin.ModelAdmin): - list_display = ('survey','tot1','tot2','tot3','tot4','tot5','tot6','tot7','tot8') - ordering = ('-survey__posted',) + list_display = ('survey', 'tot1', 'tot2', 'tot3', 'tot4', 'tot5', 'tot6', 'tot7', 'tot8') + ordering = ('-survey__posted', ) + admin.site.register(Survey, SurveyAdmin) admin.site.register(SurveyLock) diff --git a/pgweb/survey/models.py b/pgweb/survey/models.py index 72cfe7c7..237d3177 100644 --- a/pgweb/survey/models.py +++ b/pgweb/survey/models.py @@ -1,16 +1,20 @@ from django.db import models + # internal text/value object class SurveyQuestion(object): def __init__(self, value, text): self.value = value self.text = text + + class SurveyAnswerValues(object): def __init__(self, option, votes, votespercent): self.option = option self.votes = votes 
self.votespercent = votespercent + class Survey(models.Model): question = models.CharField(max_length=500, null=False, blank=False) opt1 = models.CharField(max_length=500, null=False, blank=False) @@ -31,7 +35,7 @@ class Survey(models.Model): @property def questions(self): - for i in range (1,9): + for i in range(1, 9): v = getattr(self, "opt%s" % i) if not v: break yield SurveyQuestion(i, v) @@ -45,22 +49,22 @@ class Survey(models.Model): @property def completeanswers(self): for a in self._get_complete_answers(): - yield SurveyAnswerValues(a[0], a[1], self.totalvotes>0 and (100*a[1]/self.totalvotes) or 0) + yield SurveyAnswerValues(a[0], a[1], self.totalvotes > 0 and (100 * a[1] / self.totalvotes) or 0) @property def totalvotes(self): - if not hasattr(self,"_totalvotes"): + if not hasattr(self, "_totalvotes"): self._totalvotes = 0 for a in self._get_complete_answers(): self._totalvotes = self._totalvotes + a[1] return self._totalvotes def _get_complete_answers(self): - for i in range(1,9): + for i in range(1, 9): q = getattr(self, "opt%s" % i) if not q: break n = getattr(self.answers, "tot%s" % i) - yield (q,n) + yield (q, n) def save(self): # Make sure only one survey at a time can be the current one @@ -71,12 +75,13 @@ class Survey(models.Model): for p in previous: if not p == self: p.current = False - p.save() # primary key check avoids recursion + p.save() # primary key check avoids recursion # Now that we've made any previously current ones non-current, we are # free to save this one. super(Survey, self).save() + class SurveyAnswer(models.Model): survey = models.OneToOneField(Survey, null=False, blank=False, primary_key=True) tot1 = models.IntegerField(null=False, default=0) @@ -90,6 +95,7 @@ class SurveyAnswer(models.Model): purge_urls = ('/community/survey', ) + class SurveyLock(models.Model): ipaddr = models.GenericIPAddressField(null=False, blank=False) time = models.DateTimeField(null=False, auto_now_add=True) diff --git a/pgweb/survey/views.py b/pgweb/survey/views.py index ba3c09a3..a30f96c6 100644 --- a/pgweb/survey/views.py +++ b/pgweb/survey/views.py @@ -10,6 +10,7 @@ from pgweb.util.helpers import HttpServerError from models import Survey, SurveyAnswer, SurveyLock + def results(request, surveyid, junk=None): survey = get_object_or_404(Survey, pk=surveyid) surveylist = Survey.objects.all().order_by('-posted') @@ -19,6 +20,7 @@ def results(request, surveyid, junk=None): 'surveylist': surveylist, }) + # Served over insecure HTTP, the Varnish proxy strips cookies @csrf_exempt def vote(request, surveyid): @@ -51,7 +53,7 @@ def vote(request, surveyid): lock.save() answers = SurveyAnswer.objects.get_or_create(survey=surv)[0] - setattr(answers, attrname, getattr(answers, attrname)+1) + setattr(answers, attrname, getattr(answers, attrname) + 1) answers.save() # Do explicit varnish purge, since it seems that the model doesn't diff --git a/pgweb/util/admin.py b/pgweb/util/admin.py index 6b59a1dc..b3b10f58 100644 --- a/pgweb/util/admin.py +++ b/pgweb/util/admin.py @@ -51,7 +51,7 @@ class PgwebAdmin(admin.ModelAdmin): for x in queryset: x.delete() custom_delete_selected.short_description = "Delete selected items" - actions=['custom_delete_selected'] + actions = ['custom_delete_selected'] def save_model(self, request, obj, form, change): if change and hasattr(self.model, 'send_notification') and self.model.send_notification: @@ -81,14 +81,16 @@ class PgwebAdmin(admin.ModelAdmin): msgstr) # Also generate a mail to the moderators - send_simple_mail(settings.NOTIFICATION_FROM, - 
settings.NOTIFICATION_EMAIL, - "Moderation comment on %s %s" % (obj.__class__._meta.verbose_name, obj.id), - _get_moderator_notification_text(obj, - request.POST['new_notification'], - request.user.username - )) - + send_simple_mail( + settings.NOTIFICATION_FROM, + settings.NOTIFICATION_EMAIL, + "Moderation comment on %s %s" % (obj.__class__._meta.verbose_name, obj.id), + _get_moderator_notification_text( + obj, + request.POST['new_notification'], + request.user.username + ) + ) # Either no notifications, or done with notifications super(PgwebAdmin, self).save_model(request, obj, form, change) @@ -112,7 +114,6 @@ request, and your submission will be re-moderated. """ % (objtype, txt) - def _get_moderator_notification_text(obj, txt, moderator): return """Moderator %s made a comment to a pending object: Object type: %s diff --git a/pgweb/util/auth.py b/pgweb/util/auth.py index de016ab7..218dfc3e 100644 --- a/pgweb/util/auth.py +++ b/pgweb/util/auth.py @@ -1,6 +1,7 @@ from django.contrib.auth.models import User from django.contrib.auth.backends import ModelBackend + # Special version of the authentication backend, so we can handle things like # forced lowercasing of usernames. class AuthBackend(ModelBackend): @@ -19,4 +20,4 @@ class AuthBackend(ModelBackend): # User not found, so clearly they can't log in! return None - return None # Should never get here, but just in case... + return None # Should never get here, but just in case... diff --git a/pgweb/util/contexts.py b/pgweb/util/contexts.py index a29a64f4..853ac1d9 100644 --- a/pgweb/util/contexts.py +++ b/pgweb/util/contexts.py @@ -5,83 +5,83 @@ from django.conf import settings # This is the whole site navigation structure. Stick in a smarter file? sitenav = { 'about': [ - {'title': 'About', 'link':'/about/'}, - {'title': 'Code of Conduct', 'link':'/about/policies/coc/', 'submenu': [ - {'title': 'Committee', 'link':'/about/policies/coc_committee/'} + {'title': 'About', 'link': '/about/'}, + {'title': 'Code of Conduct', 'link': '/about/policies/coc/', 'submenu': [ + {'title': 'Committee', 'link': '/about/policies/coc_committee/'} ]}, - {'title': 'Feature Matrix', 'link':'/about/featurematrix/'}, - {'title': 'Donate', 'link':'/about/donate/'}, - {'title': 'History', 'link':'/docs/current/history.html'}, - {'title': 'Sponsors', 'link':'/about/sponsors/', 'submenu': [ - {'title': 'Servers', 'link': '/about/servers/'}, + {'title': 'Feature Matrix', 'link': '/about/featurematrix/'}, + {'title': 'Donate', 'link': '/about/donate/'}, + {'title': 'History', 'link': '/docs/current/history.html'}, + {'title': 'Sponsors', 'link': '/about/sponsors/', 'submenu': [ + {'title': 'Servers', 'link': '/about/servers/'}, ]}, - {'title': 'Latest News', 'link':'/about/newsarchive/'}, - {'title': 'Upcoming Events', 'link':'/about/events/'}, - {'title': 'Press', 'link':'/about/press/'}, - {'title': 'Licence', 'link':'/about/licence/'}, + {'title': 'Latest News', 'link': '/about/newsarchive/'}, + {'title': 'Upcoming Events', 'link': '/about/events/'}, + {'title': 'Press', 'link': '/about/press/'}, + {'title': 'Licence', 'link': '/about/licence/'}, ], 'download': [ - {'title': 'Downloads', 'link':'/download/', 'submenu': [ - {'title': 'Binary', 'link':'/download/'}, - {'title': 'Source', 'link':'/ftp/source/'} + {'title': 'Downloads', 'link': '/download/', 'submenu': [ + {'title': 'Binary', 'link': '/download/'}, + {'title': 'Source', 'link': '/ftp/source/'} ]}, - {'title': 'Software Catalogue', 'link':'/download/product-categories/'}, - {'title': 'File Browser', 
'link':'/ftp/'}, + {'title': 'Software Catalogue', 'link': '/download/product-categories/'}, + {'title': 'File Browser', 'link': '/ftp/'}, ], 'docs': [ - {'title': 'Documentation', 'link':'/docs/'}, - {'title': 'Manuals', 'link':'/docs/manuals/', 'submenu': [ - {'title': 'Archive', 'link':'/docs/manuals/archive/'}, - {'title': 'French', 'link':'https://docs.postgresql.fr/'}, - {'title': 'Japanese', 'link':'http://www.postgresql.jp/document/'}, - {'title': 'Russian', 'link':'https://postgrespro.ru/docs/postgresql'}, + {'title': 'Documentation', 'link': '/docs/'}, + {'title': 'Manuals', 'link': '/docs/manuals/', 'submenu': [ + {'title': 'Archive', 'link': '/docs/manuals/archive/'}, + {'title': 'French', 'link': 'https://docs.postgresql.fr/'}, + {'title': 'Japanese', 'link': 'http://www.postgresql.jp/document/'}, + {'title': 'Russian', 'link': 'https://postgrespro.ru/docs/postgresql'}, ]}, - {'title': 'Books', 'link':'/docs/books/'}, - {'title': 'Online Resources', 'link':'/docs/online-resources/'}, - {'title': 'Wiki', 'link':'https://wiki.postgresql.org'}, + {'title': 'Books', 'link': '/docs/books/'}, + {'title': 'Online Resources', 'link': '/docs/online-resources/'}, + {'title': 'Wiki', 'link': 'https://wiki.postgresql.org'}, ], 'community': [ - {'title': 'Community', 'link':'/community/'}, - {'title': 'Contributors', 'link':'/community/contributors/'}, - {'title': 'Mailing Lists', 'link':'/list/'}, - {'title': 'IRC', 'link':'/community/irc/'}, - {'title': 'Slack', 'link':'https://postgres-slack.herokuapp.com/'}, - {'title': 'Local User Groups', 'link':'/community/user-groups/'}, - {'title': 'Events', 'link':'/about/events/'}, - {'title': 'International Sites','link':'/community/international/'}, - {'title': 'Recognition Guidelines','link':'/community/recognition/'}, + {'title': 'Community', 'link': '/community/'}, + {'title': 'Contributors', 'link': '/community/contributors/'}, + {'title': 'Mailing Lists', 'link': '/list/'}, + {'title': 'IRC', 'link': '/community/irc/'}, + {'title': 'Slack', 'link': 'https://postgres-slack.herokuapp.com/'}, + {'title': 'Local User Groups', 'link': '/community/user-groups/'}, + {'title': 'Events', 'link': '/about/events/'}, + {'title': 'International Sites', 'link': '/community/international/'}, + {'title': 'Recognition Guidelines', 'link': '/community/recognition/'}, ], 'developer': [ - {'title': 'Developers', 'link':'/developer/'}, - {'title': 'Core Team', 'link':'/developer/core/'}, - {'title': 'Roadmap', 'link':'/developer/roadmap/'}, - {'title': 'Coding', 'link':'/developer/coding/'}, - {'title': 'Testing', 'link':'/developer/testing/', 'submenu': [ - {'title': 'Beta Information', 'link':'/developer/beta/'}, + {'title': 'Developers', 'link': '/developer/'}, + {'title': 'Core Team', 'link': '/developer/core/'}, + {'title': 'Roadmap', 'link': '/developer/roadmap/'}, + {'title': 'Coding', 'link': '/developer/coding/'}, + {'title': 'Testing', 'link': '/developer/testing/', 'submenu': [ + {'title': 'Beta Information', 'link': '/developer/beta/'}, ]}, - {'title': 'Mailing Lists', 'link':'/list/'}, - {'title': 'Developer FAQ', 'link':'https://wiki.postgresql.org/wiki/Developer_FAQ'}, + {'title': 'Mailing Lists', 'link': '/list/'}, + {'title': 'Developer FAQ', 'link': 'https://wiki.postgresql.org/wiki/Developer_FAQ'}, ], 'support': [ - {'title': 'Support', 'link':'/support/'}, - {'title': 'Versioning Policy', 'link':'/support/versioning/'}, - {'title': 'Security', 'link':'/support/security/'}, - {'title': 'Professional 
Services','link':'/support/professional_support/'}, - {'title': 'Hosting Solutions', 'link':'/support/professional_hosting/'}, - {'title': 'Report a Bug', 'link':'/account/submitbug/'}, + {'title': 'Support', 'link': '/support/'}, + {'title': 'Versioning Policy', 'link': '/support/versioning/'}, + {'title': 'Security', 'link': '/support/security/'}, + {'title': 'Professional Services', 'link': '/support/professional_support/'}, + {'title': 'Hosting Solutions', 'link': '/support/professional_hosting/'}, + {'title': 'Report a Bug', 'link': '/account/submitbug/'}, ], 'account': [ - {'title': 'Your account', 'link':'/account'}, - {'title': 'Profile', 'link':'/account/profile'}, - {'title': 'Submitted data', 'link':'/account', 'submenu': [ - {'title': 'News Articles', 'link':'/account/edit/news/'}, - {'title': 'Events', 'link':'/account/edit/events/'}, - {'title': 'Products', 'link':'/account/edit/products/'}, - {'title': 'Professional Services', 'link':'/account/edit/services/'}, - {'title': 'Organisations', 'link':'/account/edit/organisations/'}, + {'title': 'Your account', 'link': '/account'}, + {'title': 'Profile', 'link': '/account/profile'}, + {'title': 'Submitted data', 'link': '/account', 'submenu': [ + {'title': 'News Articles', 'link': '/account/edit/news/'}, + {'title': 'Events', 'link': '/account/edit/events/'}, + {'title': 'Products', 'link': '/account/edit/products/'}, + {'title': 'Professional Services', 'link': '/account/edit/services/'}, + {'title': 'Organisations', 'link': '/account/edit/organisations/'}, ]}, - {'title': 'Change password', 'link':'/account/changepwd/'}, - {'title': 'Logout', 'link':'/account/logout'}, + {'title': 'Change password', 'link': '/account/changepwd/'}, + {'title': 'Logout', 'link': '/account/logout'}, ], } @@ -92,10 +92,12 @@ def get_nav_menu(section): else: return {} + def render_pgweb(request, section, template, context): context['navmenu'] = get_nav_menu(section) return render(request, template, context) + def _get_gitrev(): # Return the current git revision, that is used for # cache-busting URLs. @@ -115,6 +117,7 @@ def _get_gitrev(): # If packed-refs also can't be read, just give up return 'eeeeeeee' + # Template context processor to add information about the root link and # the current git revision. git revision is returned as a lazy object so # we don't spend effort trying to load it if we don't need it (though diff --git a/pgweb/util/decorators.py b/pgweb/util/decorators.py index f29fd160..9333d456 100644 --- a/pgweb/util/decorators.py +++ b/pgweb/util/decorators.py @@ -3,6 +3,7 @@ from functools import wraps from collections import defaultdict from django.contrib.auth.decorators import login_required as django_login_required + def nocache(fn): def _nocache(request, *_args, **_kwargs): resp = fn(request, *_args, **_kwargs) @@ -10,17 +11,19 @@ def nocache(fn): return resp return _nocache + def cache(days=0, hours=0, minutes=0, seconds=0): "Set the server to cache object a specified time. 
td must be a timedelta object" def _cache(fn): def __cache(request, *_args, **_kwargs): resp = fn(request, *_args, **_kwargs) td = datetime.timedelta(hours=hours, minutes=minutes, seconds=seconds) - resp['Cache-Control'] = 's-maxage=%s' % (td.days*3600*24 + td.seconds) + resp['Cache-Control'] = 's-maxage=%s' % (td.days * 3600 * 24 + td.seconds) return resp return __cache return _cache + def allow_frames(fn): def _allow_frames(request, *_args, **_kwargs): resp = fn(request, *_args, **_kwargs) @@ -28,6 +31,7 @@ def allow_frames(fn): return resp return _allow_frames + def content_sources(what, source): def _script_sources(fn): def __script_sources(request, *_args, **_kwargs): @@ -39,12 +43,15 @@ def content_sources(what, source): return __script_sources return _script_sources + def script_sources(source): return content_sources('script', source) + def frame_sources(source): return content_sources('frame', source) + # A wrapped version of login_required that throws an exception if it's # used on a path that's not under /account/. def login_required(f): diff --git a/pgweb/util/helpers.py b/pgweb/util/helpers.py index 2159381b..60686ddb 100644 --- a/pgweb/util/helpers.py +++ b/pgweb/util/helpers.py @@ -4,6 +4,7 @@ from django.http import HttpResponseRedirect, Http404 from django.template.loader import get_template import django.utils.xmlutils + def simple_form(instancetype, itemid, request, formclass, formtemplate='base/form.html', redirect='/account/', navsection='account', fixedfields=None, createifempty=False): if itemid == 'new': instance = instancetype() @@ -33,7 +34,7 @@ def simple_form(instancetype, itemid, request, formclass, formtemplate='base/for # Set fixed fields. Note that this will not work if the fixed fields are ManyToMany, # but we'll fix that sometime in the future if fixedfields: - for k,v in fixedfields.items(): + for k, v in fixedfields.items(): setattr(r, k, v) r.save() @@ -72,13 +73,15 @@ def simple_form(instancetype, itemid, request, formclass, formtemplate='base/for 'operation': (itemid == "new") and "New" or "Edit", }) -def template_to_string(templatename, attrs = {}): + +def template_to_string(templatename, attrs={}): return get_template(templatename).render(attrs) + def HttpServerError(request, msg): r = render(request, 'errors/500.html', { - 'message': msg, - }) + 'message': msg, + }) r.status_code = 500 return r @@ -89,7 +92,7 @@ class PgXmlHelper(django.utils.xmlutils.SimplerXMLGenerator): self.skipempty = skipempty def add_xml_element(self, name, value): - if self.skipempty and value=='': return + if self.skipempty and value == '': return self.startElement(name, {}) self.characters(value) self.endElement(name) diff --git a/pgweb/util/middleware.py b/pgweb/util/middleware.py index 9abcae2e..941de86e 100644 --- a/pgweb/util/middleware.py +++ b/pgweb/util/middleware.py @@ -13,6 +13,8 @@ except ImportError: from django.utils._threading_local import local _thread_locals = local() + + def get_current_user(): return getattr(_thread_locals, 'user', None) @@ -24,7 +26,7 @@ class PgMiddleware(object): return None def process_request(self, request): -# Thread local store for username, see comment at the top of this file + # Thread local store for username, see comment at the top of this file _thread_locals.user = getattr(request, 'user', None) initialize_template_collection() @@ -46,16 +48,16 @@ class PgMiddleware(object): ('connect', ["'self'", "www.google-analytics.com", "ssl.google-analytics.com"]), ('media', ["'self'", ]), ('style', ["'self'", "fonts.googleapis.com"]), - 
('font', ["'self'", "fonts.gstatic.com", "data:" ]), + ('font', ["'self'", "fonts.gstatic.com", "data:", ]), ]) if hasattr(response, 'x_allow_extra_sources'): - for k,v in response.x_allow_extra_sources.items(): + for k, v in response.x_allow_extra_sources.items(): if k in sources: sources[k].extend(v) else: sources[k] = v - security_policies = ["{0}-src {1}".format(k," ".join(v)) for k,v in sources.items()] + security_policies = ["{0}-src {1}".format(k, " ".join(v)) for k, v in sources.items()] if not getattr(response, 'x_allow_frames', False): response['X-Frame-Options'] = 'DENY' diff --git a/pgweb/util/misc.py b/pgweb/util/misc.py index 760ba872..65ffb330 100644 --- a/pgweb/util/misc.py +++ b/pgweb/util/misc.py @@ -8,6 +8,7 @@ from pgweb.mailqueue.util import send_simple_mail from pgweb.util.helpers import template_to_string import re + def send_template_mail(sender, receiver, subject, templatename, templateattr={}, usergenerated=False, cc=None, replyto=None, receivername=None, sendername=None, messageid=None): d = { 'link_root': settings.SITE_ROOT, @@ -19,6 +20,7 @@ def send_template_mail(sender, receiver, subject, templatename, templateattr={}, receivername=receivername, sendername=sendername, messageid=messageid) + def get_client_ip(request): """ Get the IP of the client. If the client is served through our Varnish caches, @@ -41,6 +43,7 @@ def varnish_purge_xkey(xkey): """ connection.cursor().execute("SELECT varnish_purge_xkey(%s)", (xkey, )) + def varnish_purge(url): """ Purge the specified URL from Varnish. Will add initial anchor to the URL, @@ -49,6 +52,7 @@ def varnish_purge(url): url = '^%s' % url connection.cursor().execute("SELECT varnish_purge(%s)", (url, )) + def varnish_purge_expr(expr): """ Purge the specified expression from Varnish. Does not modify the expression @@ -56,6 +60,7 @@ def varnish_purge_expr(expr): """ connection.cursor().execute("SELECT varnish_purge_expr(%s)", (expr, )) + def version_sort(l): """ map a directory name to a format that will show up sensibly in an ascii sort @@ -64,12 +69,12 @@ def version_sort(l): generally don't have that. """ mkey = l['link'] - m = re.match('v?([0-9]+)\.([0-9]+)\.([0-9]+)$',l['url']) + m = re.match('v?([0-9]+)\.([0-9]+)\.([0-9]+)$', l['url']) if m: - mkey = m.group(1) + '%02d' % int(m.group(2)) + '%02d' % int(m.group(3)); - m = re.match('v?([0-9]+)\.([0-9]+)$',l['url']) + mkey = m.group(1) + '%02d' % int(m.group(2)) + '%02d' % int(m.group(3)) + m = re.match('v?([0-9]+)\.([0-9]+)$', l['url']) if m: - mkey = m.group(1) + '%02d' % int(m.group(2)); + mkey = m.group(1) + '%02d' % int(m.group(2)) # SOOO ugly. But if it's v10 and up, just prefix it to get it higher if int(m.group(1)) >= 10: mkey = 'a' + mkey @@ -80,6 +85,7 @@ def version_sort(l): return mkey + def generate_random_token(): """ Generate a random token of 64 characters. 
This token will be diff --git a/pgweb/util/moderation.py b/pgweb/util/moderation.py index d67c9748..83d7f316 100644 --- a/pgweb/util/moderation.py +++ b/pgweb/util/moderation.py @@ -6,13 +6,16 @@ from pgweb.downloads.models import Product from pgweb.profserv.models import ProfessionalService from pgweb.quotes.models import Quote + # Pending moderation requests (including URLs for the admin interface)) def _get_unapproved_list(objecttype): objects = objecttype.objects.filter(approved=False) if not len(objects): return None - return { 'name': objects[0]._meta.verbose_name_plural, 'entries': - [{'url': '/admin/%s/%s/%s/' % (x._meta.app_label, x._meta.model_name, x.pk), 'title': unicode(x)} for x in objects] - } + return { + 'name': objects[0]._meta.verbose_name_plural, + 'entries': [{'url': '/admin/%s/%s/%s/' % (x._meta.app_label, x._meta.model_name, x.pk), 'title': unicode(x)} for x in objects] + } + def get_all_pending_moderations(): applist = [ @@ -22,5 +25,5 @@ def get_all_pending_moderations(): _get_unapproved_list(Product), _get_unapproved_list(ProfessionalService), _get_unapproved_list(Quote), - ] + ] return [x for x in applist if x] diff --git a/pgweb/util/signals.py b/pgweb/util/signals.py index 178ec04d..85d63023 100644 --- a/pgweb/util/signals.py +++ b/pgweb/util/signals.py @@ -8,6 +8,7 @@ from pgweb.util.middleware import get_current_user from pgweb.util.misc import varnish_purge from pgweb.mailqueue.util import send_simple_mail + def _build_url(obj): if obj.id: return "%s/admin/%s/%s/%s/" % ( @@ -23,24 +24,29 @@ def _build_url(obj): obj._meta.model_name, ) + def _get_full_text_diff(obj, oldobj): fieldlist = _get_all_notification_fields(obj) if not fieldlist: return "This object does not know how to express ifself." - s = "\n\n".join(["\n".join(filter(lambda x: not x.startswith('@@'), - difflib.unified_diff( - _get_attr_value(oldobj, n).splitlines(), - _get_attr_value(obj, n).splitlines(), - n=1, - lineterm='', - fromfile=n, - tofile=n, - )) + s = "\n\n".join(["\n".join( + filter( + lambda x: not x.startswith('@@'), + difflib.unified_diff( + _get_attr_value(oldobj, n).splitlines(), + _get_attr_value(obj, n).splitlines(), + n=1, + lineterm='', + fromfile=n, + tofile=n, + ) + ) ) for n in fieldlist if _get_attr_value(oldobj, n) != _get_attr_value(obj, n)]) if not s: return None return s + def _get_all_notification_fields(obj): if hasattr(obj, 'notify_fields'): return obj.notify_fields @@ -49,6 +55,7 @@ def _get_all_notification_fields(obj): # that are local to this model (not auto created) return [f.name for f in obj._meta.get_fields() if not f.name in ('approved', 'submitter', 'id', ) and not f.auto_created] + def _get_attr_value(obj, fieldname): # see if this is a Many-to-many field. 
If yes, we want to print # it out as a pretty list @@ -61,6 +68,7 @@ def _get_attr_value(obj, fieldname): # Return the value, or an empty tring if it's NULL (migrated records) return unicode(getattr(obj, fieldname)) or '' + def _get_full_text_representation(obj): fieldlist = _get_all_notification_fields(obj) if not fieldlist: @@ -68,6 +76,7 @@ def _get_full_text_representation(obj): return "\n".join([u'%s: %s' % (n, _get_attr_value(obj, n)) for n in fieldlist]) + def _get_notification_text(obj): try: oldobj = obj.__class__.objects.get(pk=obj.pk) @@ -108,6 +117,7 @@ def _get_notification_text(obj): return ('{0} id {1} has been modified'.format(obj._meta.verbose_name, obj.id), 'The following fields have been modified:\n\n%s' % diff) + def my_pre_save_handler(sender, **kwargs): instance = kwargs['instance'] if getattr(instance, 'send_notification', False) and get_current_user(): @@ -119,28 +129,30 @@ def my_pre_save_handler(sender, **kwargs): "%s by %s" % (subj, get_current_user()), cont) + def my_m2m_changed_handler(sender, **kwargs): instance = kwargs['instance'] if getattr(instance, 'send_m2m_notification', False) and get_current_user(): (cl, f) = sender.__name__.split('_') if not hasattr(instance, '_stored_m2m'): - instance._stored_m2m={} + instance._stored_m2m = {} if kwargs['action'] == 'pre_clear': - instance._stored_m2m[f] = set([unicode(t) for t in getattr(instance,f).all()]) + instance._stored_m2m[f] = set([unicode(t) for t in getattr(instance, f).all()]) elif kwargs['action'] == 'post_add': - newset = set([unicode(t) for t in getattr(instance,f).all()]) + newset = set([unicode(t) for t in getattr(instance, f).all()]) added = newset.difference(instance._stored_m2m.get(f, set())) removed = instance._stored_m2m.get(f, set()).difference(newset) subj = '{0} id {1} has been modified'.format(instance._meta.verbose_name, instance.id) if added or removed: send_simple_mail(settings.NOTIFICATION_FROM, - settings.NOTIFICATION_EMAIL, - "%s by %s" % (subj, get_current_user()), - "The following values for {0} were changed:\n\n{1}\n{2}\n\n".format( - instance._meta.get_field(f).verbose_name, - "\n".join([u"Added: %s" % a for a in added]), - "\n".join([u"Removed: %s" % r for r in removed]), - )) + settings.NOTIFICATION_EMAIL, + "%s by %s" % (subj, get_current_user()), + "The following values for {0} were changed:\n\n{1}\n{2}\n\n".format( + instance._meta.get_field(f).verbose_name, + "\n".join([u"Added: %s" % a for a in added]), + "\n".join([u"Removed: %s" % r for r in removed]), + )) + def my_pre_delete_handler(sender, **kwargs): instance = kwargs['instance'] @@ -151,7 +163,8 @@ def my_pre_delete_handler(sender, **kwargs): instance._meta.verbose_name, instance.id, get_current_user()), - _get_full_text_representation(instance)) + _get_full_text_representation(instance)) + def my_post_save_handler(sender, **kwargs): instance = kwargs['instance'] @@ -162,6 +175,7 @@ def my_post_save_handler(sender, **kwargs): purgelist = instance.purge_urls map(varnish_purge, purgelist) + def register_basic_signal_handlers(): pre_save.connect(my_pre_save_handler) pre_delete.connect(my_pre_delete_handler) diff --git a/pgweb/util/sitestruct.py b/pgweb/util/sitestruct.py index 64c04f15..40d46bc9 100644 --- a/pgweb/util/sitestruct.py +++ b/pgweb/util/sitestruct.py @@ -1,5 +1,6 @@ from django.conf import settings + def get_all_pages_struct(method='get_struct'): """ Return an iterator over all distinct pages on the site. 
@@ -13,7 +14,7 @@ def get_all_pages_struct(method='get_struct'): for app in settings.INSTALLED_APPS: if app.startswith('pgweb.'): try: - m = __import__(app+".struct", {}, {}, method) + m = __import__(app + ".struct", {}, {}, method) except: # Failed to import - probably module didnd't exist continue diff --git a/pgweb/util/templateloader.py b/pgweb/util/templateloader.py index 20b45cae..4fa2b9eb 100644 --- a/pgweb/util/templateloader.py +++ b/pgweb/util/templateloader.py @@ -9,12 +9,15 @@ except ImportError: _thread_locals = local() + def initialize_template_collection(): _thread_locals.templates = [] + def get_all_templates(): return getattr(_thread_locals, 'templates', []) + class TrackingTemplateLoader(django.template.loaders.base.Loader): def get_template_sources(self, template_name): _thread_locals.templates = getattr(_thread_locals, 'templates', []) + [template_name, ] diff --git a/tools/communityauth/generate_cryptkey.py b/tools/communityauth/generate_cryptkey.py index db7c7451..6e84e3b4 100755 --- a/tools/communityauth/generate_cryptkey.py +++ b/tools/communityauth/generate_cryptkey.py @@ -17,4 +17,3 @@ if __name__ == "__main__": r = Random.new() key = r.read(32) print base64.b64encode(key) - diff --git a/tools/communityauth/sample/django/auth.py b/tools/communityauth/sample/django/auth.py index 2ae543a6..452612c3 100644 --- a/tools/communityauth/sample/django/auth.py +++ b/tools/communityauth/sample/django/auth.py @@ -35,6 +35,7 @@ from Crypto.Hash import SHA from Crypto import Random import time + class AuthBackend(ModelBackend): # We declare a fake backend that always fails direct authentication - # since we should never be using direct authentication in the first place! @@ -62,16 +63,17 @@ def login(request): r = Random.new() iv = r.read(16) encryptor = AES.new(SHA.new(settings.SECRET_KEY).digest()[:16], AES.MODE_CBC, iv) - cipher = encryptor.encrypt(s + ' ' * (16-(len(s) % 16))) # pad to 16 bytes + cipher = encryptor.encrypt(s + ' ' * (16 - (len(s) % 16))) # pad to 16 bytes return HttpResponseRedirect("%s?d=%s$%s" % ( - settings.PGAUTH_REDIRECT, - base64.b64encode(iv, "-_"), - base64.b64encode(cipher, "-_"), - )) + settings.PGAUTH_REDIRECT, + base64.b64encode(iv, "-_"), + base64.b64encode(cipher, "-_"), + )) else: return HttpResponseRedirect(settings.PGAUTH_REDIRECT) + # Handle logout requests by logging out of this site and then # redirecting to log out from the main site as well. def logout(request): @@ -79,6 +81,7 @@ def logout(request): django_logout(request) return HttpResponseRedirect("%slogout/" % settings.PGAUTH_REDIRECT) + # Receive an authentication response from the main website and try # to log the user in. def auth_receive(request): @@ -120,7 +123,7 @@ def auth_receive(request): changed = True if user.email != data['e'][0]: user.email = data['e'][0] - changed= True + changed = True if changed: user.save() except User.DoesNotExist: @@ -208,7 +211,7 @@ def user_search(searchterm=None, userid=None): u = urllib.urlopen('%ssearch/?%s' % ( settings.PGAUTH_REDIRECT, urllib.urlencode(q), - )) + )) (ivs, datas) = u.read().split('&') u.close() @@ -221,6 +224,7 @@ def user_search(searchterm=None, userid=None): return j + # Import a user into the local authentication system. Will initially # make a search for it, and if anything other than one entry is returned # the import will fail. 
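The community auth exchange touched above boils down to a urlencoded payload, space-padded to the AES block size, encrypted with AES-CBC and shipped as URL-safe base64. A minimal sketch of that round trip, assuming the same Python 2 / PyCrypto environment as the rest of the tree and a base64-encoded key like the one generate_cryptkey.py prints; the function names and the rstrip-based unpadding are illustrative assumptions, not code from the patch:

import base64
import time
import urllib

from Crypto import Random
from Crypto.Cipher import AES


def encrypt_auth_payload(key_b64, info):
    # Same shape as test_auth.py: "t=<timestamp>&<urlencoded user fields>"
    s = "t=%s&%s" % (int(time.time()), urllib.urlencode(info))
    iv = Random.new().read(16)
    encryptor = AES.new(base64.b64decode(key_b64), AES.MODE_CBC, iv)
    # Pad with spaces to a multiple of 16 bytes, as the patch does
    cipher = encryptor.encrypt(s + ' ' * (16 - (len(s) % 16)))
    # URL-safe base64 ("-_" altchars), matching base64.b64encode(..., "-_") above
    return base64.b64encode(iv, "-_"), base64.b64encode(cipher, "-_")


def decrypt_auth_payload(key_b64, iv_b64, data_b64):
    # Assumed inverse: decode, decrypt, strip the space padding again
    decryptor = AES.new(base64.b64decode(key_b64), AES.MODE_CBC,
                        base64.b64decode(iv_b64, "-_"))
    return decryptor.decrypt(base64.b64decode(data_b64, "-_")).rstrip(' ')

On the receiving side the decrypted string is parsed back into fields (the data['e'][0] access in auth_receive above suggests a parse_qs-style dict of lists), and the t timestamp is presumably checked for freshness, which is why test_auth.py below dates its fake packet five minutes into the future.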
diff --git a/tools/communityauth/test_auth.py b/tools/communityauth/test_auth.py index d5c60c71..4417c1de 100755 --- a/tools/communityauth/test_auth.py +++ b/tools/communityauth/test_auth.py @@ -46,7 +46,7 @@ if __name__ == "__main__": 'f': options.first, 'l': options.last, 'e': options.email, - } + } if options.suburl: info['su'] = options.suburl @@ -54,15 +54,15 @@ if __name__ == "__main__": # the first block more random.. # Since this is a fake authentication, put it 5 minutes into the future to # give more time to copy/paste it. - s = "t=%s&%s" % (int(time.time()+300), urllib.urlencode(info)) + s = "t=%s&%s" % (int(time.time() + 300), urllib.urlencode(info)) r = Random.new() iv = r.read(16) encryptor = AES.new(base64.b64decode(options.key), AES.MODE_CBC, iv) - cipher = encryptor.encrypt(s + ' ' * (16-(len(s) % 16))) + cipher = encryptor.encrypt(s + ' ' * (16 - (len(s) % 16))) print "Paste the following after the receiving url:" print "?i=%s&d=%s" % ( base64.b64encode(iv, "-_"), base64.b64encode(cipher, "-_"), - ) + ) diff --git a/tools/docs/docload.py b/tools/docs/docload.py index 1d467d37..7a2e0302 100755 --- a/tools/docs/docload.py +++ b/tools/docs/docload.py @@ -17,17 +17,19 @@ quiet = False re_titlematch = re.compile('([^<]+)', re.IGNORECASE) -## Load a single page + +# Load a single page def load_doc_file(filename, f): - tidyopts = dict(drop_proprietary_attributes=1, - alt_text='', - hide_comments=1, - output_xhtml=1, - show_body_only=1, - clean=1, - char_encoding='utf8', - indent='auto', - ) + tidyopts = dict( + drop_proprietary_attributes=1, + alt_text='', + hide_comments=1, + output_xhtml=1, + show_body_only=1, + clean=1, + char_encoding='utf8', + indent='auto', + ) # Postgres 10 started using xml toolchain and now produces docmentation in utf8. So we need # to figure out which version it is. @@ -56,7 +58,7 @@ def load_doc_file(filename, f): if not quiet: print "--- file: %s (%s) ---" % (filename, title) s = tidy.parseString(contents.encode('utf-8'), **tidyopts) - curs.execute("INSERT INTO docs (file, version, title, content) VALUES (%(f)s, %(v)s, %(t)s, %(c)s)",{ + curs.execute("INSERT INTO docs (file, version, title, content) VALUES (%(f)s, %(v)s, %(t)s, %(c)s)", { 'f': filename, 'v': ver, 't': title, @@ -65,8 +67,8 @@ def load_doc_file(filename, f): global pagecount pagecount += 1 -## Main execution +# Main execution parser = OptionParser(usage="usage: %prog [options] ") parser.add_option("-q", "--quiet", action="store_true", dest="quiet", help="Run quietly") @@ -139,4 +141,3 @@ connection.commit() connection.close() if not quiet: print "Done (%i pages)." % pagecount - diff --git a/tools/ftp/spider_ftp.py b/tools/ftp/spider_ftp.py index abaef4fc..d3d12dd6 100755 --- a/tools/ftp/spider_ftp.py +++ b/tools/ftp/spider_ftp.py @@ -19,12 +19,14 @@ exclude_roots = ['/repos', ] allnodes = {} + def read_file(fn): f = codecs.open(fn, 'r', encoding='utf-8', errors='replace') t = f.read() f.close() return t + def parse_directory(dirname, rootlen): mynode = {} for f in os.listdir(dirname): @@ -39,7 +41,7 @@ def parse_directory(dirname, rootlen): mynode[f] = { 't': 'l', 'd': os.readlink(fn).strip("/"), - } + } else: # This is a subdirectory, recurse into it, unless it happens # to be on our exclude list. @@ -61,6 +63,7 @@ def parse_directory(dirname, rootlen): allnodes[dirname[rootlen:].strip("/")] = mynode + def Usage(): print "Usage: spider_ftp.py " print "" @@ -68,6 +71,7 @@ def Usage(): print "to that URL instead of written to the filesystem." 
sys.exit(1) + if len(sys.argv) != 3: Usage() parse_directory(sys.argv[1], len(sys.argv[1])) diff --git a/tools/ftp/spider_yum.py b/tools/ftp/spider_yum.py index bc688d93..7b3a5869 100755 --- a/tools/ftp/spider_yum.py +++ b/tools/ftp/spider_yum.py @@ -26,6 +26,7 @@ platform_sort = { } archs = ['x86_64', 'i386', 'i686', 'ppc64le'] + def generate_platform(dirname, familyprefix, ver, installer, systemd): for f in platform_names.keys(): yield ('%s-%s' % (f, ver), { @@ -34,17 +35,19 @@ def generate_platform(dirname, familyprefix, ver, installer, systemd): 'f': f, 'i': installer, 'd': systemd, - 's': platform_sort[f]*1000-ver, + 's': platform_sort[f] * 1000 - ver, 'found': False, - }) + }) + def get_redhat_systemd(ver): return (ver >= 7) + platforms = {} -for v in range(5, 7+1): +for v in range(5, 7 + 1): platforms.update(dict(generate_platform('redhat', 'rhel', v, 'yum', get_redhat_systemd(v)))) -for v in range(24, 30+1): +for v in range(24, 30 + 1): platforms.update(dict(generate_platform('fedora', 'fedora', v, 'dnf', True))) re_reporpm = re.compile('^pgdg-([a-z0-9-]+)([0-9]{2})-[^-]+-(\d+)\.noarch\.rpm$') @@ -82,12 +85,12 @@ if __name__ == "__main__": break else: # DEBUG -# print "%s (%s) not found in platform list" % (familypath, shortdist) + # print "%s (%s) not found in platform list" % (familypath, shortdist) pass # Filter all platforms that are not used - platforms = {k:v for k,v in platforms.iteritems() if v['found']} - for k,v in platforms.iteritems(): + platforms = {k: v for k, v in platforms.iteritems() if v['found']} + for k, v in platforms.iteritems(): del v['found'] j = json.dumps({'platforms': platforms, 'reporpms': reporpms}) diff --git a/tools/localhtmlvalidate/localhtmlvalidate.py b/tools/localhtmlvalidate/localhtmlvalidate.py index ff08ae48..56fe8920 100755 --- a/tools/localhtmlvalidate/localhtmlvalidate.py +++ b/tools/localhtmlvalidate/localhtmlvalidate.py @@ -19,7 +19,8 @@ import httplib import re import HTMLParser -BOUNDARY="-=--=foobar-=--=" +BOUNDARY = "-=--=foobar-=--=" + def encode_multipart_formdata(fields, files): L = [] @@ -39,7 +40,8 @@ def encode_multipart_formdata(fields, files): body = "\r\n".join(L) return body -if __name__=="__main__": + +if __name__ == "__main__": if len(sys.argv) != 2: print "Usage: localhtmlvalidate.py " sys.exit(1) @@ -53,13 +55,15 @@ if __name__=="__main__": firstline = 0 # Generate a form body - body = encode_multipart_formdata([ + body = encode_multipart_formdata( + [ ('charset', 'utf-8'), ('doctype', 'inline'), ('group', '0'), ('verbose', '1'), - ], - [('uploaded_file', 'test.html', contents)]) + ], + [('uploaded_file', 'test.html', contents)] + ) # Now submit it to the w3c validator h = httplib.HTTP("validator.w3.org") @@ -92,5 +96,3 @@ if __name__=="__main__": print "Unknown status: %s" % headers['x-w3c-validator-status'] print headers sys.exit(1) - - diff --git a/tools/purgehook/purgehook.py b/tools/purgehook/purgehook.py index bf2e9f9e..4584635d 100755 --- a/tools/purgehook/purgehook.py +++ b/tools/purgehook/purgehook.py @@ -13,7 +13,7 @@ from ConfigParser import ConfigParser import psycopg2 # Templates that we don't want to ban automatically -BANNED_TEMPLATES=( +BANNED_TEMPLATES = ( 'base/base.html', ) diff --git a/tools/search/crawler/lib/archives.py b/tools/search/crawler/lib/archives.py index 7a2014ab..7dbed9a5 100644 --- a/tools/search/crawler/lib/archives.py +++ b/tools/search/crawler/lib/archives.py @@ -8,6 +8,7 @@ import time from lib.log import log from lib.parsers import ArchivesParser + class MultiListCrawler(object): def 
__init__(self, lists, conn, status_interval=30, commit_interval=500): self.lists = lists @@ -27,8 +28,8 @@ class MultiListCrawler(object): for listid, listname in self.lists: if full: # Generate a sequence of everything to index - for year in range(1997, datetime.datetime.now().year+1): - for month in range(1,13): + for year in range(1997, datetime.datetime.now().year + 1): + for month in range(1, 13): self.queue.put((listid, listname, year, month, -1)) elif month: # Do one specific month @@ -48,18 +49,18 @@ class MultiListCrawler(object): curs = self.conn.cursor() curr = datetime.date.today() if curr.month == 1: - prev = datetime.date(curr.year-1, 12, 1) + prev = datetime.date(curr.year - 1, 12, 1) else: - prev = datetime.date(curr.year, curr.month-1, 1) + prev = datetime.date(curr.year, curr.month - 1, 1) for d in curr, prev: # Figure out what the highest indexed page in this # month is. curs.execute("SELECT max(msgnum) FROM messages WHERE list=%(list)s AND year=%(year)s AND month=%(month)s", { - 'list': listid, - 'year': d.year, - 'month': d.month, - }) + 'list': listid, + 'year': d.year, + 'month': d.month, + }) x = curs.fetchall() if x[0][0] != None: maxmsg = x[0][0] @@ -69,11 +70,11 @@ class MultiListCrawler(object): for x in range(5): t = threading.Thread(name="Indexer %s" % x, - target = lambda: self.crawl_from_queue()) - t.daemon= True + target=lambda: self.crawl_from_queue()) + t.daemon = True t.start() - t = threading.Thread(name="statusthread", target = lambda: self.status_thread()) + t = threading.Thread(name="statusthread", target=lambda: self.status_thread()) t.daemon = True t.start() @@ -93,10 +94,10 @@ class MultiListCrawler(object): with self.counterlock: log("Indexed %s messages so far (%s active threads, %s months still queued, %.1f msg/sec)" % ( self.counter, - threading.active_count() - 2 , # main thread + status thread + threading.active_count() - 2, # main thread + status thread self.queue.qsize(), self.counter / (nowtime - starttime), - )) + )) # Commit every 500 messages if self.counter - lastcommit > self.commit_interval: lastcommit = self.counter @@ -152,15 +153,15 @@ class MultiListCrawler(object): # We return true to move on to the next message anyway! 
return True curs.execute("INSERT INTO messages (list, year, month, msgnum, date, subject, author, txt, fti) VALUES (%(listid)s, %(year)s, %(month)s, %(msgnum)s, %(date)s, %(subject)s, %(author)s, %(txt)s, setweight(to_tsvector('pg', %(subject)s), 'A') || to_tsvector('pg', %(txt)s))", { - 'listid': listid, - 'year': year, - 'month': month, - 'msgnum': msgnum, - 'date': p.date, - 'subject': p.subject[:127], - 'author': p.author[:127], - 'txt': p.body, - }) + 'listid': listid, + 'year': year, + 'month': month, + 'msgnum': msgnum, + 'date': p.date, + 'subject': p.subject[:127], + 'author': p.author[:127], + 'txt': p.body, + }) with self.counterlock: self.counter += 1 diff --git a/tools/search/crawler/lib/basecrawler.py b/tools/search/crawler/lib/basecrawler.py index 2154e0b1..173cf0c8 100644 --- a/tools/search/crawler/lib/basecrawler.py +++ b/tools/search/crawler/lib/basecrawler.py @@ -11,6 +11,7 @@ import threading from lib.log import log from lib.parsers import GenericHtmlParser, lossy_unicode + class BaseSiteCrawler(object): def __init__(self, hostname, dbconn, siteid, serverip=None, https=False): self.hostname = hostname @@ -37,11 +38,11 @@ class BaseSiteCrawler(object): # Fire off worker threads for x in range(5): t = threading.Thread(name="Indexer %s" % x, - target = lambda: self.crawl_from_queue()) + target=lambda: self.crawl_from_queue()) t.daemon = True t.start() - t = threading.Thread(name="statusthread", target = lambda: self.status_thread()) + t = threading.Thread(name="statusthread", target=lambda: self.status_thread()) t.daemon = True t.start() @@ -53,9 +54,9 @@ class BaseSiteCrawler(object): # Remove all pages that we didn't crawl curs = self.dbconn.cursor() curs.execute("DELETE FROM webpages WHERE site=%(site)s AND NOT suburl=ANY(%(urls)s)", { - 'site': self.siteid, - 'urls': self.pages_crawled.keys(), - }) + 'site': self.siteid, + 'urls': self.pages_crawled.keys(), + }) if curs.rowcount: log("Deleted %s pages no longer accessible" % curs.rowcount) self.pages_deleted += curs.rowcount @@ -77,7 +78,7 @@ class BaseSiteCrawler(object): threading.active_count() - 2, self.queue.qsize(), len(self.pages_crawled) / (nowtime - starttime), - )) + )) def crawl_from_queue(self): while not self.stopevent.is_set(): @@ -92,7 +93,7 @@ class BaseSiteCrawler(object): return False def crawl_page(self, url, relprio, internal): - if self.pages_crawled.has_key(url) or self.pages_crawled.has_key(url+"/"): + if self.pages_crawled.has_key(url) or self.pages_crawled.has_key(url + "/"): return if self.exclude_url(url): @@ -110,9 +111,9 @@ class BaseSiteCrawler(object): # Page failed to load or was a redirect, so remove from database curs = self.dbconn.cursor() curs.execute("DELETE FROM webpages WHERE site=%(id)s AND suburl=%(url)s", { - 'id': self.siteid, - 'url': url, - }) + 'id': self.siteid, + 'url': url, + }) with self.counterlock: self.pages_deleted += curs.rowcount @@ -145,7 +146,7 @@ class BaseSiteCrawler(object): 'url': url, 'relprio': relprio, 'internal': internal, - } + } curs = self.dbconn.cursor() curs.execute("UPDATE webpages SET title=%(title)s, txt=%(txt)s, fti=setweight(to_tsvector('public.pg', %(title)s), 'A') || to_tsvector('public.pg', %(txt)s), lastscanned=%(lastmod)s, relprio=%(relprio)s, isinternal=%(internal)s WHERE site=%(site)s AND suburl=%(url)s", params) if curs.rowcount != 1: @@ -157,10 +158,11 @@ class BaseSiteCrawler(object): self.pages_updated += 1 ACCEPTED_CONTENTTYPES = ("text/html", "text/plain", ) + def accept_contenttype(self, contenttype): # Split apart if there is a 
"; charset=" in it if contenttype.find(";"): - contenttype = contenttype.split(';',2)[0] + contenttype = contenttype.split(';', 2)[0] return contenttype in self.ACCEPTED_CONTENTTYPES def fetch_page(self, url): @@ -180,8 +182,8 @@ class BaseSiteCrawler(object): else: h = httplib.HTTPSConnection(host=self.hostname, port=443, strict=True, timeout=10, context=ssl._create_unverified_context()) h.putrequest("GET", url) - h.putheader("User-agent","pgsearch/0.2") - h.putheader("Connection","close") + h.putheader("User-agent", "pgsearch/0.2") + h.putheader("Connection", "close") if self.scantimes.has_key(url): h.putheader("If-Modified-Since", formatdate(time.mktime(self.scantimes[url].timetuple()))) h.endheaders() @@ -209,7 +211,7 @@ class BaseSiteCrawler(object): # No redirect at all found, becaue it was invalid? return (2, None, None) else: - #print "Url %s returned status %s" % (url, resp.status) + # print "Url %s returned status %s" % (url, resp.status) pass except Exception, e: log("Exception when loading url %s: %s" % (url, e)) diff --git a/tools/search/crawler/lib/genericsite.py b/tools/search/crawler/lib/genericsite.py index 718b1f0d..aa8b9e09 100644 --- a/tools/search/crawler/lib/genericsite.py +++ b/tools/search/crawler/lib/genericsite.py @@ -3,6 +3,7 @@ import re from basecrawler import BaseSiteCrawler from parsers import RobotsParser + class GenericSiteCrawler(BaseSiteCrawler): def __init__(self, hostname, dbconn, siteid, https=False): super(GenericSiteCrawler, self).__init__(hostname, dbconn, siteid, https=https) @@ -19,8 +20,8 @@ class GenericSiteCrawler(BaseSiteCrawler): # robots.txt ones) curs = self.dbconn.cursor() curs.execute("SELECT suburlre FROM site_excludes WHERE site=%(site)s", { - 'site': self.siteid, - }) + 'site': self.siteid, + }) self.extra_excludes = [re.compile(x) for x, in curs.fetchall()] # We *always* crawl the root page, of course @@ -45,7 +46,7 @@ class GenericSiteCrawler(BaseSiteCrawler): def post_process_page(self, url): for l in self.resolve_links(self.page.links, url): - if self.pages_crawled.has_key(l) or self.pages_crawled.has_key(l+"/"): + if self.pages_crawled.has_key(l) or self.pages_crawled.has_key(l + "/"): continue if self.exclude_url(l): continue diff --git a/tools/search/crawler/lib/log.py b/tools/search/crawler/lib/log.py index ce566034..08e8de5c 100644 --- a/tools/search/crawler/lib/log.py +++ b/tools/search/crawler/lib/log.py @@ -1,6 +1,7 @@ # Yes, this is trivial, but we might want to put something # more here in the future :) import datetime + + def log(msg): print "%s: %s" % (datetime.datetime.now(), msg) - diff --git a/tools/search/crawler/lib/parsers.py b/tools/search/crawler/lib/parsers.py index 89f0ff12..b176edd3 100644 --- a/tools/search/crawler/lib/parsers.py +++ b/tools/search/crawler/lib/parsers.py @@ -8,6 +8,7 @@ from HTMLParser import HTMLParser from lib.log import log + class GenericHtmlParser(HTMLParser): def __init__(self): HTMLParser.__init__(self) @@ -22,7 +23,7 @@ class GenericHtmlParser(HTMLParser): if tag == "body": self.inbody = True if tag == "a": - for a,v in attrs: + for a, v in attrs: if a == "href": self.links.append(v) @@ -31,6 +32,7 @@ class GenericHtmlParser(HTMLParser): self.inbody = False DATA_IGNORE_TAGS = ("script",) + def handle_data(self, data): d = data.strip() if len(d) < 2: @@ -59,6 +61,7 @@ class GenericHtmlParser(HTMLParser): class ArchivesParser(object): rematcher = re.compile(".*.*.*(.*)", re.DOTALL) hp = HTMLParser() + def __init__(self): self.subject = None self.author = None @@ -79,6 +82,7 @@ class 
ArchivesParser(object): _date_multi_re = re.compile(' \((\w+\s\w+|)\)$') _date_trailing_envelope = re.compile('\s+\(envelope.*\)$') + def parse_date(self, d): # For some reason, we have dates that look like this: # http://archives.postgresql.org/pgsql-bugs/1999-05/msg00018.php @@ -126,11 +130,13 @@ class ArchivesParser(object): # So we copy the brokenness here. # This code is from MHonArc/ewhutil.pl, mrot13() _arot13_trans = dict(zip(map(ord, - u'@ABCDEFGHIJKLMNOPQRSTUVWXYZ[abcdefghijklmnopqrstuvwxyz'), - u'NOPQRSTUVWXYZ[@ABCDEFGHIJKLMnopqrstuvwxyzabcdefghijklm')) + u'@ABCDEFGHIJKLMNOPQRSTUVWXYZ[abcdefghijklmnopqrstuvwxyz'), + u'NOPQRSTUVWXYZ[@ABCDEFGHIJKLMnopqrstuvwxyzabcdefghijklm')) + def almost_rot13(self, s): return unicode(s).translate(self._arot13_trans) + class RobotsParser(object): def __init__(self, url): try: diff --git a/tools/search/crawler/lib/sitemapsite.py b/tools/search/crawler/lib/sitemapsite.py index 25d6fb72..fddd3ffe 100644 --- a/tools/search/crawler/lib/sitemapsite.py +++ b/tools/search/crawler/lib/sitemapsite.py @@ -5,6 +5,7 @@ import dateutil.parser from lib.log import log from lib.basecrawler import BaseSiteCrawler + class SitemapParser(object): def __init__(self): self.urls = [] @@ -19,7 +20,7 @@ class SitemapParser(object): self.getlastmod = False self.currstr = "" self.internal = False - self.parser.StartElementHandler = lambda name,attrs: self.processelement(name,attrs) + self.parser.StartElementHandler = lambda name, attrs: self.processelement(name, attrs) self.parser.EndElementHandler = lambda name: self.processendelement(name) self.parser.CharacterDataHandler = lambda data: self.processcharacterdata(data) self.internal = internal @@ -58,6 +59,7 @@ class SitemapParser(object): if self.geturl or self.getprio or self.getlastmod: self.currstr += data + class SitemapSiteCrawler(BaseSiteCrawler): def __init__(self, hostname, dbconn, siteid, serverip, https=False): super(SitemapSiteCrawler, self).__init__(hostname, dbconn, siteid, serverip, https) @@ -81,7 +83,7 @@ class SitemapSiteCrawler(BaseSiteCrawler): for url, prio, lastmod, internal in p.urls: # Advance 8 characters - length of https://. - url = url[len(self.hostname)+8:] + url = url[len(self.hostname) + 8:] if lastmod: if self.scantimes.has_key(url): if lastmod < self.scantimes[url]: diff --git a/tools/search/crawler/lib/threadwrapper.py b/tools/search/crawler/lib/threadwrapper.py index b70571dd..87c70265 100644 --- a/tools/search/crawler/lib/threadwrapper.py +++ b/tools/search/crawler/lib/threadwrapper.py @@ -1,5 +1,6 @@ from multiprocessing import Process + # Wrap a method call in a different process, so that we can process # keyboard interrupts and actually terminate it if we have to. # python threading makes it often impossible to Ctlr-C it otherwise.. 
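Only the import line of threadwrapper.py changes in this patch, but its comment describes a small, useful trick: run the crawl in a child process so that Ctrl-C in the parent can actually terminate it. A rough sketch of what such a wrapper can look like; the real function body is not part of this diff, so treat the signature and details as assumptions:

from multiprocessing import Process


def threadwrapper(func, *args):
    # Run func(*args) in a separate process; worker threads inside it can no
    # longer keep the interpreter alive once we terminate the child.
    p = Process(target=func, args=args)
    p.start()
    try:
        p.join()
    except KeyboardInterrupt:
        p.terminate()
        p.join()

This is presumably how the doit() entry points in listcrawler.py and webcrawler.py below get invoked, although those call sites are outside the hunks shown here.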
diff --git a/tools/search/crawler/listcrawler.py b/tools/search/crawler/listcrawler.py index ea11bec2..44508610 100755 --- a/tools/search/crawler/listcrawler.py +++ b/tools/search/crawler/listcrawler.py @@ -10,23 +10,24 @@ import psycopg2 import sys import time + def doit(opt): cp = ConfigParser() cp.read("search.ini") psycopg2.extensions.register_type(psycopg2.extensions.UNICODE) - conn = psycopg2.connect(cp.get("search","db")) + conn = psycopg2.connect(cp.get("search", "db")) curs = conn.cursor() if opt.list: # Multiple lists can be specified with a comma separator (no spaces) curs.execute("SELECT id,name FROM lists WHERE name=ANY(%(names)s)", { - 'names': opt.list.split(','), - }) + 'names': opt.list.split(','), + }) else: curs.execute("SELECT id,name FROM lists WHERE active ORDER BY id") - listinfo = [(id,name) for id,name in curs.fetchall()] + listinfo = [(id, name) for id, name in curs.fetchall()] c = MultiListCrawler(listinfo, conn, opt.status_interval, opt.commit_interval) n = c.crawl(opt.full, opt.month) @@ -39,7 +40,8 @@ def doit(opt): log("Indexed %s messages" % n) time.sleep(1) -if __name__=="__main__": + +if __name__ == "__main__": parser = OptionParser() parser.add_option("-l", "--list", dest='list', help="Crawl only this list") parser.add_option("-m", "--month", dest='month', help="Crawl only this month") diff --git a/tools/search/crawler/listsync.py b/tools/search/crawler/listsync.py index c5bc72c6..815d24ae 100755 --- a/tools/search/crawler/listsync.py +++ b/tools/search/crawler/listsync.py @@ -7,11 +7,11 @@ import psycopg2 import urllib import simplejson as json -if __name__=="__main__": +if __name__ == "__main__": cp = ConfigParser() cp.read("search.ini") psycopg2.extensions.register_type(psycopg2.extensions.UNICODE) - conn = psycopg2.connect(cp.get("search","db")) + conn = psycopg2.connect(cp.get("search", "db")) curs = conn.cursor() u = urllib.urlopen("http://%s/community/lists/listinfo/" % cp.get("search", "web")) diff --git a/tools/search/crawler/webcrawler.py b/tools/search/crawler/webcrawler.py index 2f895f17..f3f139a7 100755 --- a/tools/search/crawler/webcrawler.py +++ b/tools/search/crawler/webcrawler.py @@ -10,9 +10,10 @@ from ConfigParser import ConfigParser import psycopg2 import time + def doit(): psycopg2.extensions.register_type(psycopg2.extensions.UNICODE) - conn = psycopg2.connect(cp.get("search","db")) + conn = psycopg2.connect(cp.get("search", "db")) curs = conn.cursor() @@ -34,7 +35,7 @@ def doit(): time.sleep(1) -if __name__=="__main__": +if __name__ == "__main__": cp = ConfigParser() cp.read("search.ini") diff --git a/tools/varnishqueue/nagios_check.py b/tools/varnishqueue/nagios_check.py index b9fa622e..a7cbd011 100755 --- a/tools/varnishqueue/nagios_check.py +++ b/tools/varnishqueue/nagios_check.py @@ -5,9 +5,9 @@ import psycopg2 from datetime import timedelta # Up to 5 minutes delay is ok -WARNING_THRESHOLD=timedelta(minutes=5) +WARNING_THRESHOLD = timedelta(minutes=5) # More than 15 minutes something is definitely wrong -CRITICAL_THRESHOLD=timedelta(minutes=15) +CRITICAL_THRESHOLD = timedelta(minutes=15) if __name__ == "__main__": if len(sys.argv) != 2: diff --git a/tools/varnishqueue/varnish_queue.py b/tools/varnishqueue/varnish_queue.py index c66d87dc..31096759 100755 --- a/tools/varnishqueue/varnish_queue.py +++ b/tools/varnishqueue/varnish_queue.py @@ -15,6 +15,7 @@ import logging import psycopg2 from setproctitle import setproctitle + def do_purge(consumername, headers): try: conn = httplib.HTTPSConnection('%s.postgresql.org' % consumername) 
@@ -30,6 +31,7 @@ def do_purge(consumername, headers): return False return True + def worker(consumerid, consumername, dsn): logging.info("Starting worker for %s" % consumername) setproctitle("varnish_queue - worker for %s" % consumername) @@ -85,7 +87,7 @@ def worker(consumerid, consumername, dsn): # Nothing, so roll back the transaction and wait conn.rollback() - select.select([conn],[],[],5*60) + select.select([conn], [], [], 5 * 60) conn.poll() while conn.notifies: conn.notifies.pop() @@ -104,7 +106,7 @@ def housekeeper(dsn): conn.commit() else: conn.rollback() - time.sleep(5*60) + time.sleep(5 * 60) if __name__ == "__main__":
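The worker loop in varnish_queue.py sleeps on the database connection itself: select.select([conn], [], [], 5 * 60) blocks until the backend sends a NOTIFY or the timeout fires, then conn.poll() pulls the notifications in so they can be drained and the queue table re-checked. A stand-alone sketch of that psycopg2 LISTEN/NOTIFY pattern, with the DSN and channel name invented for illustration:

import select

import psycopg2

conn = psycopg2.connect("dbname=varnishqueue")  # hypothetical DSN
conn.autocommit = True
conn.cursor().execute("LISTEN varnishqueue")    # hypothetical channel name

while True:
    # Wait for a NOTIFY, but never longer than 5 minutes (same timeout as the worker)
    select.select([conn], [], [], 5 * 60)
    conn.poll()
    while conn.notifies:
        conn.notifies.pop()
    # ...woken up or timed out: check the queue table for purge requests here...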