From 87237f6536a1e6df112ca34818cf8459cb04fc68 Mon Sep 17 00:00:00 2001
From: Magnus Hagander
Please confirm that you consent to this sharing.'.format(org.orgname, org.orgname), - 'savebutton': 'Proceed with login', - }) + return render_pgweb(request, 'account', 'base/form.html', { + 'form': form, + 'operation': 'Authentication', + 'form_intro': 'The site you are about to log into is run by {0}. If you choose to proceed with this authentication, your name and email address will be shared with {1}.
Please confirm that you consent to this sharing.'.format(org.orgname, org.orgname),
+ 'savebutton': 'Proceed with login',
+ })
def _encrypt_site_response(site, s):
- # Encrypt it with the shared key (and IV!)
- r = Random.new()
- iv = r.read(16) # Always 16 bytes for AES
- encryptor = AES.new(base64.b64decode(site.cryptkey), AES.MODE_CBC, iv)
- cipher = encryptor.encrypt(s + ' ' * (16-(len(s) % 16))) #Pad to even 16 bytes
+ # Encrypt it with the shared key (and IV!)
+ r = Random.new()
+ iv = r.read(16) # Always 16 bytes for AES
+ encryptor = AES.new(base64.b64decode(site.cryptkey), AES.MODE_CBC, iv)
+ cipher = encryptor.encrypt(s + ' ' * (16-(len(s) % 16))) #Pad to even 16 bytes
- # Base64-encode the response, just to be consistent
- return "%s&%s" % (
- base64.b64encode(iv, '-_'),
- base64.b64encode(cipher, '-_'),
- )
+ # Base64-encode the response, just to be consistent
+ return "%s&%s" % (
+ base64.b64encode(iv, '-_'),
+ base64.b64encode(cipher, '-_'),
+ )
def communityauth_search(request, siteid):
- # Perform a search for users. The response will be encrypted with the site
- # key to prevent abuse, therefor we need the site.
- site = get_object_or_404(CommunityAuthSite, pk=siteid)
+ # Perform a search for users. The response will be encrypted with the site
+ # key to prevent abuse, therefore we need the site.
+ site = get_object_or_404(CommunityAuthSite, pk=siteid)
- q = Q(is_active=True)
- if request.GET.has_key('s') and request.GET['s']:
- # General search term, match both name and email
- q = q & (Q(email__icontains=request.GET['s']) | Q(first_name__icontains=request.GET['s']) | Q(last_name__icontains=request.GET['s']))
- elif request.GET.has_key('e') and request.GET['e']:
- q = q & Q(email__icontains=request.GET['e'])
- elif request.GET.has_key('n') and request.GET['n']:
- q = q & (Q(first_name__icontains=request.GET['n']) | Q(last_name__icontains=request.GET['n']))
- elif request.GET.has_key('u') and request.GET['u']:
- q = q & Q(username=request.GET['u'])
- else:
- raise Http404('No search term specified')
+ q = Q(is_active=True)
+ if request.GET.has_key('s') and request.GET['s']:
+ # General search term, match both name and email
+ q = q & (Q(email__icontains=request.GET['s']) | Q(first_name__icontains=request.GET['s']) | Q(last_name__icontains=request.GET['s']))
+ elif request.GET.has_key('e') and request.GET['e']:
+ q = q & Q(email__icontains=request.GET['e'])
+ elif request.GET.has_key('n') and request.GET['n']:
+ q = q & (Q(first_name__icontains=request.GET['n']) | Q(last_name__icontains=request.GET['n']))
+ elif request.GET.has_key('u') and request.GET['u']:
+ q = q & Q(username=request.GET['u'])
+ else:
+ raise Http404('No search term specified')
- users = User.objects.filter(q)
+ users = User.objects.filter(q)
- j = json.dumps([{'u': u.username, 'e': u.email, 'f': u.first_name, 'l': u.last_name} for u in users])
+ j = json.dumps([{'u': u.username, 'e': u.email, 'f': u.first_name, 'l': u.last_name} for u in users])
- return HttpResponse(_encrypt_site_response(site, j))
+ return HttpResponse(_encrypt_site_response(site, j))
def communityauth_getkeys(request, siteid, since=None):
- # Get any updated ssh keys for community accounts.
- # The response will be encrypted with the site key to prevent abuse,
- # therefor we need the site.
- site = get_object_or_404(CommunityAuthSite, pk=siteid)
+ # Get any updated ssh keys for community accounts.
+ # The response will be encrypted with the site key to prevent abuse,
+ # therefore we need the site.
+ site = get_object_or_404(CommunityAuthSite, pk=siteid)
- if since:
- keys = UserProfile.objects.select_related('user').filter(lastmodified__gte=datetime.fromtimestamp(int(since.replace('/', '')))).exclude(sshkey='')
- else:
- keys = UserProfile.objects.select_related('user').all().exclude(sshkey='')
+ if since:
+ keys = UserProfile.objects.select_related('user').filter(lastmodified__gte=datetime.fromtimestamp(int(since.replace('/', '')))).exclude(sshkey='')
+ else:
+ keys = UserProfile.objects.select_related('user').all().exclude(sshkey='')
- j = json.dumps([{'u': k.user.username, 's': k.sshkey} for k in keys])
+ j = json.dumps([{'u': k.user.username, 's': k.sshkey} for k in keys])
- return HttpResponse(_encrypt_site_response(site, j))
+ return HttpResponse(_encrypt_site_response(site, j))
diff --git a/pgweb/contributors/admin.py b/pgweb/contributors/admin.py
index 9f2204e7..2f0ec4f2 100644
--- a/pgweb/contributors/admin.py
+++ b/pgweb/contributors/admin.py
@@ -8,20 +8,20 @@ from pgweb.core.lookups import UserLookup
from models import Contributor, ContributorType
class ContributorAdminForm(forms.ModelForm):
- class Meta:
- model = Contributor
- exclude = ()
- widgets = {
- 'user': AutoCompleteSelectWidget(lookup_class=UserLookup),
- }
+ class Meta:
+ model = Contributor
+ exclude = ()
+ widgets = {
+ 'user': AutoCompleteSelectWidget(lookup_class=UserLookup),
+ }
- def __init__(self, *args, **kwargs):
- super(ContributorAdminForm, self).__init__(*args, **kwargs)
- self.fields['user'].widget.can_add_related = False
- self.fields['user'].widget.can_change_related = False
+ def __init__(self, *args, **kwargs):
+ super(ContributorAdminForm, self).__init__(*args, **kwargs)
+ self.fields['user'].widget.can_add_related = False
+ self.fields['user'].widget.can_change_related = False
class ContributorAdmin(admin.ModelAdmin):
- form = ContributorAdminForm
+ form = ContributorAdminForm
admin.site.register(ContributorType)
admin.site.register(Contributor, ContributorAdmin)
diff --git a/pgweb/contributors/models.py b/pgweb/contributors/models.py
index 789e203e..43cd3bae 100644
--- a/pgweb/contributors/models.py
+++ b/pgweb/contributors/models.py
@@ -2,36 +2,36 @@ from django.db import models
from django.contrib.auth.models import User
class ContributorType(models.Model):
- typename = models.CharField(max_length=32, null=False, blank=False)
- sortorder = models.IntegerField(null=False, default=100)
- extrainfo = models.TextField(null=True, blank=True)
- detailed = models.BooleanField(null=False, default=True)
- showemail = models.BooleanField(null=False, default=True)
+ typename = models.CharField(max_length=32, null=False, blank=False)
+ sortorder = models.IntegerField(null=False, default=100)
+ extrainfo = models.TextField(null=True, blank=True)
+ detailed = models.BooleanField(null=False, default=True)
+ showemail = models.BooleanField(null=False, default=True)
- purge_urls = ('/community/contributors/', )
+ purge_urls = ('/community/contributors/', )
- def __unicode__(self):
- return self.typename
+ def __unicode__(self):
+ return self.typename
- class Meta:
- ordering = ('sortorder',)
+ class Meta:
+ ordering = ('sortorder',)
class Contributor(models.Model):
- ctype = models.ForeignKey(ContributorType)
- lastname = models.CharField(max_length=100, null=False, blank=False)
- firstname = models.CharField(max_length=100, null=False, blank=False)
- email = models.EmailField(null=False, blank=True)
- company = models.CharField(max_length=100, null=True, blank=True)
- companyurl = models.URLField(max_length=100, null=True, blank=True, verbose_name='Company URL')
- location = models.CharField(max_length=100, null=True, blank=True)
- contribution = models.TextField(null=True, blank=True)
- user = models.ForeignKey(User, null=True, blank=True)
+ ctype = models.ForeignKey(ContributorType)
+ lastname = models.CharField(max_length=100, null=False, blank=False)
+ firstname = models.CharField(max_length=100, null=False, blank=False)
+ email = models.EmailField(null=False, blank=True)
+ company = models.CharField(max_length=100, null=True, blank=True)
+ companyurl = models.URLField(max_length=100, null=True, blank=True, verbose_name='Company URL')
+ location = models.CharField(max_length=100, null=True, blank=True)
+ contribution = models.TextField(null=True, blank=True)
+ user = models.ForeignKey(User, null=True, blank=True)
- send_notification=True
- purge_urls = ('/community/contributors/', )
+ send_notification=True
+ purge_urls = ('/community/contributors/', )
- def __unicode__(self):
- return "%s %s" % (self.firstname, self.lastname)
+ def __unicode__(self):
+ return "%s %s" % (self.firstname, self.lastname)
- class Meta:
- ordering = ('lastname', 'firstname',)
+ class Meta:
+ ordering = ('lastname', 'firstname',)
diff --git a/pgweb/contributors/struct.py b/pgweb/contributors/struct.py
index 02c04ca1..9bd244c9 100644
--- a/pgweb/contributors/struct.py
+++ b/pgweb/contributors/struct.py
@@ -1,2 +1,2 @@
def get_struct():
- yield ('community/contributors/', None)
+ yield ('community/contributors/', None)
diff --git a/pgweb/contributors/views.py b/pgweb/contributors/views.py
index 31027de5..0be074a4 100644
--- a/pgweb/contributors/views.py
+++ b/pgweb/contributors/views.py
@@ -3,7 +3,7 @@ from pgweb.util.contexts import render_pgweb
from models import ContributorType
def completelist(request):
- contributortypes = list(ContributorType.objects.all())
- return render_pgweb(request, 'community', 'contributors/list.html', {
- 'contributortypes': contributortypes,
- })
+ contributortypes = list(ContributorType.objects.all())
+ return render_pgweb(request, 'community', 'contributors/list.html', {
+ 'contributortypes': contributortypes,
+ })
diff --git a/pgweb/core/admin.py b/pgweb/core/admin.py
index 40cff332..b03f1de0 100644
--- a/pgweb/core/admin.py
+++ b/pgweb/core/admin.py
@@ -10,28 +10,28 @@ from pgweb.core.models import ModerationNotification
from pgweb.core.lookups import UserLookup
class OrganisationAdminForm(forms.ModelForm):
- class Meta:
- model = Organisation
- exclude = ()
- widgets = {
- 'managers': AutoCompleteSelectMultipleWidget(lookup_class=UserLookup),
- }
+ class Meta:
+ model = Organisation
+ exclude = ()
+ widgets = {
+ 'managers': AutoCompleteSelectMultipleWidget(lookup_class=UserLookup),
+ }
- def __init__(self, *args, **kwargs):
- super(OrganisationAdminForm, self).__init__(*args, **kwargs)
- self.fields['managers'].widget.can_add_related = False
- self.fields['managers'].widget.can_change_related = False
- self.fields['managers'].widget.can_delete_related = False
+ def __init__(self, *args, **kwargs):
+ super(OrganisationAdminForm, self).__init__(*args, **kwargs)
+ self.fields['managers'].widget.can_add_related = False
+ self.fields['managers'].widget.can_change_related = False
+ self.fields['managers'].widget.can_delete_related = False
class OrganisationAdmin(admin.ModelAdmin):
- form = OrganisationAdminForm
- list_display = ('name', 'approved', 'lastconfirmed',)
- list_filter = ('approved',)
- ordering = ('name', )
- search_fields = ('name', )
+ form = OrganisationAdminForm
+ list_display = ('name', 'approved', 'lastconfirmed',)
+ list_filter = ('approved',)
+ ordering = ('name', )
+ search_fields = ('name', )
class VersionAdmin(admin.ModelAdmin):
- list_display = ('versionstring', 'reldate', 'supported', 'current', )
+ list_display = ('versionstring', 'reldate', 'supported', 'current', )
admin.site.register(Version, VersionAdmin)
admin.site.register(OrganisationType)
diff --git a/pgweb/core/feeds.py b/pgweb/core/feeds.py
index 076ddd4d..45a0ee67 100644
--- a/pgweb/core/feeds.py
+++ b/pgweb/core/feeds.py
@@ -5,18 +5,18 @@ from models import Version
from datetime import datetime, time
class VersionFeed(Feed):
- title = "PostgreSQL latest versions"
- link = "https://www.postgresql.org/"
- description = "PostgreSQL latest versions"
+ title = "PostgreSQL latest versions"
+ link = "https://www.postgresql.org/"
+ description = "PostgreSQL latest versions"
- description_template = 'core/version_rss_description.html'
- title_template = 'core/version_rss_title.html'
+ description_template = 'core/version_rss_description.html'
+ title_template = 'core/version_rss_title.html'
- def items(self):
- return Version.objects.filter(tree__gt=0).filter(testing=0)
+ def items(self):
+ return Version.objects.filter(tree__gt=0).filter(testing=0)
- def item_link(self, obj):
- return "https://www.postgresql.org/docs/%s/%s" % (obj.numtree, obj.relnotes)
+ def item_link(self, obj):
+ return "https://www.postgresql.org/docs/%s/%s" % (obj.numtree, obj.relnotes)
- def item_pubdate(self, obj):
- return datetime.combine(obj.reldate,time.min)
+ def item_pubdate(self, obj):
+ return datetime.combine(obj.reldate,time.min)
diff --git a/pgweb/core/forms.py b/pgweb/core/forms.py
index 917615e9..80e41876 100644
--- a/pgweb/core/forms.py
+++ b/pgweb/core/forms.py
@@ -5,60 +5,60 @@ from models import Organisation
from django.contrib.auth.models import User
class OrganisationForm(forms.ModelForm):
- remove_manager = forms.ModelMultipleChoiceField(required=False, queryset=None, label="Current manager(s)", help_text="Select one or more managers to remove")
- add_manager = forms.EmailField(required=False)
+ remove_manager = forms.ModelMultipleChoiceField(required=False, queryset=None, label="Current manager(s)", help_text="Select one or more managers to remove")
+ add_manager = forms.EmailField(required=False)
- class Meta:
- model = Organisation
- exclude = ('lastconfirmed', 'approved', 'managers', )
+ class Meta:
+ model = Organisation
+ exclude = ('lastconfirmed', 'approved', 'managers', )
- def __init__(self, *args, **kwargs):
- super(OrganisationForm, self).__init__(*args, **kwargs)
- if self.instance and self.instance.pk:
- self.fields['remove_manager'].queryset = self.instance.managers.all()
- else:
- del self.fields['remove_manager']
- del self.fields['add_manager']
+ def __init__(self, *args, **kwargs):
+ super(OrganisationForm, self).__init__(*args, **kwargs)
+ if self.instance and self.instance.pk:
+ self.fields['remove_manager'].queryset = self.instance.managers.all()
+ else:
+ del self.fields['remove_manager']
+ del self.fields['add_manager']
- def clean_add_manager(self):
- if self.cleaned_data['add_manager']:
- # Something was added as manager - let's make sure the user exists
- try:
- User.objects.get(email=self.cleaned_data['add_manager'].lower())
- except User.DoesNotExist:
- raise ValidationError("User with email %s not found" % self.cleaned_data['add_manager'])
+ def clean_add_manager(self):
+ if self.cleaned_data['add_manager']:
+ # Something was added as manager - let's make sure the user exists
+ try:
+ User.objects.get(email=self.cleaned_data['add_manager'].lower())
+ except User.DoesNotExist:
+ raise ValidationError("User with email %s not found" % self.cleaned_data['add_manager'])
- return self.cleaned_data['add_manager']
+ return self.cleaned_data['add_manager']
- def clean_remove_manager(self):
- if self.cleaned_data['remove_manager']:
- removecount = 0
- for toremove in self.cleaned_data['remove_manager']:
- if toremove in self.instance.managers.all():
- removecount += 1
+ def clean_remove_manager(self):
+ if self.cleaned_data['remove_manager']:
+ removecount = 0
+ for toremove in self.cleaned_data['remove_manager']:
+ if toremove in self.instance.managers.all():
+ removecount += 1
- if len(self.instance.managers.all()) - removecount <= 0:
- raise ValidationError("Cannot remove all managers from an organsation!")
- return self.cleaned_data['remove_manager']
+ if len(self.instance.managers.all()) - removecount <= 0:
+ raise ValidationError("Cannot remove all managers from an organsation!")
+ return self.cleaned_data['remove_manager']
- def save(self, commit=True):
- model = super(OrganisationForm, self).save(commit=False)
- if self.cleaned_data.has_key('add_manager') and self.cleaned_data['add_manager']:
- model.managers.add(User.objects.get(email=self.cleaned_data['add_manager'].lower()))
- if self.cleaned_data.has_key('remove_manager') and self.cleaned_data['remove_manager']:
- for toremove in self.cleaned_data['remove_manager']:
- model.managers.remove(toremove)
+ def save(self, commit=True):
+ model = super(OrganisationForm, self).save(commit=False)
+ if self.cleaned_data.has_key('add_manager') and self.cleaned_data['add_manager']:
+ model.managers.add(User.objects.get(email=self.cleaned_data['add_manager'].lower()))
+ if self.cleaned_data.has_key('remove_manager') and self.cleaned_data['remove_manager']:
+ for toremove in self.cleaned_data['remove_manager']:
+ model.managers.remove(toremove)
- return model
+ return model
- def apply_submitter(self, model, User):
- model.managers.add(User)
+ def apply_submitter(self, model, User):
+ model.managers.add(User)
class MergeOrgsForm(forms.Form):
- merge_into = forms.ModelChoiceField(queryset=Organisation.objects.all())
- merge_from = forms.ModelChoiceField(queryset=Organisation.objects.all())
+ merge_into = forms.ModelChoiceField(queryset=Organisation.objects.all())
+ merge_from = forms.ModelChoiceField(queryset=Organisation.objects.all())
- def clean(self):
- if self.cleaned_data['merge_into'] == self.cleaned_data['merge_from']:
- raise ValidationError("The two organisations selected must be different!")
- return self.cleaned_data
+ def clean(self):
+ if self.cleaned_data['merge_into'] == self.cleaned_data['merge_from']:
+ raise ValidationError("The two organisations selected must be different!")
+ return self.cleaned_data
diff --git a/pgweb/core/lookups.py b/pgweb/core/lookups.py
index 806d1d8a..3f64cb2e 100644
--- a/pgweb/core/lookups.py
+++ b/pgweb/core/lookups.py
@@ -6,20 +6,20 @@ from selectable.decorators import staff_member_required
@staff_member_required
class UserLookup(ModelLookup):
- model = User
- search_fields = (
- 'username__icontains',
- 'first_name__icontains',
- 'last_name__icontains',
- )
- filters = {'is_active': True, }
+ model = User
+ search_fields = (
+ 'username__icontains',
+ 'first_name__icontains',
+ 'last_name__icontains',
+ )
+ filters = {'is_active': True, }
- def get_item_value(self, item):
- # Display for currently selected item
- return u"%s (%s)" % (item.username, item.get_full_name())
+ def get_item_value(self, item):
+ # Display for currently selected item
+ return u"%s (%s)" % (item.username, item.get_full_name())
- def get_item_label(self, item):
- # Display for choice listings
- return u"%s (%s)" % (item.username, item.get_full_name())
+ def get_item_label(self, item):
+ # Display for choice listings
+ return u"%s (%s)" % (item.username, item.get_full_name())
registry.register(UserLookup)
diff --git a/pgweb/core/management/commands/cleanup_old_records.py b/pgweb/core/management/commands/cleanup_old_records.py
index b971b76f..4fda8693 100644
--- a/pgweb/core/management/commands/cleanup_old_records.py
+++ b/pgweb/core/management/commands/cleanup_old_records.py
@@ -18,18 +18,18 @@ from datetime import datetime, timedelta
from pgweb.account.models import EmailChangeToken
class Command(BaseCommand):
- help = 'Cleanup old records'
+ help = 'Cleanup old records'
- def handle(self, *args, **options):
- # Grab advisory lock, if available. Lock id is just a random number
- # since we only need to interlock against ourselves. The lock is
- # automatically released when we're done.
- curs = connection.cursor()
- curs.execute("SELECT pg_try_advisory_lock(2896719)")
- if not curs.fetchall()[0][0]:
- print "Failed to get advisory lock, existing cleanup_old_records process stuck?"
- sys.exit(1)
+ def handle(self, *args, **options):
+ # Grab advisory lock, if available. Lock id is just a random number
+ # since we only need to interlock against ourselves. The lock is
+ # automatically released when we're done.
+ curs = connection.cursor()
+ curs.execute("SELECT pg_try_advisory_lock(2896719)")
+ if not curs.fetchall()[0][0]:
+ print "Failed to get advisory lock, existing cleanup_old_records process stuck?"
+ sys.exit(1)
- # Clean up old email change tokens
- with transaction.atomic():
- EmailChangeToken.objects.filter(sentat__lt=datetime.now()-timedelta(hours=24)).delete()
+ # Clean up old email change tokens
+ with transaction.atomic():
+ EmailChangeToken.objects.filter(sentat__lt=datetime.now()-timedelta(hours=24)).delete()
diff --git a/pgweb/core/management/commands/fetch_rss_feeds.py b/pgweb/core/management/commands/fetch_rss_feeds.py
index 20f31704..44c6ee7c 100644
--- a/pgweb/core/management/commands/fetch_rss_feeds.py
+++ b/pgweb/core/management/commands/fetch_rss_feeds.py
@@ -14,39 +14,39 @@ from datetime import datetime
from pgweb.core.models import ImportedRSSFeed, ImportedRSSItem
class Command(BaseCommand):
- help = 'Fetch RSS feeds'
+ help = 'Fetch RSS feeds'
- def handle(self, *args, **options):
- socket.setdefaulttimeout(20)
+ def handle(self, *args, **options):
+ socket.setdefaulttimeout(20)
- with transaction.atomic():
- for importfeed in ImportedRSSFeed.objects.all():
- try:
- feed = feedparser.parse(importfeed.url)
+ with transaction.atomic():
+ for importfeed in ImportedRSSFeed.objects.all():
+ try:
+ feed = feedparser.parse(importfeed.url)
- if not hasattr(feed, 'status'):
- # bozo_excpetion can seemingly be set when there is no error as well,
- # so make sure we only check if we didn't get a status.
- if hasattr(feed,'bozo_exception'):
- raise Exception('Feed load error %s' % feed.bozo_exception)
- raise Exception('Feed load error with no exception!')
- if feed.status != 200:
- raise Exception('Feed returned status %s' % feed.status)
+ if not hasattr(feed, 'status'):
+ # bozo_exception can seemingly be set when there is no error as well,
+ # so make sure we only check if we didn't get a status.
+ if hasattr(feed,'bozo_exception'):
+ raise Exception('Feed load error %s' % feed.bozo_exception)
+ raise Exception('Feed load error with no exception!')
+ if feed.status != 200:
+ raise Exception('Feed returned status %s' % feed.status)
- fetchedsomething = False
- for entry in feed.entries:
- try:
- item = ImportedRSSItem.objects.get(feed=importfeed, url=entry.link)
- except ImportedRSSItem.DoesNotExist:
- item = ImportedRSSItem(feed=importfeed,
- title=entry.title[:100],
- url=entry.link,
- posttime=datetime(*(entry.published_parsed[0:6])),
- )
- item.save()
- fetchedsomething = True
+ fetchedsomething = False
+ for entry in feed.entries:
+ try:
+ item = ImportedRSSItem.objects.get(feed=importfeed, url=entry.link)
+ except ImportedRSSItem.DoesNotExist:
+ item = ImportedRSSItem(feed=importfeed,
+ title=entry.title[:100],
+ url=entry.link,
+ posttime=datetime(*(entry.published_parsed[0:6])),
+ )
+ item.save()
+ fetchedsomething = True
- if fetchedsomething:
- importfeed.purge_related()
- except Exception, e:
- print "Failed to load %s: %s" % (importfeed, e)
+ if fetchedsomething:
+ importfeed.purge_related()
+ except Exception, e:
+ print "Failed to load %s: %s" % (importfeed, e)
diff --git a/pgweb/core/management/commands/moderation_report.py b/pgweb/core/management/commands/moderation_report.py
index edd0bc7c..cdc3f28a 100644
--- a/pgweb/core/management/commands/moderation_report.py
+++ b/pgweb/core/management/commands/moderation_report.py
@@ -15,17 +15,17 @@ from pgweb.util.moderation import get_all_pending_moderations
from pgweb.util.misc import send_template_mail
class Command(BaseCommand):
- help = 'Send moderation report'
+ help = 'Send moderation report'
- def handle(self, *args, **options):
- with transaction.atomic():
- counts = [{'name': unicode(x['name']), 'count': len(x['entries'])} for x in get_all_pending_moderations()]
- if len(counts):
- # Generate an email and send it off
- send_template_mail(settings.NOTIFICATION_FROM,
- settings.NOTIFICATION_EMAIL,
- "PostgreSQL moderation report: %s" % datetime.now(),
- "core/moderation_report.txt",
- {
- 'items': counts,
- })
+ def handle(self, *args, **options):
+ with transaction.atomic():
+ counts = [{'name': unicode(x['name']), 'count': len(x['entries'])} for x in get_all_pending_moderations()]
+ if len(counts):
+ # Generate an email and send it off
+ send_template_mail(settings.NOTIFICATION_FROM,
+ settings.NOTIFICATION_EMAIL,
+ "PostgreSQL moderation report: %s" % datetime.now(),
+ "core/moderation_report.txt",
+ {
+ 'items': counts,
+ })
diff --git a/pgweb/core/management/commands/sessioninfo.py b/pgweb/core/management/commands/sessioninfo.py
index a99f1adf..aa5aaa3e 100644
--- a/pgweb/core/management/commands/sessioninfo.py
+++ b/pgweb/core/management/commands/sessioninfo.py
@@ -7,36 +7,36 @@ from django.contrib.sessions.models import Session
from django.contrib.auth.models import User
class Command(BaseCommand):
- help = 'Dump interesting information about a session'
+ help = 'Dump interesting information about a session'
- def add_arguments(self, parser):
- parser.add_argument('sessionid')
+ def add_arguments(self, parser):
+ parser.add_argument('sessionid')
- def handle(self, *args, **options):
- try:
- session = Session.objects.get(session_key=options['sessionid']).get_decoded()
- uid = session.get('_auth_user_id')
+ def handle(self, *args, **options):
+ try:
+ session = Session.objects.get(session_key=options['sessionid']).get_decoded()
+ uid = session.get('_auth_user_id')
- print u"Session {0}".format(options['sessionid'])
+ print u"Session {0}".format(options['sessionid'])
- try:
- user = User.objects.get(pk=uid)
- print " -- Logged in user --"
- print u"Userid: {0}".format(uid)
- print u"Username: {0}".format(user.username)
- print u"Name: {0}".format(user.get_full_name())
- print u"Email: {0}".format(user.email)
- except User.DoesNotExist:
- print "** Associated user not found. Maybe not logged in?"
+ try:
+ user = User.objects.get(pk=uid)
+ print " -- Logged in user --"
+ print u"Userid: {0}".format(uid)
+ print u"Username: {0}".format(user.username)
+ print u"Name: {0}".format(user.get_full_name())
+ print u"Email: {0}".format(user.email)
+ except User.DoesNotExist:
+ print "** Associated user not found. Maybe not logged in?"
- # Remove known keys
- for k in ('_auth_user_id', '_auth_user_hash', '_auth_user_backend'):
- session.pop(k, None)
- if session:
- print " -- Other session values --"
- for k,v in session.items():
- print u"{0:20} {1}".format(k,v)
+ # Remove known keys
+ for k in ('_auth_user_id', '_auth_user_hash', '_auth_user_backend'):
+ session.pop(k, None)
+ if session:
+ print " -- Other session values --"
+ for k,v in session.items():
+ print u"{0:20} {1}".format(k,v)
- except Session.DoesNotExist:
- raise CommandError('Session not found')
+ except Session.DoesNotExist:
+ raise CommandError('Session not found')
diff --git a/pgweb/core/models.py b/pgweb/core/models.py
index 8edb07bb..d1d74bd9 100644
--- a/pgweb/core/models.py
+++ b/pgweb/core/models.py
@@ -6,203 +6,203 @@ from pgweb.util.misc import varnish_purge
import base64
TESTING_CHOICES = (
- (0, 'Release'),
- (1, 'Release candidate'),
- (2, 'Beta'),
- (3, 'Alpha'),
- )
+ (0, 'Release'),
+ (1, 'Release candidate'),
+ (2, 'Beta'),
+ (3, 'Alpha'),
+ )
TESTING_SHORTSTRING = ('', 'rc', 'beta', 'alpha')
class Version(models.Model):
- tree = models.DecimalField(max_digits=3, decimal_places=1, null=False, blank=False, unique=True)
- latestminor = models.IntegerField(null=False, blank=False, default=0, help_text="For testing versions, latestminor means latest beta/rc number. For other releases, it's the latest minor release number in the tree.")
- reldate = models.DateField(null=False, blank=False)
- relnotes = models.CharField(max_length=32, null=False, blank=False)
- current = models.BooleanField(null=False, blank=False, default=False)
- supported = models.BooleanField(null=False, blank=False, default=True)
- testing = models.IntegerField(null=False, blank=False, default=0, help_text="Testing level of this release. latestminor indicates beta/rc number", choices=TESTING_CHOICES)
- docsloaded = models.DateTimeField(null=True, blank=True, help_text="The timestamp of the latest docs load. Used to control indexing and info on developer docs.")
- firstreldate = models.DateField(null=False, blank=False, help_text="The date of the .0 release in this tree")
- eoldate = models.DateField(null=False, blank=False, help_text="The final release date for this tree")
+ tree = models.DecimalField(max_digits=3, decimal_places=1, null=False, blank=False, unique=True)
+ latestminor = models.IntegerField(null=False, blank=False, default=0, help_text="For testing versions, latestminor means latest beta/rc number. For other releases, it's the latest minor release number in the tree.")
+ reldate = models.DateField(null=False, blank=False)
+ relnotes = models.CharField(max_length=32, null=False, blank=False)
+ current = models.BooleanField(null=False, blank=False, default=False)
+ supported = models.BooleanField(null=False, blank=False, default=True)
+ testing = models.IntegerField(null=False, blank=False, default=0, help_text="Testing level of this release. latestminor indicates beta/rc number", choices=TESTING_CHOICES)
+ docsloaded = models.DateTimeField(null=True, blank=True, help_text="The timestamp of the latest docs load. Used to control indexing and info on developer docs.")
+ firstreldate = models.DateField(null=False, blank=False, help_text="The date of the .0 release in this tree")
+ eoldate = models.DateField(null=False, blank=False, help_text="The final release date for this tree")
- def __unicode__(self):
- return self.versionstring
+ def __unicode__(self):
+ return self.versionstring
- @property
- def versionstring(self):
- return self.buildversionstring(self.latestminor)
+ @property
+ def versionstring(self):
+ return self.buildversionstring(self.latestminor)
- @property
- def numtree(self):
- # Return the proper numeric tree version, taking into account that PostgreSQL 10
- # changed from x.y to x for major version.
- if self.tree >= 10:
- return int(self.tree)
- else:
- return self.tree
+ @property
+ def numtree(self):
+ # Return the proper numeric tree version, taking into account that PostgreSQL 10
+ # changed from x.y to x for major version.
+ if self.tree >= 10:
+ return int(self.tree)
+ else:
+ return self.tree
- def buildversionstring(self, minor):
- if not self.testing:
- return "%s.%s" % (self.numtree, minor)
- else:
- return "%s%s%s" % (self.numtree, TESTING_SHORTSTRING[self.testing], minor)
+ def buildversionstring(self, minor):
+ if not self.testing:
+ return "%s.%s" % (self.numtree, minor)
+ else:
+ return "%s%s%s" % (self.numtree, TESTING_SHORTSTRING[self.testing], minor)
- @property
- def treestring(self):
- if not self.testing:
- return "%s" % self.numtree
- else:
- return "%s %s" % (self.numtree, TESTING_SHORTSTRING[self.testing])
+ @property
+ def treestring(self):
+ if not self.testing:
+ return "%s" % self.numtree
+ else:
+ return "%s %s" % (self.numtree, TESTING_SHORTSTRING[self.testing])
- def save(self):
- # Make sure only one version at a time can be the current one.
- # (there may be some small race conditions here, but the likelyhood
- # that two admins are editing the version list at the same time...)
- if self.current:
- previous = Version.objects.filter(current=True)
- for p in previous:
- if not p == self:
- p.current = False
- p.save() # primary key check avoids recursion
+ def save(self):
+ # Make sure only one version at a time can be the current one.
+ # (there may be some small race conditions here, but the likelihood
+ # that two admins are editing the version list at the same time...)
+ if self.current:
+ previous = Version.objects.filter(current=True)
+ for p in previous:
+ if not p == self:
+ p.current = False
+ p.save() # primary key check avoids recursion
- # Now that we've made any previously current ones non-current, we are
- # free to save this one.
- super(Version, self).save()
+ # Now that we've made any previously current ones non-current, we are
+ # free to save this one.
+ super(Version, self).save()
- class Meta:
- ordering = ('-tree', )
+ class Meta:
+ ordering = ('-tree', )
- def purge_urls(self):
- yield '/$'
- yield '/support/versioning'
- yield '/support/security'
- yield '/docs/$'
- yield '/docs/manuals'
- yield '/about/featurematrix/$'
- yield '/versions.rss'
+ def purge_urls(self):
+ yield '/$'
+ yield '/support/versioning'
+ yield '/support/security'
+ yield '/docs/$'
+ yield '/docs/manuals'
+ yield '/about/featurematrix/$'
+ yield '/versions.rss'
class Country(models.Model):
- name = models.CharField(max_length=100, null=False, blank=False)
- tld = models.CharField(max_length=3, null=False, blank=False)
+ name = models.CharField(max_length=100, null=False, blank=False)
+ tld = models.CharField(max_length=3, null=False, blank=False)
- class Meta:
- db_table = 'countries'
- ordering = ('name',)
- verbose_name = 'Country'
- verbose_name_plural = 'Countries'
+ class Meta:
+ db_table = 'countries'
+ ordering = ('name',)
+ verbose_name = 'Country'
+ verbose_name_plural = 'Countries'
- def __unicode__(self):
- return self.name
+ def __unicode__(self):
+ return self.name
class Language(models.Model):
- # Import data from http://www.loc.gov/standards/iso639-2/ISO-639-2_utf-8.txt
- # (yes, there is a UTF16 BOM in the UTF8 file)
- # (and yes, there is a 7 length value in a field specified as 3 chars)
- alpha3 = models.CharField(max_length=7, null=False, blank=False, primary_key=True)
- alpha3term = models.CharField(max_length=3, null=False, blank=True)
- alpha2 = models.CharField(max_length=2, null=False, blank=True)
- name = models.CharField(max_length=100, null=False, blank=False)
- frenchname = models.CharField(max_length=100, null=False, blank=False)
+ # Import data from http://www.loc.gov/standards/iso639-2/ISO-639-2_utf-8.txt
+ # (yes, there is a UTF16 BOM in the UTF8 file)
+ # (and yes, there is a 7 length value in a field specified as 3 chars)
+ alpha3 = models.CharField(max_length=7, null=False, blank=False, primary_key=True)
+ alpha3term = models.CharField(max_length=3, null=False, blank=True)
+ alpha2 = models.CharField(max_length=2, null=False, blank=True)
+ name = models.CharField(max_length=100, null=False, blank=False)
+ frenchname = models.CharField(max_length=100, null=False, blank=False)
- class Meta:
- ordering = ('name', )
+ class Meta:
+ ordering = ('name', )
- def __unicode__(self):
- return self.name
+ def __unicode__(self):
+ return self.name
class OrganisationType(models.Model):
- typename = models.CharField(max_length=32, null=False, blank=False)
+ typename = models.CharField(max_length=32, null=False, blank=False)
- def __unicode__(self):
- return self.typename
+ def __unicode__(self):
+ return self.typename
class Organisation(models.Model):
- name = models.CharField(max_length=100, null=False, blank=False, unique=True)
- approved = models.BooleanField(null=False, default=False)
- address = models.TextField(null=False, blank=True)
- url = models.URLField(null=False, blank=False)
- email = models.EmailField(null=False, blank=True)
- phone = models.CharField(max_length=100, null=False, blank=True)
- orgtype = models.ForeignKey(OrganisationType, null=False, blank=False, verbose_name="Organisation type")
- managers = models.ManyToManyField(User, blank=False)
- lastconfirmed = models.DateTimeField(null=False, blank=False, auto_now_add=True)
+ name = models.CharField(max_length=100, null=False, blank=False, unique=True)
+ approved = models.BooleanField(null=False, default=False)
+ address = models.TextField(null=False, blank=True)
+ url = models.URLField(null=False, blank=False)
+ email = models.EmailField(null=False, blank=True)
+ phone = models.CharField(max_length=100, null=False, blank=True)
+ orgtype = models.ForeignKey(OrganisationType, null=False, blank=False, verbose_name="Organisation type")
+ managers = models.ManyToManyField(User, blank=False)
+ lastconfirmed = models.DateTimeField(null=False, blank=False, auto_now_add=True)
- send_notification = True
- send_m2m_notification = True
+ send_notification = True
+ send_m2m_notification = True
- def __unicode__(self):
- return self.name
+ def __unicode__(self):
+ return self.name
- class Meta:
- ordering = ('name',)
+ class Meta:
+ ordering = ('name',)
# Basic classes for importing external RSS feeds, such as planet
class ImportedRSSFeed(models.Model):
- internalname = models.CharField(max_length=32, null=False, blank=False, unique=True)
- url = models.URLField(null=False, blank=False)
- purgepattern = models.CharField(max_length=512, null=False, blank=True, help_text="NOTE! Pattern will be automatically anchored with ^ at the beginning, but you must lead with a slash in most cases - and don't forget to include the trailing $ in most cases")
+ internalname = models.CharField(max_length=32, null=False, blank=False, unique=True)
+ url = models.URLField(null=False, blank=False)
+ purgepattern = models.CharField(max_length=512, null=False, blank=True, help_text="NOTE! Pattern will be automatically anchored with ^ at the beginning, but you must lead with a slash in most cases - and don't forget to include the trailing $ in most cases")
- def purge_related(self):
- if self.purgepattern:
- varnish_purge(self.purgepattern)
+ def purge_related(self):
+ if self.purgepattern:
+ varnish_purge(self.purgepattern)
- def __unicode__(self):
- return self.internalname
+ def __unicode__(self):
+ return self.internalname
class ImportedRSSItem(models.Model):
- feed = models.ForeignKey(ImportedRSSFeed)
- title = models.CharField(max_length=100, null=False, blank=False)
- url = models.URLField(null=False, blank=False)
- posttime = models.DateTimeField(null=False, blank=False)
+ feed = models.ForeignKey(ImportedRSSFeed)
+ title = models.CharField(max_length=100, null=False, blank=False)
+ url = models.URLField(null=False, blank=False)
+ posttime = models.DateTimeField(null=False, blank=False)
- def __unicode__(self):
- return self.title
+ def __unicode__(self):
+ return self.title
- @property
- def date(self):
- return self.posttime.strftime("%Y-%m-%d")
+ @property
+ def date(self):
+ return self.posttime.strftime("%Y-%m-%d")
# From man sshd, except for ssh-dss
_valid_keytypes = ['ecdsa-sha2-nistp256', 'ecdsa-sha2-nistp384', 'ecdsa-sha2-nistp521', 'ssh-ed25519', 'ssh-rsa']
# Options, keytype, key, comment. But we don't support options.
def validate_sshkey(key):
- lines = key.splitlines()
- for k in lines:
- pieces = k.split()
- if len(pieces) == 0:
- raise ValidationError("Empty keys are not allowed")
- if len(pieces) > 3:
- raise ValidationError('Paste each ssh key without options, e.g. "ssh-rsa AAAAbbbcc mykey@machine"')
- if pieces[0] == 'ssh-dss':
- raise ValidationError("For security reasons, ssh-dss keys are not supported")
- if pieces[0] not in _valid_keytypes:
- raise ValidationError(u"Only keys of types {0} are supported, not {1}.".format(", ".join(_valid_keytypes), pieces[0]))
- try:
- base64.b64decode(pieces[1])
- except:
- raise ValidationError("Incorrect base64 encoded key!")
+ lines = key.splitlines()
+ for k in lines:
+ pieces = k.split()
+ if len(pieces) == 0:
+ raise ValidationError("Empty keys are not allowed")
+ if len(pieces) > 3:
+ raise ValidationError('Paste each ssh key without options, e.g. "ssh-rsa AAAAbbbcc mykey@machine"')
+ if pieces[0] == 'ssh-dss':
+ raise ValidationError("For security reasons, ssh-dss keys are not supported")
+ if pieces[0] not in _valid_keytypes:
+ raise ValidationError(u"Only keys of types {0} are supported, not {1}.".format(", ".join(_valid_keytypes), pieces[0]))
+ try:
+ base64.b64decode(pieces[1])
+ except:
+ raise ValidationError("Incorrect base64 encoded key!")
# Extra attributes for users (if they have them)
class UserProfile(models.Model):
- user = models.OneToOneField(User, null=False, blank=False, primary_key=True)
- sshkey = models.TextField(null=False, blank=True, verbose_name="SSH key", help_text= "Paste one or more public keys in OpenSSH format, one per line.", validators=[validate_sshkey, ])
- lastmodified = models.DateTimeField(null=False, blank=False, auto_now=True)
+ user = models.OneToOneField(User, null=False, blank=False, primary_key=True)
+ sshkey = models.TextField(null=False, blank=True, verbose_name="SSH key", help_text= "Paste one or more public keys in OpenSSH format, one per line.", validators=[validate_sshkey, ])
+ lastmodified = models.DateTimeField(null=False, blank=False, auto_now=True)
# Notifications sent for any moderated content.
# Yes, we uglify it by storing the type of object as a string, so we don't
# end up with a bazillion fields being foreign keys. Ugly, but works.
class ModerationNotification(models.Model):
- objectid = models.IntegerField(null=False, blank=False, db_index=True)
- objecttype = models.CharField(null=False, blank=False, max_length=100)
- text = models.TextField(null=False, blank=False)
- author = models.CharField(null=False, blank=False, max_length=100)
- date = models.DateTimeField(null=False, blank=False, auto_now=True)
+ objectid = models.IntegerField(null=False, blank=False, db_index=True)
+ objecttype = models.CharField(null=False, blank=False, max_length=100)
+ text = models.TextField(null=False, blank=False)
+ author = models.CharField(null=False, blank=False, max_length=100)
+ date = models.DateTimeField(null=False, blank=False, auto_now=True)
- def __unicode__(self):
- return "%s id %s (%s): %s" % (self.objecttype, self.objectid, self.date, self.text[:50])
+ def __unicode__(self):
+ return "%s id %s (%s): %s" % (self.objecttype, self.objectid, self.date, self.text[:50])
- class Meta:
- ordering = ('-date', )
+ class Meta:
+ ordering = ('-date', )
diff --git a/pgweb/core/struct.py b/pgweb/core/struct.py
index e3c697b3..2829d1a5 100644
--- a/pgweb/core/struct.py
+++ b/pgweb/core/struct.py
@@ -1,16 +1,16 @@
import os
def get_struct():
- yield ('', None)
- yield ('community/', None)
- yield ('support/versioning/', None)
+ yield ('', None)
+ yield ('community/', None)
+ yield ('support/versioning/', None)
- # Enumerate all the templates that will generate pages
- pages_dir = os.path.abspath(os.path.join(os.path.dirname(__file__), '../../templates/pages/'))
- for root, dirs, files in os.walk(pages_dir):
- # Cut out the reference to the absolute root path
- r = '' if root == pages_dir else os.path.relpath(root, pages_dir)
- for f in files:
- if f.endswith('.html'):
- yield (os.path.join(r, f)[:-5] + "/",
- None)
+ # Enumerate all the templates that will generate pages
+ pages_dir = os.path.abspath(os.path.join(os.path.dirname(__file__), '../../templates/pages/'))
+ for root, dirs, files in os.walk(pages_dir):
+ # Cut out the reference to the absolute root path
+ r = '' if root == pages_dir else os.path.relpath(root, pages_dir)
+ for f in files:
+ if f.endswith('.html'):
+ yield (os.path.join(r, f)[:-5] + "/",
+ None)
diff --git a/pgweb/core/templatetags/pgfilters.py b/pgweb/core/templatetags/pgfilters.py
index 8670535e..2d8cce99 100644
--- a/pgweb/core/templatetags/pgfilters.py
+++ b/pgweb/core/templatetags/pgfilters.py
@@ -7,50 +7,50 @@ register = template.Library()
@register.filter(name='class_name')
def class_name(ob):
- return ob.__class__.__name__
+ return ob.__class__.__name__
@register.filter(is_safe=True)
def field_class(value, arg):
- if 'class' in value.field.widget.attrs:
- c = arg + ' ' + value.field.widget.attrs['class']
- else:
- c = arg
- return value.as_widget(attrs={"class": c})
+ if 'class' in value.field.widget.attrs:
+ c = arg + ' ' + value.field.widget.attrs['class']
+ else:
+ c = arg
+ return value.as_widget(attrs={"class": c})
@register.filter(name='hidemail')
@stringfilter
def hidemail(value):
- return value.replace('@', ' at ')
+ return value.replace('@', ' at ')
@register.filter(is_safe=True)
def ischeckbox(obj):
- return obj.field.widget.__class__.__name__ in ["CheckboxInput", "CheckboxSelectMultiple"] and not getattr(obj.field, 'regular_field', False)
+ return obj.field.widget.__class__.__name__ in ["CheckboxInput", "CheckboxSelectMultiple"] and not getattr(obj.field, 'regular_field', False)
@register.filter(is_safe=True)
def ismultiplecheckboxes(obj):
- return obj.field.widget.__class__.__name__ == "CheckboxSelectMultiple" and not getattr(obj.field, 'regular_field', False)
+ return obj.field.widget.__class__.__name__ == "CheckboxSelectMultiple" and not getattr(obj.field, 'regular_field', False)
@register.filter(is_safe=True)
def isrequired_error(obj):
- if obj.errors and obj.errors[0] == u"This field is required.":
- return True
- return False
+ if obj.errors and obj.errors[0] == u"This field is required.":
+ return True
+ return False
@register.filter(is_safe=True)
def label_class(value, arg):
- return value.label_tag(attrs={'class': arg})
+ return value.label_tag(attrs={'class': arg})
@register.filter()
def planet_author(obj):
- # takes a ImportedRSSItem object from a Planet feed and extracts the author
- # information from the title
- return obj.title.split(':')[0]
+ # takes an ImportedRSSItem object from a Planet feed and extracts the author
+ # information from the title
+ return obj.title.split(':')[0]
@register.filter()
def planet_title(obj):
- # takes a ImportedRSSItem object from a Planet feed and extracts the info
- # specific to the title of the Planet entry
- return ":".join(obj.title.split(':')[1:])
+ # takes an ImportedRSSItem object from a Planet feed and extracts the info
+ # specific to the title of the Planet entry
+ return ":".join(obj.title.split(':')[1:])
@register.filter(name='dictlookup')
def dictlookup(value, key):
@@ -58,4 +58,4 @@ def dictlookup(value, key):
@register.filter(name='json')
def tojson(value):
- return json.dumps(value)
+ return json.dumps(value)
diff --git a/pgweb/core/views.py b/pgweb/core/views.py
index efb07ce3..509a52bc 100644
--- a/pgweb/core/views.py
+++ b/pgweb/core/views.py
@@ -40,101 +40,101 @@ from forms import OrganisationForm, MergeOrgsForm
# Front page view
@cache(minutes=10)
def home(request):
- news = NewsArticle.objects.filter(approved=True)[:5]
- today = date.today()
- # get up to seven events to display on the homepage
- event_base_queryset = Event.objects.select_related('country').filter(
- approved=True,
- enddate__gte=today,
- )
- # first, see if there are up to two non-badged events within 90 days
- other_events = event_base_queryset.filter(
- badged=False,
- startdate__lte=today + timedelta(days=90),
- ).order_by('enddate', 'startdate')[:2]
- # based on that, get 7 - |other_events| community events to display
- community_event_queryset = event_base_queryset.filter(badged=True).order_by('enddate', 'startdate')[:(7 - other_events.count())]
- # now, return all the events in one unioned array!
- events = community_event_queryset.union(other_events).order_by('enddate', 'startdate').all()
- versions = Version.objects.filter(supported=True)
- planet = ImportedRSSItem.objects.filter(feed__internalname="planet").order_by("-posttime")[:9]
+ news = NewsArticle.objects.filter(approved=True)[:5]
+ today = date.today()
+ # get up to seven events to display on the homepage
+ event_base_queryset = Event.objects.select_related('country').filter(
+ approved=True,
+ enddate__gte=today,
+ )
+ # first, see if there are up to two non-badged events within 90 days
+ other_events = event_base_queryset.filter(
+ badged=False,
+ startdate__lte=today + timedelta(days=90),
+ ).order_by('enddate', 'startdate')[:2]
+ # based on that, get 7 - |other_events| community events to display
+ community_event_queryset = event_base_queryset.filter(badged=True).order_by('enddate', 'startdate')[:(7 - other_events.count())]
+ # now, return all the events in one unioned array!
+ events = community_event_queryset.union(other_events).order_by('enddate', 'startdate').all()
+ versions = Version.objects.filter(supported=True)
+ planet = ImportedRSSItem.objects.filter(feed__internalname="planet").order_by("-posttime")[:9]
- return render(request, 'index.html', {
- 'title': 'The world\'s most advanced open source database',
- 'news': news,
- 'newstags': NewsTag.objects.all(),
- 'events': events,
- 'versions': versions,
- 'planet': planet,
- })
+ return render(request, 'index.html', {
+ 'title': 'The world\'s most advanced open source database',
+ 'news': news,
+ 'newstags': NewsTag.objects.all(),
+ 'events': events,
+ 'versions': versions,
+ 'planet': planet,
+ })
# About page view (contains information about PostgreSQL + random quotes)
@cache(minutes=10)
def about(request):
- # get 5 random quotes
- quotes = Quote.objects.filter(approved=True).order_by('?').all()[:5]
- return render_pgweb(request, 'about', 'core/about.html', {
- 'quotes': quotes,
- })
+ # get 5 random quotes
+ quotes = Quote.objects.filter(approved=True).order_by('?').all()[:5]
+ return render_pgweb(request, 'about', 'core/about.html', {
+ 'quotes': quotes,
+ })
# Community main page (contains surveys and potentially more)
def community(request):
- s = Survey.objects.filter(current=True)
- try:
- s = s[0]
- except:
- s = None
- planet = ImportedRSSItem.objects.filter(feed__internalname="planet").order_by("-posttime")[:7]
- return render_pgweb(request, 'community', 'core/community.html', {
- 'survey': s,
- 'planet': planet,
- })
+ s = Survey.objects.filter(current=True)
+ try:
+ s = s[0]
+ except:
+ s = None
+ planet = ImportedRSSItem.objects.filter(feed__internalname="planet").order_by("-posttime")[:7]
+ return render_pgweb(request, 'community', 'core/community.html', {
+ 'survey': s,
+ 'planet': planet,
+ })
# List of supported versions
def versions(request):
- return render_pgweb(request, 'support', 'support/versioning.html', {
- 'versions': Version.objects.filter(tree__gt=0).filter(testing=0),
- })
+ return render_pgweb(request, 'support', 'support/versioning.html', {
+ 'versions': Version.objects.filter(tree__gt=0).filter(testing=0),
+ })
re_staticfilenames = re.compile("^[0-9A-Z/_-]+$", re.IGNORECASE)
# Generic fallback view for static pages
def fallback(request, url):
- if url.find('..') > -1:
- raise Http404('Page not found.')
+ if url.find('..') > -1:
+ raise Http404('Page not found.')
- if not re_staticfilenames.match(url):
- raise Http404('Page not found.')
+ if not re_staticfilenames.match(url):
+ raise Http404('Page not found.')
- try:
- t = loader.get_template('pages/%s.html' % url)
- except TemplateDoesNotExist:
- try:
- t = loader.get_template('pages/%s/en.html' % url)
- except TemplateDoesNotExist:
- raise Http404('Page not found.')
+ try:
+ t = loader.get_template('pages/%s.html' % url)
+ except TemplateDoesNotExist:
+ try:
+ t = loader.get_template('pages/%s/en.html' % url)
+ except TemplateDoesNotExist:
+ raise Http404('Page not found.')
- # Guestimate the nav section by looking at the URL and taking the first
- # piece of it.
- try:
- navsect = url.split('/',2)[0]
- except:
- navsect = ''
- c = PGWebContextProcessor(request)
- c.update({'navmenu': get_nav_menu(navsect)})
- return HttpResponse(t.render(c))
+ # Guesstimate the nav section by looking at the URL and taking the first
+ # piece of it.
+ try:
+ navsect = url.split('/',2)[0]
+ except:
+ navsect = ''
+ c = PGWebContextProcessor(request)
+ c.update({'navmenu': get_nav_menu(navsect)})
+ return HttpResponse(t.render(c))
# Edit-forms for core objects
@login_required
def organisationform(request, itemid):
- if itemid != 'new':
- get_object_or_404(Organisation, pk=itemid, managers=request.user)
+ if itemid != 'new':
+ get_object_or_404(Organisation, pk=itemid, managers=request.user)
- return simple_form(Organisation, itemid, request, OrganisationForm,
- redirect='/account/edit/organisations/')
+ return simple_form(Organisation, itemid, request, OrganisationForm,
+ redirect='/account/edit/organisations/')
# robots.txt
def robots(request):
- return HttpResponse("""User-agent: *
+ return HttpResponse("""User-agent: *
Disallow: /admin/
Disallow: /account/
Disallow: /docs/devel/
@@ -148,203 +148,203 @@ Sitemap: https://www.postgresql.org/sitemap.xml
def _make_sitemap(pagelist):
- resp = HttpResponse(content_type='text/xml')
- x = PgXmlHelper(resp)
- x.startDocument()
- x.startElement('urlset', {'xmlns': 'http://www.sitemaps.org/schemas/sitemap/0.9'})
- pages = 0
- for p in pagelist:
- pages+=1
- x.startElement('url', {})
- x.add_xml_element('loc', 'https://www.postgresql.org/%s' % urllib.quote(p[0]))
- if len(p) > 1 and p[1]:
- x.add_xml_element('priority', unicode(p[1]))
- if len(p) > 2 and p[2]:
- x.add_xml_element('lastmod', p[2].isoformat() + "Z")
- x.endElement('url')
- x.endElement('urlset')
- x.endDocument()
- return resp
+ resp = HttpResponse(content_type='text/xml')
+ x = PgXmlHelper(resp)
+ x.startDocument()
+ x.startElement('urlset', {'xmlns': 'http://www.sitemaps.org/schemas/sitemap/0.9'})
+ pages = 0
+ for p in pagelist:
+ pages+=1
+ x.startElement('url', {})
+ x.add_xml_element('loc', 'https://www.postgresql.org/%s' % urllib.quote(p[0]))
+ if len(p) > 1 and p[1]:
+ x.add_xml_element('priority', unicode(p[1]))
+ if len(p) > 2 and p[2]:
+ x.add_xml_element('lastmod', p[2].isoformat() + "Z")
+ x.endElement('url')
+ x.endElement('urlset')
+ x.endDocument()
+ return resp
# Sitemap (XML format)
@cache(hours=6)
def sitemap(request):
- return _make_sitemap(get_all_pages_struct())
+ return _make_sitemap(get_all_pages_struct())
# Internal sitemap (only for our own search engine)
# Note! Still served up to anybody who wants it, so don't
# put anything secret in it...
@cache(hours=6)
def sitemap_internal(request):
- return _make_sitemap(get_all_pages_struct(method='get_internal_struct'))
+ return _make_sitemap(get_all_pages_struct(method='get_internal_struct'))
# dynamic CSS serving, meaning we merge a number of different CSS into a
# single one, making sure it turns into a single http response. We do this
# dynamically, since the output will be cached.
_dynamic_cssmap = {
- 'base': ['media/css/main.css',
- 'media/css/normalize.css',],
- 'docs': ['media/css/global.css',
- 'media/css/table.css',
- 'media/css/text.css',
- 'media/css/docs.css'],
- }
+ 'base': ['media/css/main.css',
+ 'media/css/normalize.css',],
+ 'docs': ['media/css/global.css',
+ 'media/css/table.css',
+ 'media/css/text.css',
+ 'media/css/docs.css'],
+ }
@cache(hours=6)
def dynamic_css(request, css):
- if not _dynamic_cssmap.has_key(css):
- raise Http404('CSS not found')
- files = _dynamic_cssmap[css]
- resp = HttpResponse(content_type='text/css')
+ if not _dynamic_cssmap.has_key(css):
+ raise Http404('CSS not found')
+ files = _dynamic_cssmap[css]
+ resp = HttpResponse(content_type='text/css')
- # We honor if-modified-since headers by looking at the most recently
- # touched CSS file.
- latestmod = 0
- for fn in files:
- try:
- stime = os.stat(fn).st_mtime
- if latestmod < stime:
- latestmod = stime
- except OSError:
- # If we somehow referred to a file that didn't exist, or
- # one that we couldn't access.
- raise Http404('CSS (sub) not found')
- if request.META.has_key('HTTP_IF_MODIFIED_SINCE'):
- # This code is mostly stolen from django :)
- matches = re.match(r"^([^;]+)(; length=([0-9]+))?$",
- request.META.get('HTTP_IF_MODIFIED_SINCE'),
- re.IGNORECASE)
- header_mtime = parse_http_date(matches.group(1))
- # We don't do length checking, just the date
- if int(latestmod) <= header_mtime:
- return HttpResponseNotModified(content_type='text/css')
- resp['Last-Modified'] = http_date(latestmod)
+ # We honor if-modified-since headers by looking at the most recently
+ # touched CSS file.
+ latestmod = 0
+ for fn in files:
+ try:
+ stime = os.stat(fn).st_mtime
+ if latestmod < stime:
+ latestmod = stime
+ except OSError:
+ # If we somehow referred to a file that didn't exist, or
+ # one that we couldn't access.
+ raise Http404('CSS (sub) not found')
+ if request.META.has_key('HTTP_IF_MODIFIED_SINCE'):
+ # This code is mostly stolen from django :)
+ matches = re.match(r"^([^;]+)(; length=([0-9]+))?$",
+ request.META.get('HTTP_IF_MODIFIED_SINCE'),
+ re.IGNORECASE)
+ header_mtime = parse_http_date(matches.group(1))
+ # We don't do length checking, just the date
+ if int(latestmod) <= header_mtime:
+ return HttpResponseNotModified(content_type='text/css')
+ resp['Last-Modified'] = http_date(latestmod)
- for fn in files:
- with open(fn) as f:
- resp.write("/* %s */\n" % fn)
- resp.write(f.read())
- resp.write("\n")
+ for fn in files:
+ with open(fn) as f:
+ resp.write("/* %s */\n" % fn)
+ resp.write(f.read())
+ resp.write("\n")
- return resp
+ return resp
@nocache
def csrf_failure(request, reason=''):
- resp = render(request, 'errors/csrf_failure.html', {
- 'reason': reason,
- })
- resp.status_code = 403 # Forbidden
- return resp
+ resp = render(request, 'errors/csrf_failure.html', {
+ 'reason': reason,
+ })
+ resp.status_code = 403 # Forbidden
+ return resp
# Basic information about the connection
@cache(seconds=30)
def system_information(request):
- return render(request,'core/system_information.html', {
- 'server': os.uname()[1],
- 'cache_server': request.META['REMOTE_ADDR'] or None,
- 'client_ip': get_client_ip(request),
- 'django_version': django.get_version(),
- })
+ return render(request,'core/system_information.html', {
+ 'server': os.uname()[1],
+ 'cache_server': request.META['REMOTE_ADDR'] or None,
+ 'client_ip': get_client_ip(request),
+ 'django_version': django.get_version(),
+ })
# Sync timestamp for automirror. Keep it around for 30 seconds
# Basically just a check that we can access the backend still...
@cache(seconds=30)
def sync_timestamp(request):
- s = datetime.now().strftime("%Y-%m-%d %H:%M:%S\n")
- r = HttpResponse(s, content_type='text/plain')
- r['Content-Length'] = len(s)
- return r
+ s = datetime.now().strftime("%Y-%m-%d %H:%M:%S\n")
+ r = HttpResponse(s, content_type='text/plain')
+ r['Content-Length'] = len(s)
+ return r
# List of all unapproved objects, for the special admin page
@login_required
@user_passes_test(lambda u: u.is_staff)
@user_passes_test(lambda u: u.groups.filter(name='pgweb moderators').exists())
def admin_pending(request):
- return render(request, 'core/admin_pending.html', {
- 'app_list': get_all_pending_moderations(),
- })
+ return render(request, 'core/admin_pending.html', {
+ 'app_list': get_all_pending_moderations(),
+ })
# Purge objects from varnish, for the admin pages
@login_required
@user_passes_test(lambda u: u.is_staff)
@user_passes_test(lambda u: u.groups.filter(name='varnish purgers').exists())
def admin_purge(request):
- if request.method == 'POST':
- url = request.POST['url']
- expr = request.POST['expr']
- xkey = request.POST['xkey']
- l = len(filter(None, [url, expr, xkey]))
- if l == 0:
- # Nothing specified
- return HttpResponseRedirect('.')
- elif l > 1:
- messages.error(request, "Can only specify one of url, expression and xkey!")
- return HttpResponseRedirect('.')
+ if request.method == 'POST':
+ url = request.POST['url']
+ expr = request.POST['expr']
+ xkey = request.POST['xkey']
+ l = len(filter(None, [url, expr, xkey]))
+ if l == 0:
+ # Nothing specified
+ return HttpResponseRedirect('.')
+ elif l > 1:
+ messages.error(request, "Can only specify one of url, expression and xkey!")
+ return HttpResponseRedirect('.')
- if url:
- varnish_purge(url)
- elif expr:
- varnish_purge_expr(expr)
- else:
- varnish_purge_xkey(xkey)
+ if url:
+ varnish_purge(url)
+ elif expr:
+ varnish_purge_expr(expr)
+ else:
+ varnish_purge_xkey(xkey)
- messages.info(request, "Purge added.")
- return HttpResponseRedirect('.')
+ messages.info(request, "Purge added.")
+ return HttpResponseRedirect('.')
- # Fetch list of latest purges
- curs = connection.cursor()
- curs.execute("SELECT added, completed, consumer, CASE WHEN mode = 'K' THEN 'XKey' WHEN mode='P' THEN 'URL' ELSE 'Expression' END, expr FROM varnishqueue.queue q LEFT JOIN varnishqueue.consumers c ON c.consumerid=q.consumerid ORDER BY added DESC")
- latest = curs.fetchall()
+ # Fetch list of latest purges
+ curs = connection.cursor()
+ curs.execute("SELECT added, completed, consumer, CASE WHEN mode = 'K' THEN 'XKey' WHEN mode='P' THEN 'URL' ELSE 'Expression' END, expr FROM varnishqueue.queue q LEFT JOIN varnishqueue.consumers c ON c.consumerid=q.consumerid ORDER BY added DESC")
+ latest = curs.fetchall()
- return render(request, 'core/admin_purge.html', {
- 'latest_purges': latest,
- })
+ return render(request, 'core/admin_purge.html', {
+ 'latest_purges': latest,
+ })
@csrf_exempt
def api_varnish_purge(request):
- if not request.META['REMOTE_ADDR'] in settings.VARNISH_PURGERS:
- return HttpServerError(request, "Invalid client address")
- if request.method != 'POST':
- return HttpServerError(request, "Can't use this way")
- n = int(request.POST['n'])
- curs = connection.cursor()
- for i in range(0, n):
- expr = request.POST['p%s' % i]
- curs.execute("SELECT varnish_purge_expr(%s)", (expr, ))
- return HttpResponse("Purged %s entries\n" % n)
+ if not request.META['REMOTE_ADDR'] in settings.VARNISH_PURGERS:
+ return HttpServerError(request, "Invalid client address")
+ if request.method != 'POST':
+ return HttpServerError(request, "Can't use this way")
+ n = int(request.POST['n'])
+ curs = connection.cursor()
+ for i in range(0, n):
+ expr = request.POST['p%s' % i]
+ curs.execute("SELECT varnish_purge_expr(%s)", (expr, ))
+ return HttpResponse("Purged %s entries\n" % n)
# Merge two organisations
@login_required
@user_passes_test(lambda u: u.is_superuser)
@transaction.atomic
def admin_mergeorg(request):
- if request.method == 'POST':
- form = MergeOrgsForm(data=request.POST)
- if form.is_valid():
- # Ok, try to actually merge organisations, by moving all objects
- # attached
- f = form.cleaned_data['merge_from']
- t = form.cleaned_data['merge_into']
- for e in f.event_set.all():
- e.org = t
- e.save()
- for n in f.newsarticle_set.all():
- n.org = t
- n.save()
- for p in f.product_set.all():
- p.org = t
- p.save()
- for p in f.professionalservice_set.all():
- p.organisation = t
- p.save()
- # Now that everything is moved, we can delete the organisation
- f.delete()
+ if request.method == 'POST':
+ form = MergeOrgsForm(data=request.POST)
+ if form.is_valid():
+ # Ok, try to actually merge organisations, by moving all objects
+ # attached
+ f = form.cleaned_data['merge_from']
+ t = form.cleaned_data['merge_into']
+ for e in f.event_set.all():
+ e.org = t
+ e.save()
+ for n in f.newsarticle_set.all():
+ n.org = t
+ n.save()
+ for p in f.product_set.all():
+ p.org = t
+ p.save()
+ for p in f.professionalservice_set.all():
+ p.organisation = t
+ p.save()
+ # Now that everything is moved, we can delete the organisation
+ f.delete()
- return HttpResponseRedirect("/admin/core/organisation/")
- # Else fall through to re-render form with errors
- else:
- form = MergeOrgsForm()
+ return HttpResponseRedirect("/admin/core/organisation/")
+ # Else fall through to re-render form with errors
+ else:
+ form = MergeOrgsForm()
- return render(request, 'core/admin_mergeorg.html', {
- 'form': form,
+ return render(request, 'core/admin_mergeorg.html', {
+ 'form': form,
})
diff --git a/pgweb/docs/forms.py b/pgweb/docs/forms.py
index 8a9fae6c..c26695ff 100644
--- a/pgweb/docs/forms.py
+++ b/pgweb/docs/forms.py
@@ -1,8 +1,8 @@
from django import forms
class DocCommentForm(forms.Form):
- name = forms.CharField(max_length=100, required=True, label='Your Name')
- email = forms.EmailField(max_length=100, required=True, label='Your Email')
- shortdesc = forms.CharField(max_length=100, required=True, label="Subject")
- details = forms.CharField(required=True, widget=forms.Textarea,
- label="What is your comment?")
+ name = forms.CharField(max_length=100, required=True, label='Your Name')
+ email = forms.EmailField(max_length=100, required=True, label='Your Email')
+ shortdesc = forms.CharField(max_length=100, required=True, label="Subject")
+ details = forms.CharField(required=True, widget=forms.Textarea,
+ label="What is your comment?")
diff --git a/pgweb/docs/migrations/0003_docs_alias.py b/pgweb/docs/migrations/0003_docs_alias.py
index a6c72a1b..745042e3 100644
--- a/pgweb/docs/migrations/0003_docs_alias.py
+++ b/pgweb/docs/migrations/0003_docs_alias.py
@@ -23,5 +23,5 @@ class Migration(migrations.Migration):
'verbose_name_plural': 'Doc page aliases',
},
),
- migrations.RunSQL("CREATE UNIQUE INDEX docsalias_unique ON docsalias (LEAST(file1, file2), GREATEST(file1, file2))"),
+ migrations.RunSQL("CREATE UNIQUE INDEX docsalias_unique ON docsalias (LEAST(file1, file2), GREATEST(file1, file2))"),
]
diff --git a/pgweb/docs/models.py b/pgweb/docs/models.py
index a2754b60..7a522147 100644
--- a/pgweb/docs/models.py
+++ b/pgweb/docs/models.py
@@ -2,32 +2,32 @@ from django.db import models
from pgweb.core.models import Version
class DocPage(models.Model):
- id = models.AutoField(null=False, primary_key=True)
- file = models.CharField(max_length=64, null=False, blank=False)
- version = models.ForeignKey(Version, null=False, blank=False, db_column='version', to_field='tree')
- title = models.CharField(max_length=256, null=True, blank=True)
- content = models.TextField(null=True, blank=True)
+ id = models.AutoField(null=False, primary_key=True)
+ file = models.CharField(max_length=64, null=False, blank=False)
+ version = models.ForeignKey(Version, null=False, blank=False, db_column='version', to_field='tree')
+ title = models.CharField(max_length=256, null=True, blank=True)
+ content = models.TextField(null=True, blank=True)
- def display_version(self):
- """Version as used for displaying and in URLs"""
- if self.version.tree == 0:
- return 'devel'
- else:
- return str(self.version.numtree)
+ def display_version(self):
+ """Version as used for displaying and in URLs"""
+ if self.version.tree == 0:
+ return 'devel'
+ else:
+ return str(self.version.numtree)
- class Meta:
- db_table = 'docs'
- # Index file first, because we want to list versions by file
- unique_together = [('file', 'version')]
+ class Meta:
+ db_table = 'docs'
+ # Index file first, because we want to list versions by file
+ unique_together = [('file', 'version')]
class DocPageAlias(models.Model):
- file1 = models.CharField(max_length=64, null=False, blank=False, unique=True)
- file2 = models.CharField(max_length=64, null=False, blank=False, unique=True)
+ file1 = models.CharField(max_length=64, null=False, blank=False, unique=True)
+ file2 = models.CharField(max_length=64, null=False, blank=False, unique=True)
- def __unicode__(self):
- return u"%s <-> %s" % (self.file1, self.file2)
+ def __unicode__(self):
+ return u"%s <-> %s" % (self.file1, self.file2)
- # XXX: needs a unique functional index as well, see the migration!
- class Meta:
- db_table = 'docsalias'
- verbose_name_plural='Doc page aliases'
+ # XXX: needs a unique functional index as well, see the migration!
+ class Meta:
+ db_table = 'docsalias'
+ verbose_name_plural='Doc page aliases'
diff --git a/pgweb/docs/struct.py b/pgweb/docs/struct.py
index 87e5635e..942d0573 100644
--- a/pgweb/docs/struct.py
+++ b/pgweb/docs/struct.py
@@ -2,52 +2,52 @@ from django.db import connection
from pgweb.core.models import Version
def get_struct():
- currentversion = Version.objects.get(current=True)
+ currentversion = Version.objects.get(current=True)
- # Can't use a model here, because we don't (for some reason) have a
- # hard link to the versions table here
- # Make sure we exclude the /devel/ docs because they are blocked by
- # robots.txt, and thus will cause tohusands of warnings in search
- # engines.
- curs = connection.cursor()
- curs.execute("SELECT d.version, d.file, v.docsloaded, v.testing FROM docs d INNER JOIN core_version v ON v.tree=d.version WHERE version > 0 ORDER BY d.version DESC")
+ # Can't use a model here, because we don't (for some reason) have a
+ # hard link to the versions table here
+ # Make sure we exclude the /devel/ docs because they are blocked by
+ # robots.txt, and thus will cause thousands of warnings in search
+ # engines.
+ curs = connection.cursor()
+ curs.execute("SELECT d.version, d.file, v.docsloaded, v.testing FROM docs d INNER JOIN core_version v ON v.tree=d.version WHERE version > 0 ORDER BY d.version DESC")
- # Start priority is higher than average but lower than what we assign
- # to the current version of the docs.
- docprio = 0.8
- lastversion = None
+ # Start priority is higher than average but lower than what we assign
+ # to the current version of the docs.
+ docprio = 0.8
+ lastversion = None
- for version, filename, loaded, testing in curs.fetchall():
- # Decrease the priority with 0.1 for every version of the docs
- # we move back in time, until we reach 0.1. At 0.1 it's unlikely
- # to show up in a general search, but still possible to reach
- # through version specific searching for example.
- if lastversion != version:
- if docprio > 0.2:
- docprio -= 0.1
- lastversion = version
+ for version, filename, loaded, testing in curs.fetchall():
+ # Decrease the priority with 0.1 for every version of the docs
+ # we move back in time, until we reach 0.1. At 0.1 it's unlikely
+ # to show up in a general search, but still possible to reach
+ # through version specific searching for example.
+ if lastversion != version:
+ if docprio > 0.2:
+ docprio -= 0.1
+ lastversion = version
- if version >= 10:
- version = int(version)
+ if version >= 10:
+ version = int(version)
- yield ('docs/%s/%s' % (version, filename),
- testing and 0.1 or docprio, # beta/rc versions always get 0.1 in prio
- loaded)
+ yield ('docs/%s/%s' % (version, filename),
+ testing and 0.1 or docprio, # beta/rc versions always get 0.1 in prio
+ loaded)
- # Also yield the current version urls, with the highest
- # possible priority
- if version == currentversion.tree:
- yield ('docs/current/%s' % filename,
- 1.0, loaded)
+ # Also yield the current version urls, with the highest
+ # possible priority
+ if version == currentversion.tree:
+ yield ('docs/current/%s' % filename,
+ 1.0, loaded)
# For our internal sitemap (used only by our own search engine),
# include the devel version of the docs (and only those, since the
# other versions are already included)
def get_internal_struct():
- curs = connection.cursor()
- curs.execute("SELECT d.file, v.docsloaded FROM docs d INNER JOIN core_version v ON v.tree=d.version WHERE version = 0")
+ curs = connection.cursor()
+ curs.execute("SELECT d.file, v.docsloaded FROM docs d INNER JOIN core_version v ON v.tree=d.version WHERE version = 0")
- for filename, loaded in curs.fetchall():
- yield ('docs/devel/%s' % (filename, ),
- 0.1,
- loaded)
+ for filename, loaded in curs.fetchall():
+ yield ('docs/devel/%s' % (filename, ),
+ 0.1,
+ loaded)
diff --git a/pgweb/docs/views.py b/pgweb/docs/views.py
index 6cfb1860..d0936272 100644
--- a/pgweb/docs/views.py
+++ b/pgweb/docs/views.py
@@ -20,156 +20,156 @@ from forms import DocCommentForm
@allow_frames
@content_sources('style', "'unsafe-inline'")
def docpage(request, version, filename):
- loaddate = None
- # Get the current version both to map the /current/ url, and to later
- # determine if we allow comments on this page.
- currver = Version.objects.filter(current=True)[0].tree
- if version == 'current':
- ver = currver
- elif version == 'devel':
- ver = Decimal(0)
- loaddate = Version.objects.get(tree=Decimal(0)).docsloaded
- else:
- ver = Decimal(version)
- if ver == Decimal(0):
- raise Http404("Version not found")
+ loaddate = None
+ # Get the current version both to map the /current/ url, and to later
+ # determine if we allow comments on this page.
+ currver = Version.objects.filter(current=True)[0].tree
+ if version == 'current':
+ ver = currver
+ elif version == 'devel':
+ ver = Decimal(0)
+ loaddate = Version.objects.get(tree=Decimal(0)).docsloaded
+ else:
+ ver = Decimal(version)
+ if ver == Decimal(0):
+ raise Http404("Version not found")
- if ver < Decimal("7.1") and ver > Decimal(0):
- extension = "htm"
- else:
- extension = "html"
+ if ver < Decimal("7.1") and ver > Decimal(0):
+ extension = "htm"
+ else:
+ extension = "html"
- if ver < Decimal("7.1") and ver > Decimal(0):
- indexname = "postgres.htm"
- elif ver == Decimal("7.1"):
- indexname = "postgres.html"
- else:
- indexname = "index.html"
+ if ver < Decimal("7.1") and ver > Decimal(0):
+ indexname = "postgres.htm"
+ elif ver == Decimal("7.1"):
+ indexname = "postgres.html"
+ else:
+ indexname = "index.html"
- if ver >= 10 and version.find('.') > -1:
- # Version 10 and up, but specified as 10.0 / 11.0 etc, so redirect back without the
- # decimal.
- return HttpResponsePermanentRedirect("/docs/{0}/{1}.html".format(int(ver), filename))
+ if ver >= 10 and version.find('.') > -1:
+ # Version 10 and up, but specified as 10.0 / 11.0 etc, so redirect back without the
+ # decimal.
+ return HttpResponsePermanentRedirect("/docs/{0}/{1}.html".format(int(ver), filename))
- fullname = "%s.%s" % (filename, extension)
- page = get_object_or_404(DocPage, version=ver, file=fullname)
- versions = DocPage.objects.extra(
- where=["file=%s OR file IN (SELECT file2 FROM docsalias WHERE file1=%s) OR file IN (SELECT file1 FROM docsalias WHERE file2=%s)"],
- params=[fullname, fullname, fullname],
- select={
- 'supported':"COALESCE((SELECT supported FROM core_version v WHERE v.tree=version), 'f')",
- 'testing':"COALESCE((SELECT testing FROM core_version v WHERE v.tree=version),0)",
- }).order_by('-supported', 'version').only('version', 'file')
+ fullname = "%s.%s" % (filename, extension)
+ page = get_object_or_404(DocPage, version=ver, file=fullname)
+ versions = DocPage.objects.extra(
+ where=["file=%s OR file IN (SELECT file2 FROM docsalias WHERE file1=%s) OR file IN (SELECT file1 FROM docsalias WHERE file2=%s)"],
+ params=[fullname, fullname, fullname],
+ select={
+ 'supported':"COALESCE((SELECT supported FROM core_version v WHERE v.tree=version), 'f')",
+ 'testing':"COALESCE((SELECT testing FROM core_version v WHERE v.tree=version),0)",
+ }).order_by('-supported', 'version').only('version', 'file')
- return render(request, 'docs/docspage.html', {
- 'page': page,
- 'supported_versions': [v for v in versions if v.supported],
- 'devel_versions': [v for v in versions if not v.supported and v.testing],
- 'unsupported_versions': [v for v in versions if not v.supported and not v.testing],
- 'title': page.title,
- 'doc_index_filename': indexname,
- 'loaddate': loaddate,
- })
+ return render(request, 'docs/docspage.html', {
+ 'page': page,
+ 'supported_versions': [v for v in versions if v.supported],
+ 'devel_versions': [v for v in versions if not v.supported and v.testing],
+ 'unsupported_versions': [v for v in versions if not v.supported and not v.testing],
+ 'title': page.title,
+ 'doc_index_filename': indexname,
+ 'loaddate': loaddate,
+ })
def docspermanentredirect(request, version, typ, page, *args):
- """Provides a permanent redirect from the old static/interactive pages to
- the modern pages that do not have said keywords.
- """
- url = "/docs/%s/" % version
- if page:
- url += page
- return HttpResponsePermanentRedirect(url)
+ """Provides a permanent redirect from the old static/interactive pages to
+ the modern pages that do not have said keywords.
+ """
+ url = "/docs/%s/" % version
+ if page:
+ url += page
+ return HttpResponsePermanentRedirect(url)
def docsrootpage(request, version):
- return docpage(request, version, 'index')
+ return docpage(request, version, 'index')
def redirect_root(request, version):
- return HttpResponsePermanentRedirect("/docs/%s/" % version)
+ return HttpResponsePermanentRedirect("/docs/%s/" % version)
def root(request):
- versions = Version.objects.filter(Q(supported=True) | Q(testing__gt=0,tree__gt=0)).order_by('-tree')
- return render_pgweb(request, 'docs', 'docs/index.html', {
- 'versions': versions,
- })
+ versions = Version.objects.filter(Q(supported=True) | Q(testing__gt=0,tree__gt=0)).order_by('-tree')
+ return render_pgweb(request, 'docs', 'docs/index.html', {
+ 'versions': versions,
+ })
class _VersionPdfWrapper(object):
- """
- A wrapper around a version that knows to look for PDF files, and
- return their sizes.
- """
- def __init__(self, version):
- self.__version = version
- self.a4pdf = self._find_pdf('A4')
- self.uspdf = self._find_pdf('US')
- # Some versions have, ahem, strange index filenames
- if self.__version.tree < Decimal('6.4'):
- self.indexname = 'book01.htm'
- elif self.__version.tree < Decimal('7.0'):
- self.indexname = 'postgres.htm'
- elif self.__version.tree < Decimal('7.2'):
- self.indexname = 'postgres.html'
- else:
- self.indexname = 'index.html'
- def __getattr__(self, name):
- return getattr(self.__version, name)
- def _find_pdf(self, pagetype):
- try:
- return os.stat('%s/documentation/pdf/%s/postgresql-%s-%s.pdf' % (settings.STATIC_CHECKOUT, self.__version.numtree, self.__version.numtree, pagetype)).st_size
- except:
- return 0
+ """
+ A wrapper around a version that knows to look for PDF files, and
+ return their sizes.
+ """
+ def __init__(self, version):
+ self.__version = version
+ self.a4pdf = self._find_pdf('A4')
+ self.uspdf = self._find_pdf('US')
+ # Some versions have, ahem, strange index filenames
+ if self.__version.tree < Decimal('6.4'):
+ self.indexname = 'book01.htm'
+ elif self.__version.tree < Decimal('7.0'):
+ self.indexname = 'postgres.htm'
+ elif self.__version.tree < Decimal('7.2'):
+ self.indexname = 'postgres.html'
+ else:
+ self.indexname = 'index.html'
+ def __getattr__(self, name):
+ return getattr(self.__version, name)
+ def _find_pdf(self, pagetype):
+ try:
+ return os.stat('%s/documentation/pdf/%s/postgresql-%s-%s.pdf' % (settings.STATIC_CHECKOUT, self.__version.numtree, self.__version.numtree, pagetype)).st_size
+ except:
+ return 0
def manuals(request):
- versions = Version.objects.filter(Q(supported=True) | Q(testing__gt=0,tree__gt=0)).order_by('-tree')
- return render_pgweb(request, 'docs', 'docs/manuals.html', {
- 'versions': [_VersionPdfWrapper(v) for v in versions],
- })
+ versions = Version.objects.filter(Q(supported=True) | Q(testing__gt=0,tree__gt=0)).order_by('-tree')
+ return render_pgweb(request, 'docs', 'docs/manuals.html', {
+ 'versions': [_VersionPdfWrapper(v) for v in versions],
+ })
def manualarchive(request):
- versions = Version.objects.filter(testing=0,supported=False,tree__gt=0).order_by('-tree')
- return render_pgweb(request, 'docs', 'docs/archive.html', {
- 'versions': [_VersionPdfWrapper(v) for v in versions],
- })
+ versions = Version.objects.filter(testing=0,supported=False,tree__gt=0).order_by('-tree')
+ return render_pgweb(request, 'docs', 'docs/archive.html', {
+ 'versions': [_VersionPdfWrapper(v) for v in versions],
+ })
@login_required
def commentform(request, itemid, version, filename):
- v = get_object_or_404(Version, tree=version)
- if not v.supported:
- # No docs comments on unsupported versions
- return HttpResponseRedirect("/docs/{0}/{1}".format(version, filename))
+ v = get_object_or_404(Version, tree=version)
+ if not v.supported:
+ # No docs comments on unsupported versions
+ return HttpResponseRedirect("/docs/{0}/{1}".format(version, filename))
- if request.method == 'POST':
- form = DocCommentForm(request.POST)
- if form.is_valid():
- if version == '0.0':
- version = 'devel'
+ if request.method == 'POST':
+ form = DocCommentForm(request.POST)
+ if form.is_valid():
+ if version == '0.0':
+ version = 'devel'
- send_template_mail(
- settings.DOCSREPORT_NOREPLY_EMAIL,
- settings.DOCSREPORT_EMAIL,
- '%s' % form.cleaned_data['shortdesc'],
- 'docs/docsbugmail.txt', {
- 'version': version,
- 'filename': filename,
- 'details': form.cleaned_data['details'],
- },
- usergenerated=True,
- cc=form.cleaned_data['email'],
- replyto='%s, %s' % (form.cleaned_data['email'], settings.DOCSREPORT_EMAIL),
- sendername='PG Doc comments form'
- )
- return render_pgweb(request, 'docs', 'docs/docsbug_completed.html', {})
- else:
- form = DocCommentForm(initial={
- 'name': '%s %s' % (request.user.first_name, request.user.last_name),
- 'email': request.user.email,
- })
+ send_template_mail(
+ settings.DOCSREPORT_NOREPLY_EMAIL,
+ settings.DOCSREPORT_EMAIL,
+ '%s' % form.cleaned_data['shortdesc'],
+ 'docs/docsbugmail.txt', {
+ 'version': version,
+ 'filename': filename,
+ 'details': form.cleaned_data['details'],
+ },
+ usergenerated=True,
+ cc=form.cleaned_data['email'],
+ replyto='%s, %s' % (form.cleaned_data['email'], settings.DOCSREPORT_EMAIL),
+ sendername='PG Doc comments form'
+ )
+ return render_pgweb(request, 'docs', 'docs/docsbug_completed.html', {})
+ else:
+ form = DocCommentForm(initial={
+ 'name': '%s %s' % (request.user.first_name, request.user.last_name),
+ 'email': request.user.email,
+ })
- return render_pgweb(request, 'docs', 'base/form.html', {
- 'form': form,
- 'formitemtype': 'documentation comment',
- 'operation': 'Submit',
- 'form_intro': template_to_string('docs/docsbug.html', {
- 'user': request.user,
- }),
- 'savebutton': 'Send Email',
- })
+ return render_pgweb(request, 'docs', 'base/form.html', {
+ 'form': form,
+ 'formitemtype': 'documentation comment',
+ 'operation': 'Submit',
+ 'form_intro': template_to_string('docs/docsbug.html', {
+ 'user': request.user,
+ }),
+ 'savebutton': 'Send Email',
+ })
diff --git a/pgweb/downloads/admin.py b/pgweb/downloads/admin.py
index 3f417b4d..2449acbd 100644
--- a/pgweb/downloads/admin.py
+++ b/pgweb/downloads/admin.py
@@ -8,49 +8,49 @@ from pgweb.util.admin import PgwebAdmin
from models import StackBuilderApp, Category, Product, LicenceType
class ProductAdmin(PgwebAdmin):
- list_display = ('name', 'org', 'approved', 'lastconfirmed',)
- list_filter = ('approved',)
- search_fields = ('name', 'description', )
- ordering = ('name', )
+ list_display = ('name', 'org', 'approved', 'lastconfirmed',)
+ list_filter = ('approved',)
+ search_fields = ('name', 'description', )
+ ordering = ('name', )
def duplicate_stackbuilderapp(modeladmin, request, queryset):
- # Duplicate each individual selected object, but turn off
- # the active flag if it's on.
- for o in queryset:
- o.id = None # Triggers creation of a new object
- o.active = False
- o.textid = o.textid + "_new"
- o.save()
+ # Duplicate each individual selected object, but turn off
+ # the active flag if it's on.
+ for o in queryset:
+ o.id = None # Triggers creation of a new object
+ o.active = False
+ o.textid = o.textid + "_new"
+ o.save()
duplicate_stackbuilderapp.short_description = "Duplicate application"
class StackBuilderAppAdminForm(forms.ModelForm):
- class Meta:
- model = StackBuilderApp
- exclude = ()
+ class Meta:
+ model = StackBuilderApp
+ exclude = ()
- def clean_textid(self):
- if not re.match('^[a-z0-9_]*$', self.cleaned_data['textid']):
- raise ValidationError('Only lowerchase characters, numbers and underscore allowed!')
- return self.cleaned_data['textid']
+ def clean_textid(self):
+ if not re.match('^[a-z0-9_]*$', self.cleaned_data['textid']):
+ raise ValidationError('Only lowercase characters, numbers and underscore allowed!')
+ return self.cleaned_data['textid']
- def clean_txtdependencies(self):
- if len(self.cleaned_data['txtdependencies']) == 0:
- return ''
+ def clean_txtdependencies(self):
+ if len(self.cleaned_data['txtdependencies']) == 0:
+ return ''
- deplist = self.cleaned_data['txtdependencies'].split(',')
- if len(deplist) != len(set(deplist)):
- raise ValidationError('Duplicate dependencies not allowed!')
+ deplist = self.cleaned_data['txtdependencies'].split(',')
+ if len(deplist) != len(set(deplist)):
+ raise ValidationError('Duplicate dependencies not allowed!')
- for d in deplist:
- if not StackBuilderApp.objects.filter(textid=d).exists():
- raise ValidationError("Dependency '%s' does not exist!" % d)
- return self.cleaned_data['txtdependencies']
+ for d in deplist:
+ if not StackBuilderApp.objects.filter(textid=d).exists():
+ raise ValidationError("Dependency '%s' does not exist!" % d)
+ return self.cleaned_data['txtdependencies']
class StackBuilderAppAdmin(admin.ModelAdmin):
- list_display = ('textid', 'active', 'name', 'platform', 'version', )
- actions = [duplicate_stackbuilderapp, ]
- form = StackBuilderAppAdminForm
+ list_display = ('textid', 'active', 'name', 'platform', 'version', )
+ actions = [duplicate_stackbuilderapp, ]
+ form = StackBuilderAppAdminForm
admin.site.register(Category)
admin.site.register(LicenceType)
diff --git a/pgweb/downloads/forms.py b/pgweb/downloads/forms.py
index 93a952dd..150e0b70 100644
--- a/pgweb/downloads/forms.py
+++ b/pgweb/downloads/forms.py
@@ -4,12 +4,12 @@ from pgweb.core.models import Organisation
from models import Product
class ProductForm(forms.ModelForm):
- form_intro = """Note that in order to register a new product, you must first register an organisation.
+ form_intro = """Note that in order to register a new product, you must first register an organisation.
If you have not done so, use this form."""
- def __init__(self, *args, **kwargs):
- super(ProductForm, self).__init__(*args, **kwargs)
- def filter_by_user(self, user):
- self.fields['org'].queryset = Organisation.objects.filter(managers=user, approved=True)
- class Meta:
- model = Product
- exclude = ('lastconfirmed', 'approved', )
+ def __init__(self, *args, **kwargs):
+ super(ProductForm, self).__init__(*args, **kwargs)
+ def filter_by_user(self, user):
+ self.fields['org'].queryset = Organisation.objects.filter(managers=user, approved=True)
+ class Meta:
+ model = Product
+ exclude = ('lastconfirmed', 'approved', )
diff --git a/pgweb/downloads/models.py b/pgweb/downloads/models.py
index 61849e77..682fdd92 100644
--- a/pgweb/downloads/models.py
+++ b/pgweb/downloads/models.py
@@ -4,85 +4,85 @@ from pgweb.core.models import Organisation
class Category(models.Model):
- catname = models.CharField(max_length=100, null=False, blank=False)
- blurb = models.TextField(null=False, blank=True)
+ catname = models.CharField(max_length=100, null=False, blank=False)
+ blurb = models.TextField(null=False, blank=True)
- def __unicode__(self):
- return self.catname
+ def __unicode__(self):
+ return self.catname
- class Meta:
- ordering = ('catname',)
+ class Meta:
+ ordering = ('catname',)
class LicenceType(models.Model):
- typename = models.CharField(max_length=100, null=False, blank=False)
+ typename = models.CharField(max_length=100, null=False, blank=False)
- def __unicode__(self):
- return self.typename
+ def __unicode__(self):
+ return self.typename
- class Meta:
- ordering = ('typename',)
+ class Meta:
+ ordering = ('typename',)
class Product(models.Model):
- name = models.CharField(max_length=100, null=False, blank=False, unique=True)
- approved = models.BooleanField(null=False, default=False)
- org = models.ForeignKey(Organisation, db_column="publisher_id", null=False, verbose_name="Organisation")
- url = models.URLField(null=False, blank=False)
- category = models.ForeignKey(Category, null=False)
- licencetype = models.ForeignKey(LicenceType, null=False, verbose_name="Licence type")
- description = models.TextField(null=False, blank=False)
- price = models.CharField(max_length=200, null=False, blank=True)
- lastconfirmed = models.DateTimeField(null=False, blank=False, auto_now_add=True)
+ name = models.CharField(max_length=100, null=False, blank=False, unique=True)
+ approved = models.BooleanField(null=False, default=False)
+ org = models.ForeignKey(Organisation, db_column="publisher_id", null=False, verbose_name="Organisation")
+ url = models.URLField(null=False, blank=False)
+ category = models.ForeignKey(Category, null=False)
+ licencetype = models.ForeignKey(LicenceType, null=False, verbose_name="Licence type")
+ description = models.TextField(null=False, blank=False)
+ price = models.CharField(max_length=200, null=False, blank=True)
+ lastconfirmed = models.DateTimeField(null=False, blank=False, auto_now_add=True)
- send_notification = True
- markdown_fields = ('description', )
+ send_notification = True
+ markdown_fields = ('description', )
- def __unicode__(self):
- return self.name
+ def __unicode__(self):
+ return self.name
- def verify_submitter(self, user):
- return (len(self.org.managers.filter(pk=user.pk)) == 1)
+ def verify_submitter(self, user):
+ return (len(self.org.managers.filter(pk=user.pk)) == 1)
- class Meta:
- ordering = ('name',)
+ class Meta:
+ ordering = ('name',)
class StackBuilderApp(models.Model):
- textid = models.CharField(max_length=100, null=False, blank=False)
- version = models.CharField(max_length=20, null=False, blank=False)
- platform = models.CharField(max_length=20, null=False, blank=False,
- choices= (('windows', 'Windows (32-bit)'), ('windows-x64', 'Windows (64-bit)'), ('osx', 'Mac OS X'),
- ('linux', 'Linux (32-bit)'), ('linux-x64', 'Linux (64-bit)'))
- )
- secondaryplatform = models.CharField(max_length=20, null=False, blank=True,
- choices= (('', 'None'), ('windows', 'Windows (32-bit)'), ('windows-x64', 'Windows (64-bit)'),
- ('osx', 'Mac OS X'), ('linux', 'Linux (32-bit)'), ('linux-x64', 'Linux (64-bit)'))
- )
- name = models.CharField(max_length=500, null=False, blank=False)
- active = models.BooleanField(null=False, blank=False, default=True)
- description = models.TextField(null=False, blank=False)
- category = models.CharField(max_length=100, null=False, blank=False)
- pgversion = models.CharField(max_length=5, null=False, blank=True)
- edbversion = models.CharField(max_length=5, null=False, blank=True)
- format = models.CharField(max_length=5, null=False, blank=False,
- choices = (('bin', 'Linux .bin'), ('app', 'Mac .app'),
- ('pkg', 'Mac .pkg'), ('mpkg', 'Mac .mpkg'),
- ('exe', 'Windows .exe'), ('msi', 'Windows .msi'))
- )
- installoptions = models.CharField(max_length=500, null=False, blank=True)
- upgradeoptions = models.CharField(max_length=500, null=False, blank=True)
- checksum = models.CharField(max_length=32, null=False, blank=False)
- mirrorpath = models.CharField(max_length=500, null=False, blank=True)
- alturl = models.URLField(max_length=500, null=False, blank=True)
- txtdependencies = models.CharField(max_length=1000, null=False, blank=True,
- verbose_name='Dependencies',
- help_text='Comma separated list of text dependencies, no spaces!')
- versionkey = models.CharField(max_length=500, null=False, blank=False)
- manifesturl = models.URLField(max_length=500, null=False, blank=True)
+ textid = models.CharField(max_length=100, null=False, blank=False)
+ version = models.CharField(max_length=20, null=False, blank=False)
+ platform = models.CharField(max_length=20, null=False, blank=False,
+ choices= (('windows', 'Windows (32-bit)'), ('windows-x64', 'Windows (64-bit)'), ('osx', 'Mac OS X'),
+ ('linux', 'Linux (32-bit)'), ('linux-x64', 'Linux (64-bit)'))
+ )
+ secondaryplatform = models.CharField(max_length=20, null=False, blank=True,
+ choices= (('', 'None'), ('windows', 'Windows (32-bit)'), ('windows-x64', 'Windows (64-bit)'),
+ ('osx', 'Mac OS X'), ('linux', 'Linux (32-bit)'), ('linux-x64', 'Linux (64-bit)'))
+ )
+ name = models.CharField(max_length=500, null=False, blank=False)
+ active = models.BooleanField(null=False, blank=False, default=True)
+ description = models.TextField(null=False, blank=False)
+ category = models.CharField(max_length=100, null=False, blank=False)
+ pgversion = models.CharField(max_length=5, null=False, blank=True)
+ edbversion = models.CharField(max_length=5, null=False, blank=True)
+ format = models.CharField(max_length=5, null=False, blank=False,
+ choices = (('bin', 'Linux .bin'), ('app', 'Mac .app'),
+ ('pkg', 'Mac .pkg'), ('mpkg', 'Mac .mpkg'),
+ ('exe', 'Windows .exe'), ('msi', 'Windows .msi'))
+ )
+ installoptions = models.CharField(max_length=500, null=False, blank=True)
+ upgradeoptions = models.CharField(max_length=500, null=False, blank=True)
+ checksum = models.CharField(max_length=32, null=False, blank=False)
+ mirrorpath = models.CharField(max_length=500, null=False, blank=True)
+ alturl = models.URLField(max_length=500, null=False, blank=True)
+ txtdependencies = models.CharField(max_length=1000, null=False, blank=True,
+ verbose_name='Dependencies',
+ help_text='Comma separated list of text dependencies, no spaces!')
+ versionkey = models.CharField(max_length=500, null=False, blank=False)
+ manifesturl = models.URLField(max_length=500, null=False, blank=True)
- purge_urls = ('/applications-v2.xml', )
+ purge_urls = ('/applications-v2.xml', )
- def __unicode__(self):
- return "%s %s %s" % (self.textid, self.version, self.platform)
+ def __unicode__(self):
+ return "%s %s %s" % (self.textid, self.version, self.platform)
- class Meta:
- unique_together = ('textid', 'version', 'platform', )
- ordering = ('textid', 'name', 'platform', )
+ class Meta:
+ unique_together = ('textid', 'version', 'platform', )
+ ordering = ('textid', 'name', 'platform', )
diff --git a/pgweb/downloads/struct.py b/pgweb/downloads/struct.py
index 0f717f2e..27a92072 100644
--- a/pgweb/downloads/struct.py
+++ b/pgweb/downloads/struct.py
@@ -1,10 +1,10 @@
from models import Category
def get_struct():
- # Products
- for c in Category.objects.all():
- yield ('download/products/%s/' % c.id,
- 0.3)
+ # Products
+ for c in Category.objects.all():
+ yield ('download/products/%s/' % c.id,
+ 0.3)
- # Don't index the ftp browser for now - it doesn't really contain
- # anything useful to search
+ # Don't index the ftp browser for now - it doesn't really contain
+ # anything useful to search
diff --git a/pgweb/downloads/views.py b/pgweb/downloads/views.py
index 051afc82..b51f829e 100644
--- a/pgweb/downloads/views.py
+++ b/pgweb/downloads/views.py
@@ -21,101 +21,101 @@ from forms import ProductForm
# FTP browser
#######
def ftpbrowser(request, subpath):
- if subpath:
- # An actual path has been selected. Fancy!
+ if subpath:
+ # An actual path has been selected. Fancy!
- if subpath.find('..') > -1:
- # Just claim it doesn't exist if the user tries to do this
- # type of bad thing
- raise Http404
- subpath = subpath.strip('/')
- else:
- subpath=""
+ if subpath.find('..') > -1:
+ # Just claim it doesn't exist if the user tries to do this
+ # type of bad thing
+ raise Http404
+ subpath = subpath.strip('/')
+ else:
+ subpath=""
- # Pickle up the list of things we need
- try:
- f = open(settings.FTP_PICKLE, "rb")
- allnodes = pickle.load(f)
- f.close()
- except Exception, e:
- return HttpServerError(request, "Failed to load ftp site information: %s" % e)
+ # Pickle up the list of things we need
+ try:
+ f = open(settings.FTP_PICKLE, "rb")
+ allnodes = pickle.load(f)
+ f.close()
+ except Exception, e:
+ return HttpServerError(request, "Failed to load ftp site information: %s" % e)
- # An incoming subpath may either be canonical, or have one or more elements
- # present that are actually symlinks. For each element of the path, test to
- # see if it is present in the pickle. If not, look for a symlink entry with
- # and if present, replace the original entry with the symlink target.
- canonpath = ''
- if subpath != '':
- parent = ''
- for d in subpath.split('/'):
- # Check if allnodes contains a node matching the path
- if allnodes[parent].has_key(d):
- if allnodes[parent][d]['t'] == 'd':
- canonpath = os.path.join(canonpath, d)
- elif allnodes[parent][d]['t'] == 'l':
- canonpath = os.path.join(canonpath, allnodes[parent][d]['d']).strip('/')
- else:
- # There's a matching node, but it's not a link or a directory
- raise Http404
+ # An incoming subpath may either be canonical, or have one or more elements
+ # present that are actually symlinks. For each element of the path, test to
+ # see if it is present in the pickle. If not, look for a symlink entry with
+ # and if present, replace the original entry with the symlink target.
+ canonpath = ''
+ if subpath != '':
+ parent = ''
+ for d in subpath.split('/'):
+ # Check if allnodes contains a node matching the path
+ if allnodes[parent].has_key(d):
+ if allnodes[parent][d]['t'] == 'd':
+ canonpath = os.path.join(canonpath, d)
+ elif allnodes[parent][d]['t'] == 'l':
+ canonpath = os.path.join(canonpath, allnodes[parent][d]['d']).strip('/')
+ else:
+ # There's a matching node, but it's not a link or a directory
+ raise Http404
- parent = canonpath
- else:
- # There's no matching node
- raise Http404
+ parent = canonpath
+ else:
+ # There's no matching node
+ raise Http404
- # If we wound up with a canonical path that doesn't match the original request,
- # redirect the user
- canonpath = canonpath.strip('/')
- if subpath != canonpath:
- return HttpResponseRedirect('/ftp/' + canonpath)
+ # If we wound up with a canonical path that doesn't match the original request,
+ # redirect the user
+ canonpath = canonpath.strip('/')
+ if subpath != canonpath:
+ return HttpResponseRedirect('/ftp/' + canonpath)
- node = allnodes[subpath]
- del allnodes
+ node = allnodes[subpath]
+ del allnodes
- # Add all directories
- directories = [{'link': k, 'url': k, 'type': 'd'} for k,v in node.items() if v['t'] == 'd']
- # Add all symlinks (only directories supported)
- directories.extend([{'link': k, 'url': v['d'], 'type': 'l'} for k,v in node.items() if v['t'] == 'l'])
+ # Add all directories
+ directories = [{'link': k, 'url': k, 'type': 'd'} for k,v in node.items() if v['t'] == 'd']
+ # Add all symlinks (only directories supported)
+ directories.extend([{'link': k, 'url': v['d'], 'type': 'l'} for k,v in node.items() if v['t'] == 'l'])
- # A ittle early sorting wouldn't go amiss, so .. ends up at the top
- directories.sort(key = version_sort, reverse=True)
+ # A little early sorting wouldn't go amiss, so .. ends up at the top
+ directories.sort(key = version_sort, reverse=True)
- # Add a link to the parent directory
- if subpath:
- directories.insert(0, {'link':'[Parent Directory]', 'url':'..'})
+ # Add a link to the parent directory
+ if subpath:
+ directories.insert(0, {'link':'[Parent Directory]', 'url':'..'})
- # Fetch files
- files = [{'name': k, 'mtime': v['d'], 'size': v['s']} for k,v in node.items() if v['t'] == 'f']
+ # Fetch files
+ files = [{'name': k, 'mtime': v['d'], 'size': v['s']} for k,v in node.items() if v['t'] == 'f']
- breadcrumbs = []
- if subpath:
- breadroot = ""
- for pathpiece in subpath.split('/'):
- if not pathpiece:
- # Trailing slash will give out an empty pathpiece
- continue
- if breadroot:
- breadroot = "%s/%s" % (breadroot, pathpiece)
- else:
- breadroot = pathpiece
- breadcrumbs.append({'name': pathpiece, 'path': breadroot});
+ breadcrumbs = []
+ if subpath:
+ breadroot = ""
+ for pathpiece in subpath.split('/'):
+ if not pathpiece:
+ # Trailing slash will give out an empty pathpiece
+ continue
+ if breadroot:
+ breadroot = "%s/%s" % (breadroot, pathpiece)
+ else:
+ breadroot = pathpiece
+ breadcrumbs.append({'name': pathpiece, 'path': breadroot});
- # Check if there are any "content files" we should render directly on the webpage
- file_readme = (node.has_key('README') and node['README']['t']=='f') and node['README']['c'] or None;
- file_message = (node.has_key('.message') and node['.message']['t']=='f') and node['.message']['c'] or None;
- file_maintainer = (node.has_key('CURRENT_MAINTAINER') and node['CURRENT_MAINTAINER']['t'] == 'f') and node['CURRENT_MAINTAINER']['c'] or None;
+ # Check if there are any "content files" we should render directly on the webpage
+ file_readme = (node.has_key('README') and node['README']['t']=='f') and node['README']['c'] or None;
+ file_message = (node.has_key('.message') and node['.message']['t']=='f') and node['.message']['c'] or None;
+ file_maintainer = (node.has_key('CURRENT_MAINTAINER') and node['CURRENT_MAINTAINER']['t'] == 'f') and node['CURRENT_MAINTAINER']['c'] or None;
- del node
+ del node
- return render_pgweb(request, 'download', 'downloads/ftpbrowser.html', {
- 'basepath': subpath.rstrip('/'),
- 'directories': directories,
- 'files': sorted(files),
- 'breadcrumbs': breadcrumbs,
- 'readme': file_readme,
- 'messagefile': file_message,
- 'maintainer': file_maintainer,
- })
+ return render_pgweb(request, 'download', 'downloads/ftpbrowser.html', {
+ 'basepath': subpath.rstrip('/'),
+ 'directories': directories,
+ 'files': sorted(files),
+ 'breadcrumbs': breadcrumbs,
+ 'readme': file_readme,
+ 'messagefile': file_message,
+ 'maintainer': file_maintainer,
+ })
# Accept an upload of the ftpsite pickle. This is fairly resource consuming,
@@ -125,135 +125,135 @@ def ftpbrowser(request, subpath):
# file in parallel.
@csrf_exempt
def uploadftp(request):
- if request.method != 'PUT':
- return HttpServerError(request, "Invalid method")
- if not request.META['REMOTE_ADDR'] in settings.FTP_MASTERS:
- return HttpServerError(request, "Invalid client address")
- # We have the data in request.body. Attempt to load it as
- # a pickle to make sure it's properly formatted
- pickle.loads(request.body)
+ if request.method != 'PUT':
+ return HttpServerError(request, "Invalid method")
+ if not request.META['REMOTE_ADDR'] in settings.FTP_MASTERS:
+ return HttpServerError(request, "Invalid client address")
+ # We have the data in request.body. Attempt to load it as
+ # a pickle to make sure it's properly formatted
+ pickle.loads(request.body)
- # Next, check if it's the same as the current file
- f = open(settings.FTP_PICKLE, "rb")
- x = f.read()
- f.close()
- if x == request.body:
- # Don't rewrite the file or purge any data if nothing changed
- return HttpResponse("NOT CHANGED", content_type="text/plain")
+ # Next, check if it's the same as the current file
+ f = open(settings.FTP_PICKLE, "rb")
+ x = f.read()
+ f.close()
+ if x == request.body:
+ # Don't rewrite the file or purge any data if nothing changed
+ return HttpResponse("NOT CHANGED", content_type="text/plain")
- # File has changed - let's write it!
- f = open("%s.new" % settings.FTP_PICKLE, "wb")
- f.write(request.body)
- f.close()
- os.rename("%s.new" % settings.FTP_PICKLE, settings.FTP_PICKLE)
+ # File has changed - let's write it!
+ f = open("%s.new" % settings.FTP_PICKLE, "wb")
+ f.write(request.body)
+ f.close()
+ os.rename("%s.new" % settings.FTP_PICKLE, settings.FTP_PICKLE)
- # Purge it out of varnish so we start responding right away
- varnish_purge("/ftp")
+ # Purge it out of varnish so we start responding right away
+ varnish_purge("/ftp")
- # Finally, indicate to the client that we're happy
- return HttpResponse("OK", content_type="text/plain")
+ # Finally, indicate to the client that we're happy
+ return HttpResponse("OK", content_type="text/plain")
@csrf_exempt
def uploadyum(request):
- if request.method != 'PUT':
- return HttpServerError(request, "Invalid method")
- if not request.META['REMOTE_ADDR'] in settings.FTP_MASTERS:
- return HttpServerError(request, "Invalid client address")
- # We have the data in request.body. Attempt to load it as
- # json to ensure correct format.
- json.loads(request.body)
+ if request.method != 'PUT':
+ return HttpServerError(request, "Invalid method")
+ if not request.META['REMOTE_ADDR'] in settings.FTP_MASTERS:
+ return HttpServerError(request, "Invalid client address")
+ # We have the data in request.body. Attempt to load it as
+ # json to ensure correct format.
+ json.loads(request.body)
- # Next, check if it's the same as the current file
- if os.path.isfile(settings.YUM_JSON):
- with open(settings.YUM_JSON, "r") as f:
- if f.read() == request.body:
- # Don't rewrite the file or purge any data if nothing changed
- return HttpResponse("NOT CHANGED", content_type="text/plain")
+ # Next, check if it's the same as the current file
+ if os.path.isfile(settings.YUM_JSON):
+ with open(settings.YUM_JSON, "r") as f:
+ if f.read() == request.body:
+ # Don't rewrite the file or purge any data if nothing changed
+ return HttpResponse("NOT CHANGED", content_type="text/plain")
- # File has changed - let's write it!
- with open("%s.new" % settings.YUM_JSON, "w") as f:
- f.write(request.body)
+ # File has changed - let's write it!
+ with open("%s.new" % settings.YUM_JSON, "w") as f:
+ f.write(request.body)
- os.rename("%s.new" % settings.YUM_JSON, settings.YUM_JSON)
+ os.rename("%s.new" % settings.YUM_JSON, settings.YUM_JSON)
- # Purge it out of varnish so we start responding right away
- varnish_purge("/download/js/yum.js")
+ # Purge it out of varnish so we start responding right away
+ varnish_purge("/download/js/yum.js")
- # Finally, indicate to the client that we're happy
- return HttpResponse("OK", content_type="text/plain")
+ # Finally, indicate to the client that we're happy
+ return HttpResponse("OK", content_type="text/plain")
@nocache
def mirrorselect(request, path):
- # Old access to mirrors will just redirect to the main ftp site.
- # We don't really need it anymore, but the cost of keeping it is
- # very low...
- return HttpResponseRedirect("https://ftp.postgresql.org/pub/%s" % path)
+ # Old access to mirrors will just redirect to the main ftp site.
+ # We don't really need it anymore, but the cost of keeping it is
+ # very low...
+ return HttpResponseRedirect("https://ftp.postgresql.org/pub/%s" % path)
# Render javascript for yum downloads
def yum_js(request):
- with open(settings.YUM_JSON) as f:
- jsonstr = f.read()
- return render(request, 'downloads/js/yum.js', {
- 'json': jsonstr,
- 'supported_versions': ','.join([str(v.numtree) for v in Version.objects.filter(supported=True)]),
- }, content_type='application/json')
+ with open(settings.YUM_JSON) as f:
+ jsonstr = f.read()
+ return render(request, 'downloads/js/yum.js', {
+ 'json': jsonstr,
+ 'supported_versions': ','.join([str(v.numtree) for v in Version.objects.filter(supported=True)]),
+ }, content_type='application/json')
#######
# Product catalogue
#######
def categorylist(request):
- categories = Category.objects.all()
- return render_pgweb(request, 'download', 'downloads/categorylist.html', {
- 'categories': categories,
- })
+ categories = Category.objects.all()
+ return render_pgweb(request, 'download', 'downloads/categorylist.html', {
+ 'categories': categories,
+ })
def productlist(request, catid, junk=None):
- category = get_object_or_404(Category, pk=catid)
- products = Product.objects.select_related('org','licencetype').filter(category=category, approved=True)
- return render_pgweb(request, 'download', 'downloads/productlist.html', {
- 'category': category,
- 'products': products,
- 'productcount': len(products),
- })
+ category = get_object_or_404(Category, pk=catid)
+ products = Product.objects.select_related('org','licencetype').filter(category=category, approved=True)
+ return render_pgweb(request, 'download', 'downloads/productlist.html', {
+ 'category': category,
+ 'products': products,
+ 'productcount': len(products),
+ })
@login_required
def productform(request, itemid):
- return simple_form(Product, itemid, request, ProductForm,
- redirect='/account/edit/products/')
+ return simple_form(Product, itemid, request, ProductForm,
+ redirect='/account/edit/products/')
#######
# Stackbuilder
#######
def applications_v2_xml(request):
- all_apps = StackBuilderApp.objects.select_related().filter(active=True)
+ all_apps = StackBuilderApp.objects.select_related().filter(active=True)
- resp = HttpResponse(content_type='text/xml')
- x = PgXmlHelper(resp, skipempty=True)
- x.startDocument()
- x.startElement('applications', {})
- for a in all_apps:
- x.startElement('application', {})
- x.add_xml_element('id', a.textid)
- x.add_xml_element('platform', a.platform)
- x.add_xml_element('secondaryplatform', a.secondaryplatform)
- x.add_xml_element('version', a.version)
- x.add_xml_element('name', a.name)
- x.add_xml_element('description', a.description)
- x.add_xml_element('category', a.category)
- x.add_xml_element('pgversion', a.pgversion)
- x.add_xml_element('edbversion', a.edbversion)
- x.add_xml_element('format', a.format)
- x.add_xml_element('installoptions', a.installoptions)
- x.add_xml_element('upgradeoptions', a.upgradeoptions)
- x.add_xml_element('checksum', a.checksum)
- x.add_xml_element('mirrorpath', a.mirrorpath)
- x.add_xml_element('alturl', a.alturl)
- x.add_xml_element('versionkey', a.versionkey)
- x.add_xml_element('manifesturl', a.manifesturl)
- for dep in a.txtdependencies.split(','):
- x.add_xml_element('dependency', dep)
- x.endElement('application')
- x.endElement('applications')
- x.endDocument()
- return resp
+ resp = HttpResponse(content_type='text/xml')
+ x = PgXmlHelper(resp, skipempty=True)
+ x.startDocument()
+ x.startElement('applications', {})
+ for a in all_apps:
+ x.startElement('application', {})
+ x.add_xml_element('id', a.textid)
+ x.add_xml_element('platform', a.platform)
+ x.add_xml_element('secondaryplatform', a.secondaryplatform)
+ x.add_xml_element('version', a.version)
+ x.add_xml_element('name', a.name)
+ x.add_xml_element('description', a.description)
+ x.add_xml_element('category', a.category)
+ x.add_xml_element('pgversion', a.pgversion)
+ x.add_xml_element('edbversion', a.edbversion)
+ x.add_xml_element('format', a.format)
+ x.add_xml_element('installoptions', a.installoptions)
+ x.add_xml_element('upgradeoptions', a.upgradeoptions)
+ x.add_xml_element('checksum', a.checksum)
+ x.add_xml_element('mirrorpath', a.mirrorpath)
+ x.add_xml_element('alturl', a.alturl)
+ x.add_xml_element('versionkey', a.versionkey)
+ x.add_xml_element('manifesturl', a.manifesturl)
+ for dep in a.txtdependencies.split(','):
+ x.add_xml_element('dependency', dep)
+ x.endElement('application')
+ x.endElement('applications')
+ x.endDocument()
+ return resp
diff --git a/pgweb/events/admin.py b/pgweb/events/admin.py
index dd2084fa..2ac51df1 100644
--- a/pgweb/events/admin.py
+++ b/pgweb/events/admin.py
@@ -5,35 +5,35 @@ from pgweb.util.admin import PgwebAdmin
from models import Event
def approve_event(modeladmin, request, queryset):
- # We need to do this in a loop even though it's less efficient,
- # since using queryset.update() will not send the moderation messages.
- for e in queryset:
- e.approved = True
- e.save()
+ # We need to do this in a loop even though it's less efficient,
+ # since using queryset.update() will not send the moderation messages.
+ for e in queryset:
+ e.approved = True
+ e.save()
approve_event.short_description = 'Approve event'
class EventAdminForm(forms.ModelForm):
- class Meta:
- model = Event
- exclude = ()
+ class Meta:
+ model = Event
+ exclude = ()
- def clean(self):
- cleaned_data = super(EventAdminForm, self).clean()
- if not cleaned_data.get('isonline'):
- if not cleaned_data.get('city'):
- self._errors['city'] = self.error_class(['City must be specified for non-online events'])
- del cleaned_data['city']
- if not cleaned_data.get('country'):
- self._errors['country'] = self.error_class(['Country must be specified for non-online events'])
- del cleaned_data['country']
- return cleaned_data
+ def clean(self):
+ cleaned_data = super(EventAdminForm, self).clean()
+ if not cleaned_data.get('isonline'):
+ if not cleaned_data.get('city'):
+ self._errors['city'] = self.error_class(['City must be specified for non-online events'])
+ del cleaned_data['city']
+ if not cleaned_data.get('country'):
+ self._errors['country'] = self.error_class(['Country must be specified for non-online events'])
+ del cleaned_data['country']
+ return cleaned_data
class EventAdmin(PgwebAdmin):
- list_display = ('title', 'org', 'startdate', 'enddate', 'approved',)
- list_filter = ('approved',)
- search_fields = ('summary', 'details', 'title', )
- actions = [approve_event, ]
- form = EventAdminForm
+ list_display = ('title', 'org', 'startdate', 'enddate', 'approved',)
+ list_filter = ('approved',)
+ search_fields = ('summary', 'details', 'title', )
+ actions = [approve_event, ]
+ form = EventAdminForm
admin.site.register(Event, EventAdmin)
diff --git a/pgweb/events/feeds.py b/pgweb/events/feeds.py
index 6273975e..21aa0d4b 100644
--- a/pgweb/events/feeds.py
+++ b/pgweb/events/feeds.py
@@ -5,17 +5,17 @@ from models import Event
from datetime import datetime, time
class EventFeed(Feed):
- title = description = "PostgreSQL events"
- link = "https://www.postgresql.org/"
+ title = description = "PostgreSQL events"
+ link = "https://www.postgresql.org/"
- description_template = 'events/rss_description.html'
- title_template = 'events/rss_title.html'
+ description_template = 'events/rss_description.html'
+ title_template = 'events/rss_title.html'
- def items(self):
- return Event.objects.filter(approved=True)[:10]
+ def items(self):
+ return Event.objects.filter(approved=True)[:10]
- def item_link(self, obj):
- return "https://www.postgresql.org/about/event/%s/" % obj.id
+ def item_link(self, obj):
+ return "https://www.postgresql.org/about/event/%s/" % obj.id
- def item_pubdate(self, obj):
- return datetime.combine(obj.startdate,time.min)
+ def item_pubdate(self, obj):
+ return datetime.combine(obj.startdate,time.min)
diff --git a/pgweb/events/forms.py b/pgweb/events/forms.py
index 9dfce0c8..460c5a76 100644
--- a/pgweb/events/forms.py
+++ b/pgweb/events/forms.py
@@ -5,45 +5,45 @@ from pgweb.core.models import Organisation
from models import Event
class EventForm(forms.ModelForm):
- toggle_fields = [
- {
- 'name': 'isonline',
- 'invert': True,
- 'fields': ['city', 'state', 'country',]
- },
- ]
- def __init__(self, *args, **kwargs):
- super(EventForm, self).__init__(*args, **kwargs)
- def filter_by_user(self, user):
- self.fields['org'].queryset = Organisation.objects.filter(managers=user, approved=True)
+ toggle_fields = [
+ {
+ 'name': 'isonline',
+ 'invert': True,
+ 'fields': ['city', 'state', 'country',]
+ },
+ ]
+ def __init__(self, *args, **kwargs):
+ super(EventForm, self).__init__(*args, **kwargs)
+ def filter_by_user(self, user):
+ self.fields['org'].queryset = Organisation.objects.filter(managers=user, approved=True)
- def clean(self):
- cleaned_data = super(EventForm, self).clean()
- if not cleaned_data.get('isonline'):
- # Non online events require city and country
- # (we don't require state, since many countries have no such thing)
- if not cleaned_data.get('city'):
- self._errors['city'] = self.error_class(['City must be specified for non-online events'])
- del cleaned_data['city']
- if not cleaned_data.get('country'):
- self._errors['country'] = self.error_class(['Country must be specified for non-online events'])
- del cleaned_data['country']
- return cleaned_data
+ def clean(self):
+ cleaned_data = super(EventForm, self).clean()
+ if not cleaned_data.get('isonline'):
+ # Non online events require city and country
+ # (we don't require state, since many countries have no such thing)
+ if not cleaned_data.get('city'):
+ self._errors['city'] = self.error_class(['City must be specified for non-online events'])
+ del cleaned_data['city']
+ if not cleaned_data.get('country'):
+ self._errors['country'] = self.error_class(['Country must be specified for non-online events'])
+ del cleaned_data['country']
+ return cleaned_data
- def clean_startdate(self):
- if self.instance.pk and self.instance.approved:
- if self.cleaned_data['startdate'] != self.instance.startdate:
- raise ValidationError("You cannot change the dates on events that have been approved")
- return self.cleaned_data['startdate']
+ def clean_startdate(self):
+ if self.instance.pk and self.instance.approved:
+ if self.cleaned_data['startdate'] != self.instance.startdate:
+ raise ValidationError("You cannot change the dates on events that have been approved")
+ return self.cleaned_data['startdate']
- def clean_enddate(self):
- if self.instance.pk and self.instance.approved:
- if self.cleaned_data['enddate'] != self.instance.enddate:
- raise ValidationError("You cannot change the dates on events that have been approved")
- if self.cleaned_data.has_key('startdate') and self.cleaned_data['enddate'] < self.cleaned_data['startdate']:
- raise ValidationError("End date cannot be before start date!")
- return self.cleaned_data['enddate']
+ def clean_enddate(self):
+ if self.instance.pk and self.instance.approved:
+ if self.cleaned_data['enddate'] != self.instance.enddate:
+ raise ValidationError("You cannot change the dates on events that have been approved")
+ if self.cleaned_data.has_key('startdate') and self.cleaned_data['enddate'] < self.cleaned_data['startdate']:
+ raise ValidationError("End date cannot be before start date!")
+ return self.cleaned_data['enddate']
- class Meta:
- model = Event
- exclude = ('submitter', 'approved', 'description_for_badged')
+ class Meta:
+ model = Event
+ exclude = ('submitter', 'approved', 'description_for_badged')
diff --git a/pgweb/events/models.py b/pgweb/events/models.py
index 8969afb5..f956d931 100644
--- a/pgweb/events/models.py
+++ b/pgweb/events/models.py
@@ -3,68 +3,68 @@ from django.db import models
from pgweb.core.models import Country, Language, Organisation
class Event(models.Model):
- approved = models.BooleanField(null=False, blank=False, default=False)
+ approved = models.BooleanField(null=False, blank=False, default=False)
- org = models.ForeignKey(Organisation, null=False, blank=False, verbose_name="Organisation", help_text="If no organisations are listed, please check the organisation list and contact the organisation manager or webmaster@postgresql.org if none are listed.")
- title = models.CharField(max_length=100, null=False, blank=False)
- isonline = models.BooleanField(null=False, default=False, verbose_name="Online event")
- city = models.CharField(max_length=50, null=False, blank=True)
- state = models.CharField(max_length=50, null=False, blank=True)
- country = models.ForeignKey(Country, null=True, blank=True)
- language = models.ForeignKey(Language, null=True, blank=True, default='eng', help_text="Primary language for event. When multiple languages, specify this in the event description")
+ org = models.ForeignKey(Organisation, null=False, blank=False, verbose_name="Organisation", help_text="If no organisations are listed, please check the organisation list and contact the organisation manager or webmaster@postgresql.org if none are listed.")
+ title = models.CharField(max_length=100, null=False, blank=False)
+ isonline = models.BooleanField(null=False, default=False, verbose_name="Online event")
+ city = models.CharField(max_length=50, null=False, blank=True)
+ state = models.CharField(max_length=50, null=False, blank=True)
+ country = models.ForeignKey(Country, null=True, blank=True)
+ language = models.ForeignKey(Language, null=True, blank=True, default='eng', help_text="Primary language for event. When multiple languages, specify this in the event description")
- badged = models.BooleanField(null=False, blank=False, default=False, verbose_name='Community event', help_text='Choose "Community event" if this is a community recognized event following the community event guidelines.')
- description_for_badged = models.TextField(blank=True, null=True, verbose_name='Description for community event', help_text='DEPRECRATED: This was used in the beginning of community events to collect additional information.')
- startdate = models.DateField(null=False, blank=False, verbose_name="Start date")
- enddate = models.DateField(null=False, blank=False, verbose_name="End date")
+ badged = models.BooleanField(null=False, blank=False, default=False, verbose_name='Community event', help_text='Choose "Community event" if this is a community recognized event following the community event guidelines.')
+ description_for_badged = models.TextField(blank=True, null=True, verbose_name='Description for community event', help_text='DEPRECRATED: This was used in the beginning of community events to collect additional information.')
+ startdate = models.DateField(null=False, blank=False, verbose_name="Start date")
+ enddate = models.DateField(null=False, blank=False, verbose_name="End date")
- summary = models.TextField(blank=False, null=False, help_text="A short introduction (shown on the events listing page)")
- details = models.TextField(blank=False, null=False, help_text="Complete event description")
+ summary = models.TextField(blank=False, null=False, help_text="A short introduction (shown on the events listing page)")
+ details = models.TextField(blank=False, null=False, help_text="Complete event description")
- send_notification = True
- markdown_fields = ('details', 'summary', )
+ send_notification = True
+ markdown_fields = ('details', 'summary', )
- def purge_urls(self):
- yield '/about/event/%s/' % self.pk
- yield '/about/events/'
- yield '/events.rss'
- # FIXME: when to expire the front page?
- yield '/$'
+ def purge_urls(self):
+ yield '/about/event/%s/' % self.pk
+ yield '/about/events/'
+ yield '/events.rss'
+ # FIXME: when to expire the front page?
+ yield '/$'
- def __unicode__(self):
- return "%s: %s" % (self.startdate, self.title)
+ def __unicode__(self):
+ return "%s: %s" % (self.startdate, self.title)
- def verify_submitter(self, user):
- return (len(self.org.managers.filter(pk=user.pk)) == 1)
+ def verify_submitter(self, user):
+ return (len(self.org.managers.filter(pk=user.pk)) == 1)
- @property
- def has_organisation(self):
- mgrs = self.org.managers.all()
- if len(mgrs) == 1:
- if mgrs[0].pk == 0:
- return False # Migration organisation
- else:
- return True # Has an actual organisation
- elif len(mgrs) > 1:
- # More than one manager means it must be new
- return True
- return False # Has no organisastion at all
+ @property
+ def has_organisation(self):
+ mgrs = self.org.managers.all()
+ if len(mgrs) == 1:
+ if mgrs[0].pk == 0:
+ return False # Migration organisation
+ else:
+ return True # Has an actual organisation
+ elif len(mgrs) > 1:
+ # More than one manager means it must be new
+ return True
+		return False # Has no organisation at all
- @property
- def displaydate(self):
- if self.startdate == self.enddate:
- return self.startdate
- else:
- return "%s – %s" % (self.startdate, self.enddate)
+ @property
+ def displaydate(self):
+ if self.startdate == self.enddate:
+ return self.startdate
+ else:
+ return "%s – %s" % (self.startdate, self.enddate)
- @property
- def locationstring(self):
- if self.isonline:
- return "online"
- elif self.state:
- return "%s, %s, %s" % (self.city, self.state, self.country)
- else:
- return "%s, %s" % (self.city, self.country)
+ @property
+ def locationstring(self):
+ if self.isonline:
+ return "online"
+ elif self.state:
+ return "%s, %s, %s" % (self.city, self.state, self.country)
+ else:
+ return "%s, %s" % (self.city, self.country)
- class Meta:
- ordering = ('-startdate','-enddate',)
+ class Meta:
+ ordering = ('-startdate','-enddate',)
diff --git a/pgweb/events/struct.py b/pgweb/events/struct.py
index 488acb28..e60303dd 100644
--- a/pgweb/events/struct.py
+++ b/pgweb/events/struct.py
@@ -2,16 +2,16 @@ from datetime import date
from models import Event
def get_struct():
- now = date.today()
+ now = date.today()
- # We intentionally don't put /about/eventarchive/ in the sitemap,
- # since we don't care about getting it indexed.
- # We only show events in the future, so only index events in the
- # future...
+ # We intentionally don't put /about/eventarchive/ in the sitemap,
+ # since we don't care about getting it indexed.
+ # We only show events in the future, so only index events in the
+ # future...
- for n in Event.objects.filter(approved=True, enddate__gt=now):
- yearsold = (now - n.startdate).days / 365
- if yearsold > 4:
- yearsold = 4
- yield ('about/event/%s/' % n.id,
- 0.5-(yearsold/10.0))
+ for n in Event.objects.filter(approved=True, enddate__gt=now):
+ yearsold = (now - n.startdate).days / 365
+ if yearsold > 4:
+ yearsold = 4
+ yield ('about/event/%s/' % n.id,
+ 0.5-(yearsold/10.0))
diff --git a/pgweb/events/views.py b/pgweb/events/views.py
index ed00367d..d0fa7da8 100644
--- a/pgweb/events/views.py
+++ b/pgweb/events/views.py
@@ -11,39 +11,39 @@ from models import Event
from forms import EventForm
def main(request):
- community_events = Event.objects.select_related('country').filter(approved=True, badged=True).filter(enddate__gt=date.today()).order_by('enddate', 'startdate',)
- other_events = Event.objects.select_related('country').filter(approved=True, badged=False).filter(enddate__gt=date.today()).order_by('enddate', 'startdate',)
- return render_pgweb(request, 'about', 'events/archive.html', {
- 'title': 'Upcoming Events',
- 'eventblocks': (
- { 'name': 'Community Events', 'events': community_events, 'link': '',},
- { 'name': 'Other Events', 'events': other_events, 'link': '',},
- ),
- })
+ community_events = Event.objects.select_related('country').filter(approved=True, badged=True).filter(enddate__gt=date.today()).order_by('enddate', 'startdate',)
+ other_events = Event.objects.select_related('country').filter(approved=True, badged=False).filter(enddate__gt=date.today()).order_by('enddate', 'startdate',)
+ return render_pgweb(request, 'about', 'events/archive.html', {
+ 'title': 'Upcoming Events',
+ 'eventblocks': (
+ { 'name': 'Community Events', 'events': community_events, 'link': '',},
+ { 'name': 'Other Events', 'events': other_events, 'link': '',},
+ ),
+ })
def _eventarchive(request, title):
- # Hardcode to the latest 100 events. Do we need paging too?
- events = Event.objects.select_related('country').filter(approved=True).filter(enddate__lte=date.today()).order_by('-enddate', '-startdate',)[:100]
- return render_pgweb(request, 'about', 'events/archive.html', {
- 'title': '%s Archive' % title,
- 'archive': True,
- 'eventblocks': (
- {'name': title, 'events': events, },
- ),
- })
+ # Hardcode to the latest 100 events. Do we need paging too?
+ events = Event.objects.select_related('country').filter(approved=True).filter(enddate__lte=date.today()).order_by('-enddate', '-startdate',)[:100]
+ return render_pgweb(request, 'about', 'events/archive.html', {
+ 'title': '%s Archive' % title,
+ 'archive': True,
+ 'eventblocks': (
+ {'name': title, 'events': events, },
+ ),
+ })
def archive(request):
- return _eventarchive(request, 'Event')
+ return _eventarchive(request, 'Event')
def item(request, itemid, throwaway=None):
- event = get_object_or_404(Event, pk=itemid)
- if not event.approved:
- raise Http404
- return render_pgweb(request, 'about', 'events/item.html', {
- 'obj': event,
- })
+ event = get_object_or_404(Event, pk=itemid)
+ if not event.approved:
+ raise Http404
+ return render_pgweb(request, 'about', 'events/item.html', {
+ 'obj': event,
+ })
@login_required
def form(request, itemid):
- return simple_form(Event, itemid, request, EventForm,
- redirect='/account/edit/events/')
+ return simple_form(Event, itemid, request, EventForm,
+ redirect='/account/edit/events/')
diff --git a/pgweb/featurematrix/admin.py b/pgweb/featurematrix/admin.py
index 2aa1b8ce..1581c830 100644
--- a/pgweb/featurematrix/admin.py
+++ b/pgweb/featurematrix/admin.py
@@ -3,17 +3,17 @@ from django.contrib import admin
from models import Feature, FeatureGroup
class FeatureInline(admin.TabularInline):
- model = Feature
+ model = Feature
class FeatureGroupAdmin(admin.ModelAdmin):
- inlines = [FeatureInline, ]
- list_display = ('groupname', 'groupsort')
- ordering = ['groupsort']
+ inlines = [FeatureInline, ]
+ list_display = ('groupname', 'groupsort')
+ ordering = ['groupsort']
class FeatureAdmin(admin.ModelAdmin):
- list_display = ('featurename', 'group')
- list_filter = ('group',)
- search_fields = ('featurename',)
+ list_display = ('featurename', 'group')
+ list_filter = ('group',)
+ search_fields = ('featurename',)
admin.site.register(FeatureGroup, FeatureGroupAdmin)
admin.site.register(Feature, FeatureAdmin)
diff --git a/pgweb/featurematrix/migrations/0002_featurematrix_96.py b/pgweb/featurematrix/migrations/0002_featurematrix_96.py
index df798fc0..833932eb 100644
--- a/pgweb/featurematrix/migrations/0002_featurematrix_96.py
+++ b/pgweb/featurematrix/migrations/0002_featurematrix_96.py
@@ -16,5 +16,5 @@ class Migration(migrations.Migration):
name='v96',
field=models.IntegerField(default=0, verbose_name=b'9.6', choices=[(0, b'No'), (1, b'Yes'), (2, b'Obsolete'), (3, b'?')]),
),
- migrations.RunSQL("UPDATE featurematrix_feature SET v96=v95 WHERE NOT v96=v95"),
+ migrations.RunSQL("UPDATE featurematrix_feature SET v96=v95 WHERE NOT v96=v95"),
]
diff --git a/pgweb/featurematrix/migrations/0003_feature_v10.py b/pgweb/featurematrix/migrations/0003_feature_v10.py
index 6935faf3..182e0770 100644
--- a/pgweb/featurematrix/migrations/0003_feature_v10.py
+++ b/pgweb/featurematrix/migrations/0003_feature_v10.py
@@ -16,5 +16,5 @@ class Migration(migrations.Migration):
name='v10',
field=models.IntegerField(default=0, verbose_name=b'10', choices=[(0, b'No'), (1, b'Yes'), (2, b'Obsolete'), (3, b'?')]),
),
- migrations.RunSQL("UPDATE featurematrix_feature SET v10=v96 WHERE NOT v10=v96"),
+ migrations.RunSQL("UPDATE featurematrix_feature SET v10=v96 WHERE NOT v10=v96"),
]
diff --git a/pgweb/featurematrix/models.py b/pgweb/featurematrix/models.py
index 433e3c3f..9ed6cbd6 100644
--- a/pgweb/featurematrix/models.py
+++ b/pgweb/featurematrix/models.py
@@ -9,58 +9,58 @@ choices_map = {
choices = [(k, v['str']) for k,v in choices_map.items()]
class FeatureGroup(models.Model):
- groupname = models.CharField(max_length=100, null=False, blank=False)
- groupsort = models.IntegerField(null=False, blank=False)
+ groupname = models.CharField(max_length=100, null=False, blank=False)
+ groupsort = models.IntegerField(null=False, blank=False)
- purge_urls = ('/about/featurematrix/', )
+ purge_urls = ('/about/featurematrix/', )
- def __unicode__(self):
- return self.groupname
+ def __unicode__(self):
+ return self.groupname
- @property
- def columns(self):
- # Return a list of all the columns for the matrix
- return [b for a,b in versions]
+ @property
+ def columns(self):
+ # Return a list of all the columns for the matrix
+ return [b for a,b in versions]
class Feature(models.Model):
- group = models.ForeignKey(FeatureGroup, null=False, blank=False)
- featurename = models.CharField(max_length=100, null=False, blank=False)
- featuredescription = models.TextField(null=False, blank=True)
- #WARNING! All fields that start with "v" will be considered versions!
- v74 = models.IntegerField(verbose_name="7.4", null=False, blank=False, default=0, choices=choices)
- v74.visible_default = False
- v80 = models.IntegerField(verbose_name="8.0", null=False, blank=False, default=0, choices=choices)
- v80.visible_default = False
- v81 = models.IntegerField(verbose_name="8.1", null=False, blank=False, default=0, choices=choices)
- v82 = models.IntegerField(verbose_name="8.2", null=False, blank=False, default=0, choices=choices)
- v83 = models.IntegerField(verbose_name="8.3", null=False, blank=False, default=0, choices=choices)
- v84 = models.IntegerField(verbose_name="8.4", null=False, blank=False, default=0, choices=choices)
- v90 = models.IntegerField(verbose_name="9.0", null=False, blank=False, default=0, choices=choices)
- v91 = models.IntegerField(verbose_name="9.1", null=False, blank=False, default=0, choices=choices)
- v92 = models.IntegerField(verbose_name="9.2", null=False, blank=False, default=0, choices=choices)
- v93 = models.IntegerField(verbose_name="9.3", null=False, blank=False, default=0, choices=choices)
- v94 = models.IntegerField(verbose_name="9.4", null=False, blank=False, default=0, choices=choices)
- v95 = models.IntegerField(verbose_name="9.5", null=False, blank=False, default=0, choices=choices)
- v96 = models.IntegerField(verbose_name="9.6", null=False, blank=False, default=0, choices=choices)
- v10 = models.IntegerField(verbose_name="10", null=False, blank=False, default=0, choices=choices)
- v11 = models.IntegerField(verbose_name="11", null=False, blank=False, default=0, choices=choices)
+ group = models.ForeignKey(FeatureGroup, null=False, blank=False)
+ featurename = models.CharField(max_length=100, null=False, blank=False)
+ featuredescription = models.TextField(null=False, blank=True)
+ #WARNING! All fields that start with "v" will be considered versions!
+ v74 = models.IntegerField(verbose_name="7.4", null=False, blank=False, default=0, choices=choices)
+ v74.visible_default = False
+ v80 = models.IntegerField(verbose_name="8.0", null=False, blank=False, default=0, choices=choices)
+ v80.visible_default = False
+ v81 = models.IntegerField(verbose_name="8.1", null=False, blank=False, default=0, choices=choices)
+ v82 = models.IntegerField(verbose_name="8.2", null=False, blank=False, default=0, choices=choices)
+ v83 = models.IntegerField(verbose_name="8.3", null=False, blank=False, default=0, choices=choices)
+ v84 = models.IntegerField(verbose_name="8.4", null=False, blank=False, default=0, choices=choices)
+ v90 = models.IntegerField(verbose_name="9.0", null=False, blank=False, default=0, choices=choices)
+ v91 = models.IntegerField(verbose_name="9.1", null=False, blank=False, default=0, choices=choices)
+ v92 = models.IntegerField(verbose_name="9.2", null=False, blank=False, default=0, choices=choices)
+ v93 = models.IntegerField(verbose_name="9.3", null=False, blank=False, default=0, choices=choices)
+ v94 = models.IntegerField(verbose_name="9.4", null=False, blank=False, default=0, choices=choices)
+ v95 = models.IntegerField(verbose_name="9.5", null=False, blank=False, default=0, choices=choices)
+ v96 = models.IntegerField(verbose_name="9.6", null=False, blank=False, default=0, choices=choices)
+ v10 = models.IntegerField(verbose_name="10", null=False, blank=False, default=0, choices=choices)
+ v11 = models.IntegerField(verbose_name="11", null=False, blank=False, default=0, choices=choices)
- purge_urls = ('/about/featurematrix/.*', )
+ purge_urls = ('/about/featurematrix/.*', )
- def __unicode__(self):
- # To make it look good in the admin interface, just don't render it
- return ''
+ def __unicode__(self):
+ # To make it look good in the admin interface, just don't render it
+ return ''
- def columns(self):
- # Get a list of column based on all versions that are visible_default
- return [choices_map[getattr(self, a)] for a,b in versions]
+ def columns(self):
+ # Get a list of column based on all versions that are visible_default
+ return [choices_map[getattr(self, a)] for a,b in versions]
- @property
- def featurelink(self):
- if self.featuredescription.startswith('https://') or self.featuredescription.startswith('http://'):
- return self.featuredescription
- else:
- return 'detail/%s/' % self.id
+ @property
+ def featurelink(self):
+ if self.featuredescription.startswith('https://') or self.featuredescription.startswith('http://'):
+ return self.featuredescription
+ else:
+ return 'detail/%s/' % self.id
versions = [(f.name,f.verbose_name) for f in Feature()._meta.fields if f.name.startswith('v') and getattr(f, 'visible_default', True)]
versions = sorted(versions, key=lambda f: -float(f[1]))
diff --git a/pgweb/featurematrix/struct.py b/pgweb/featurematrix/struct.py
index c3eef315..3601f227 100644
--- a/pgweb/featurematrix/struct.py
+++ b/pgweb/featurematrix/struct.py
@@ -1,2 +1,2 @@
def get_struct():
- yield ('about/featurematrix/', None)
+ yield ('about/featurematrix/', None)
diff --git a/pgweb/featurematrix/views.py b/pgweb/featurematrix/views.py
index bd445390..a50c9fd0 100644
--- a/pgweb/featurematrix/views.py
+++ b/pgweb/featurematrix/views.py
@@ -6,31 +6,31 @@ from pgweb.core.models import Version
from models import Feature
def root(request):
- features = Feature.objects.all().select_related().order_by('group__groupsort', 'group__groupname', 'featurename')
- groups = []
- lastgroup = -1
- currentgroup = None
- for f in features:
- if f.group.id != lastgroup:
- if currentgroup:
- groups.append(currentgroup)
- lastgroup = f.group.id
- currentgroup = {
- 'group': f.group,
- 'features': [],
- }
- currentgroup['features'].append(f)
- if currentgroup:
- groups.append(currentgroup)
+ features = Feature.objects.all().select_related().order_by('group__groupsort', 'group__groupname', 'featurename')
+ groups = []
+ lastgroup = -1
+ currentgroup = None
+ for f in features:
+ if f.group.id != lastgroup:
+ if currentgroup:
+ groups.append(currentgroup)
+ lastgroup = f.group.id
+ currentgroup = {
+ 'group': f.group,
+ 'features': [],
+ }
+ currentgroup['features'].append(f)
+ if currentgroup:
+ groups.append(currentgroup)
- versions = Version.objects.filter(tree__gte='8.1').order_by('-tree')
- return render_pgweb(request, 'about', 'featurematrix/featurematrix.html', {
- 'groups': groups,
- 'versions': versions,
- })
+ versions = Version.objects.filter(tree__gte='8.1').order_by('-tree')
+ return render_pgweb(request, 'about', 'featurematrix/featurematrix.html', {
+ 'groups': groups,
+ 'versions': versions,
+ })
def detail(request, featureid):
- feature = get_object_or_404(Feature, pk=featureid)
- return render_pgweb(request, 'about', 'featurematrix/featuredetail.html', {
- 'feature': feature,
- })
+ feature = get_object_or_404(Feature, pk=featureid)
+ return render_pgweb(request, 'about', 'featurematrix/featuredetail.html', {
+ 'feature': feature,
+ })
diff --git a/pgweb/legacyurl/views.py b/pgweb/legacyurl/views.py
index 1a7823b2..5fda8fca 100644
--- a/pgweb/legacyurl/views.py
+++ b/pgweb/legacyurl/views.py
@@ -1,5 +1,5 @@
from django.http import HttpResponseRedirect
def mailpref(request, listname):
- # Just redirect to the homepage of pglister, don't try specific lists
- return HttpResponseRedirect("https://lists.postgresql.org/")
+ # Just redirect to the homepage of pglister, don't try specific lists
+ return HttpResponseRedirect("https://lists.postgresql.org/")
diff --git a/pgweb/lists/management/commands/sync_lists.py b/pgweb/lists/management/commands/sync_lists.py
index 53c89633..fe2701a6 100644
--- a/pgweb/lists/management/commands/sync_lists.py
+++ b/pgweb/lists/management/commands/sync_lists.py
@@ -8,52 +8,52 @@ from django.conf import settings
import requests
class Command(BaseCommand):
- help = 'Synchronize mailinglists'
+ help = 'Synchronize mailinglists'
- def add_arguments(self, parser):
- parser.add_argument('--dryrun', action='store_true', help="Don't commit changes")
+ def add_arguments(self, parser):
+ parser.add_argument('--dryrun', action='store_true', help="Don't commit changes")
- def handle(self, *args, **options):
- if settings.ARCHIVES_SEARCH_PLAINTEXT:
- proto="http"
- else:
- proto="https"
- r = requests.get('{0}://{1}/listinfo/'.format(proto, settings.ARCHIVES_SEARCH_SERVER))
- j = r.json()
- allgroups = list(set([l['group'] for l in j]))
- with transaction.atomic():
- curs = connection.cursor()
+ def handle(self, *args, **options):
+ if settings.ARCHIVES_SEARCH_PLAINTEXT:
+ proto="http"
+ else:
+ proto="https"
+ r = requests.get('{0}://{1}/listinfo/'.format(proto, settings.ARCHIVES_SEARCH_SERVER))
+ j = r.json()
+ allgroups = list(set([l['group'] for l in j]))
+ with transaction.atomic():
+ curs = connection.cursor()
- # Add any groups necessary
- curs.execute("INSERT INTO lists_mailinglistgroup (groupname, sortkey) SELECT n,50 FROM UNNEST(%s) n(n) WHERE NOT EXISTS (SELECT 1 FROM lists_mailinglistgroup WHERE groupname=n) RETURNING groupname", (allgroups,))
- for n, in curs.fetchall():
- print "Added group %s" % n
+ # Add any groups necessary
+ curs.execute("INSERT INTO lists_mailinglistgroup (groupname, sortkey) SELECT n,50 FROM UNNEST(%s) n(n) WHERE NOT EXISTS (SELECT 1 FROM lists_mailinglistgroup WHERE groupname=n) RETURNING groupname", (allgroups,))
+ for n, in curs.fetchall():
+ print "Added group %s" % n
- # Add and update lists
- for l in j:
- curs.execute("SELECT id FROM lists_mailinglist WHERE listname=%s", (l['name'],))
- if curs.rowcount == 0:
- curs.execute("INSERT INTO lists_mailinglist (listname, group_id, active, description, shortdesc) VALUES (%s, (SELECT id FROM lists_mailinglistgroup WHERE groupname=%s), %s, %s, %s)", (
- l['name'], l['group'], l['active'], l['description'], l['shortdesc']))
- print "Added list %s" % l['name']
- else:
- curs.execute("UPDATE lists_mailinglist SET group_id=(SELECT id FROM lists_mailinglistgroup WHERE groupname=%s), active=%s, description=%s, shortdesc=%s WHERE listname=%s AND NOT (group_id=(SELECT id FROM lists_mailinglistgroup WHERE groupname=%s) AND active=%s AND description=%s AND shortdesc=%s) RETURNING listname", (
- l['group'], l['active'], l['description'], l['shortdesc'],
- l['name'],
- l['group'], l['active'], l['description'], l['shortdesc'],
- ))
- for n, in curs.fetchall():
- print "Updated list %s" % n
+ # Add and update lists
+ for l in j:
+ curs.execute("SELECT id FROM lists_mailinglist WHERE listname=%s", (l['name'],))
+ if curs.rowcount == 0:
+ curs.execute("INSERT INTO lists_mailinglist (listname, group_id, active, description, shortdesc) VALUES (%s, (SELECT id FROM lists_mailinglistgroup WHERE groupname=%s), %s, %s, %s)", (
+ l['name'], l['group'], l['active'], l['description'], l['shortdesc']))
+ print "Added list %s" % l['name']
+ else:
+ curs.execute("UPDATE lists_mailinglist SET group_id=(SELECT id FROM lists_mailinglistgroup WHERE groupname=%s), active=%s, description=%s, shortdesc=%s WHERE listname=%s AND NOT (group_id=(SELECT id FROM lists_mailinglistgroup WHERE groupname=%s) AND active=%s AND description=%s AND shortdesc=%s) RETURNING listname", (
+ l['group'], l['active'], l['description'], l['shortdesc'],
+ l['name'],
+ l['group'], l['active'], l['description'], l['shortdesc'],
+ ))
+ for n, in curs.fetchall():
+ print "Updated list %s" % n
- # Delete any lists that shouldn't exist anymore (this is safe because we don't keep any data about them,
- # so they are trivial to add back)
- curs.execute("DELETE FROM lists_mailinglist WHERE NOT listname=ANY(%s) RETURNING listname", ([l['name'] for l in j],))
- for n, in curs.fetchall():
- print "Deleted list %s" % n
- # Delete listgroups
- curs.execute("DELETE FROM lists_mailinglistgroup WHERE NOT groupname=ANY(%s) RETURNING groupname", (allgroups,))
- for n, in curs.fetchall():
- print "Deleted group %s" % n
+ # Delete any lists that shouldn't exist anymore (this is safe because we don't keep any data about them,
+ # so they are trivial to add back)
+ curs.execute("DELETE FROM lists_mailinglist WHERE NOT listname=ANY(%s) RETURNING listname", ([l['name'] for l in j],))
+ for n, in curs.fetchall():
+ print "Deleted list %s" % n
+ # Delete listgroups
+ curs.execute("DELETE FROM lists_mailinglistgroup WHERE NOT groupname=ANY(%s) RETURNING groupname", (allgroups,))
+ for n, in curs.fetchall():
+ print "Deleted group %s" % n
- if options['dryrun']:
- raise CommandError("Dry run, rolling back")
+ if options['dryrun']:
+ raise CommandError("Dry run, rolling back")
diff --git a/pgweb/lists/models.py b/pgweb/lists/models.py
index 9c7aa720..0d398c9a 100644
--- a/pgweb/lists/models.py
+++ b/pgweb/lists/models.py
@@ -1,38 +1,38 @@
from django.db import models
class MailingListGroup(models.Model):
- groupname = models.CharField(max_length=64, null=False, blank=False)
- sortkey = models.IntegerField(null=False, default=10)
+ groupname = models.CharField(max_length=64, null=False, blank=False)
+ sortkey = models.IntegerField(null=False, default=10)
- purge_urls = ('/community/lists/', )
+ purge_urls = ('/community/lists/', )
- @property
- def negid(self):
- return -self.id
+ @property
+ def negid(self):
+ return -self.id
- def __unicode__(self):
- return self.groupname
+ def __unicode__(self):
+ return self.groupname
- class Meta:
- ordering = ('sortkey', )
+ class Meta:
+ ordering = ('sortkey', )
class MailingList(models.Model):
- group = models.ForeignKey(MailingListGroup, null=False)
- listname = models.CharField(max_length=64, null=False, blank=False, unique=True)
- active = models.BooleanField(null=False, default=False)
- description = models.TextField(null=False, blank=True)
- shortdesc = models.TextField(null=False, blank=True)
+ group = models.ForeignKey(MailingListGroup, null=False)
+ listname = models.CharField(max_length=64, null=False, blank=False, unique=True)
+ active = models.BooleanField(null=False, default=False)
+ description = models.TextField(null=False, blank=True)
+ shortdesc = models.TextField(null=False, blank=True)
- purge_urls = ('/community/lists/', )
+ purge_urls = ('/community/lists/', )
- @property
- def maybe_shortdesc(self):
- if self.shortdesc:
- return self.shortdesc
- return self.listname
+ @property
+ def maybe_shortdesc(self):
+ if self.shortdesc:
+ return self.shortdesc
+ return self.listname
- def __unicode__(self):
- return self.listname
+ def __unicode__(self):
+ return self.listname
- class Meta:
- ordering = ('listname', )
+ class Meta:
+ ordering = ('listname', )
diff --git a/pgweb/lists/struct.py b/pgweb/lists/struct.py
index e42eb5df..02509b97 100644
--- a/pgweb/lists/struct.py
+++ b/pgweb/lists/struct.py
@@ -1,2 +1,2 @@
def get_struct():
- yield ('community/lists/', None)
+ yield ('community/lists/', None)
diff --git a/pgweb/lists/views.py b/pgweb/lists/views.py
index 46ce8dc4..62db498f 100644
--- a/pgweb/lists/views.py
+++ b/pgweb/lists/views.py
@@ -5,19 +5,19 @@ import json
from models import MailingList, MailingListGroup
def listinfo(request):
- resp = HttpResponse(content_type='application/json')
- groupdata = [ {
- 'id': g.id,
- 'name': g.groupname,
- 'sort': g.sortkey,
- } for g in MailingListGroup.objects.all()]
- listdata = [ {
- 'id': l.id,
- 'name': l.listname,
- 'groupid': l.group_id,
- 'active': l.active,
- 'shortdesc': l.shortdesc,
- 'description': l.description,
- } for l in MailingList.objects.all()]
- json.dump({'groups': groupdata, 'lists': listdata}, resp)
- return resp
+ resp = HttpResponse(content_type='application/json')
+ groupdata = [ {
+ 'id': g.id,
+ 'name': g.groupname,
+ 'sort': g.sortkey,
+ } for g in MailingListGroup.objects.all()]
+ listdata = [ {
+ 'id': l.id,
+ 'name': l.listname,
+ 'groupid': l.group_id,
+ 'active': l.active,
+ 'shortdesc': l.shortdesc,
+ 'description': l.description,
+ } for l in MailingList.objects.all()]
+ json.dump({'groups': groupdata, 'lists': listdata}, resp)
+ return resp
diff --git a/pgweb/mailqueue/admin.py b/pgweb/mailqueue/admin.py
index 50ddc9e3..801d1163 100644
--- a/pgweb/mailqueue/admin.py
+++ b/pgweb/mailqueue/admin.py
@@ -5,26 +5,26 @@ from email.parser import Parser
from models import QueuedMail
class QueuedMailAdmin(admin.ModelAdmin):
- model = QueuedMail
- readonly_fields = ('parsed_content', )
+ model = QueuedMail
+ readonly_fields = ('parsed_content', )
- def parsed_content(self, obj):
- # We only try to parse the *first* piece, because we assume
- # all our emails are trivial.
- try:
- parser = Parser()
- msg = parser.parsestr(obj.fullmsg)
- b = msg.get_payload(decode=True)
- if b: return b
+ def parsed_content(self, obj):
+ # We only try to parse the *first* piece, because we assume
+ # all our emails are trivial.
+ try:
+ parser = Parser()
+ msg = parser.parsestr(obj.fullmsg)
+ b = msg.get_payload(decode=True)
+ if b: return b
- pl = msg.get_payload()
- for p in pl:
- b = p.get_payload(decode=True)
- if b: return b
- return "Could not find body"
- except Exception, e:
- return "Failed to get body: %s" % e
+ pl = msg.get_payload()
+ for p in pl:
+ b = p.get_payload(decode=True)
+ if b: return b
+ return "Could not find body"
+ except Exception, e:
+ return "Failed to get body: %s" % e
- parsed_content.short_description = 'Parsed mail'
+ parsed_content.short_description = 'Parsed mail'
admin.site.register(QueuedMail, QueuedMailAdmin)
diff --git a/pgweb/mailqueue/management/commands/send_queued_mail.py b/pgweb/mailqueue/management/commands/send_queued_mail.py
index 59508185..5e019fb6 100755
--- a/pgweb/mailqueue/management/commands/send_queued_mail.py
+++ b/pgweb/mailqueue/management/commands/send_queued_mail.py
@@ -14,35 +14,35 @@ import smtplib
from pgweb.mailqueue.models import QueuedMail
class Command(BaseCommand):
- help = 'Send queued mail'
+ help = 'Send queued mail'
- def handle(self, *args, **options):
- # Grab advisory lock, if available. Lock id is just a random number
- # since we only need to interlock against ourselves. The lock is
- # automatically released when we're done.
- curs = connection.cursor()
- curs.execute("SELECT pg_try_advisory_lock(72181372)")
- if not curs.fetchall()[0][0]:
- raise CommandError("Failed to get advisory lock, existing send_queued_mail process stuck?")
+ def handle(self, *args, **options):
+ # Grab advisory lock, if available. Lock id is just a random number
+ # since we only need to interlock against ourselves. The lock is
+ # automatically released when we're done.
+ curs = connection.cursor()
+ curs.execute("SELECT pg_try_advisory_lock(72181372)")
+ if not curs.fetchall()[0][0]:
+ raise CommandError("Failed to get advisory lock, existing send_queued_mail process stuck?")
- for m in QueuedMail.objects.all():
- # Yes, we do a new connection for each run. Just because we can.
- # If it fails we'll throw an exception and just come back on the
- # next cron job. And local delivery should never fail...
- if m.usergenerated:
- # User generated email gets relayed directly over a frontend
- smtphost = settings.FRONTEND_SMTP_RELAY
- else:
- smtphost = 'localhost'
- smtp = smtplib.SMTP(smtphost)
- try:
- smtp.sendmail(m.sender, m.receiver, m.fullmsg.encode('utf-8'))
- except (smtplib.SMTPSenderRefused, smtplib.SMTPRecipientsRefused, smtplib.SMTPDataError):
- # If this was user generated, this indicates the antispam
- # kicking in, so we just ignore it. If it's anything else,
- # we want to let the exception through.
- if not m.usergenerated:
- raise
+ for m in QueuedMail.objects.all():
+ # Yes, we do a new connection for each run. Just because we can.
+ # If it fails we'll throw an exception and just come back on the
+ # next cron job. And local delivery should never fail...
+ if m.usergenerated:
+ # User generated email gets relayed directly over a frontend
+ smtphost = settings.FRONTEND_SMTP_RELAY
+ else:
+ smtphost = 'localhost'
+ smtp = smtplib.SMTP(smtphost)
+ try:
+ smtp.sendmail(m.sender, m.receiver, m.fullmsg.encode('utf-8'))
+ except (smtplib.SMTPSenderRefused, smtplib.SMTPRecipientsRefused, smtplib.SMTPDataError):
+ # If this was user generated, this indicates the antispam
+ # kicking in, so we just ignore it. If it's anything else,
+ # we want to let the exception through.
+ if not m.usergenerated:
+ raise
- smtp.close()
- m.delete()
+ smtp.close()
+ m.delete()
diff --git a/pgweb/mailqueue/models.py b/pgweb/mailqueue/models.py
index 36d73eee..10c50f3d 100644
--- a/pgweb/mailqueue/models.py
+++ b/pgweb/mailqueue/models.py
@@ -1,14 +1,14 @@
from django.db import models
class QueuedMail(models.Model):
- sender = models.EmailField(max_length=100, null=False, blank=False)
- receiver = models.EmailField(max_length=100, null=False, blank=False)
- # We store the raw MIME message, so if there are any attachments or
- # anything, we just push them right in there!
- fullmsg = models.TextField(null=False, blank=False)
- # Flag if the message is "user generated", so we can treat those
- # separately from an antispam and delivery perspective.
- usergenerated = models.BooleanField(null=False, blank=False, default=False)
+ sender = models.EmailField(max_length=100, null=False, blank=False)
+ receiver = models.EmailField(max_length=100, null=False, blank=False)
+ # We store the raw MIME message, so if there are any attachments or
+ # anything, we just push them right in there!
+ fullmsg = models.TextField(null=False, blank=False)
+ # Flag if the message is "user generated", so we can treat those
+ # separately from an antispam and delivery perspective.
+ usergenerated = models.BooleanField(null=False, blank=False, default=False)
- def __unicode__(self):
- return "%s: %s -> %s" % (self.pk, self.sender, self.receiver)
+ def __unicode__(self):
+ return "%s: %s -> %s" % (self.pk, self.sender, self.receiver)
diff --git a/pgweb/mailqueue/util.py b/pgweb/mailqueue/util.py
index 9f09e728..71c63350 100644
--- a/pgweb/mailqueue/util.py
+++ b/pgweb/mailqueue/util.py
@@ -9,49 +9,49 @@ from email.header import Header
from models import QueuedMail
def _encoded_email_header(name, email):
- if name:
- return formataddr((str(Header(name, 'utf-8')), email))
- return email
+ if name:
+ return formataddr((str(Header(name, 'utf-8')), email))
+ return email
def send_simple_mail(sender, receiver, subject, msgtxt, attachments=None, usergenerated=False, cc=None, replyto=None, sendername=None, receivername=None, messageid=None):
- # attachment format, each is a tuple of (name, mimetype,contents)
- # content should be *binary* and not base64 encoded, since we need to
- # use the base64 routines from the email library to get a properly
- # formatted output message
- msg = MIMEMultipart()
- msg['Subject'] = subject
- msg['To'] = _encoded_email_header(receivername, receiver)
- msg['From'] = _encoded_email_header(sendername, sender)
- if cc:
- msg['Cc'] = cc
- if replyto:
- msg['Reply-To'] = replyto
- msg['Date'] = formatdate(localtime=True)
- if messageid:
- msg['Message-ID'] = messageid
- else:
- msg['Message-ID'] = make_msgid()
+ # attachment format, each is a tuple of (name, mimetype,contents)
+ # content should be *binary* and not base64 encoded, since we need to
+ # use the base64 routines from the email library to get a properly
+ # formatted output message
+ msg = MIMEMultipart()
+ msg['Subject'] = subject
+ msg['To'] = _encoded_email_header(receivername, receiver)
+ msg['From'] = _encoded_email_header(sendername, sender)
+ if cc:
+ msg['Cc'] = cc
+ if replyto:
+ msg['Reply-To'] = replyto
+ msg['Date'] = formatdate(localtime=True)
+ if messageid:
+ msg['Message-ID'] = messageid
+ else:
+ msg['Message-ID'] = make_msgid()
- msg.attach(MIMEText(msgtxt, _charset='utf-8'))
+ msg.attach(MIMEText(msgtxt, _charset='utf-8'))
- if attachments:
- for filename, contenttype, content in attachments:
- main,sub = contenttype.split('/')
- part = MIMENonMultipart(main,sub)
- part.set_payload(content)
- part.add_header('Content-Disposition', 'attachment; filename="%s"' % filename)
- encoders.encode_base64(part)
- msg.attach(part)
+ if attachments:
+ for filename, contenttype, content in attachments:
+ main,sub = contenttype.split('/')
+ part = MIMENonMultipart(main,sub)
+ part.set_payload(content)
+ part.add_header('Content-Disposition', 'attachment; filename="%s"' % filename)
+ encoders.encode_base64(part)
+ msg.attach(part)
- # Just write it to the queue, so it will be transactionally rolled back
- QueuedMail(sender=sender, receiver=receiver, fullmsg=msg.as_string(), usergenerated=usergenerated).save()
- if cc:
- # Write a second copy for the cc, wihch will be delivered
- # directly to the recipient. (The sender doesn't parse the
- # message content to extract cc fields).
- QueuedMail(sender=sender, receiver=cc, fullmsg=msg.as_string(), usergenerated=usergenerated).save()
+ # Just write it to the queue, so it will be transactionally rolled back
+ QueuedMail(sender=sender, receiver=receiver, fullmsg=msg.as_string(), usergenerated=usergenerated).save()
+ if cc:
+ # Write a second copy for the cc, wihch will be delivered
+ # directly to the recipient. (The sender doesn't parse the
+ # message content to extract cc fields).
+ QueuedMail(sender=sender, receiver=cc, fullmsg=msg.as_string(), usergenerated=usergenerated).save()
def send_mail(sender, receiver, fullmsg, usergenerated=False):
- # Send an email, prepared as the full MIME encoded mail already
- QueuedMail(sender=sender, receiver=receiver, fullmsg=fullmsg, usergenerated=False).save()
+ # Send an email, prepared as the full MIME encoded mail already
+ QueuedMail(sender=sender, receiver=receiver, fullmsg=fullmsg, usergenerated=False).save()
diff --git a/pgweb/misc/forms.py b/pgweb/misc/forms.py
index 7ddd57f1..b8b46b89 100644
--- a/pgweb/misc/forms.py
+++ b/pgweb/misc/forms.py
@@ -4,30 +4,30 @@ from django.db.models import Q
from pgweb.core.models import Version
class _version_choices():
- def __iter__(self):
- yield ('-1', '** Select version')
- q = Q(supported=True) | Q(testing__gt=0)
- for v in Version.objects.filter(q):
- for minor in range(v.latestminor,-1,-1):
- if not v.testing or minor>0:
- # For beta/rc versions, there is no beta0, so exclude it
- s = v.buildversionstring(minor)
- yield (s,s)
- yield ('Unsupported/Unknown', 'Unsupported/Unknown')
+ def __iter__(self):
+ yield ('-1', '** Select version')
+ q = Q(supported=True) | Q(testing__gt=0)
+ for v in Version.objects.filter(q):
+ for minor in range(v.latestminor,-1,-1):
+ if not v.testing or minor>0:
+ # For beta/rc versions, there is no beta0, so exclude it
+ s = v.buildversionstring(minor)
+ yield (s,s)
+ yield ('Unsupported/Unknown', 'Unsupported/Unknown')
class SubmitBugForm(forms.Form):
- name = forms.CharField(max_length=100, required=True)
- email = forms.EmailField(max_length=100, required=True)
- pgversion = forms.CharField(max_length=20, required=True,
- label="PostgreSQL version",
- widget=forms.Select(choices=_version_choices()))
- os = forms.CharField(max_length=50, required=True,
- label="Operating system")
- shortdesc = forms.CharField(max_length=100, required=True,
- label="Short description")
- details = forms.CharField(required=True, widget=forms.Textarea)
+ name = forms.CharField(max_length=100, required=True)
+ email = forms.EmailField(max_length=100, required=True)
+ pgversion = forms.CharField(max_length=20, required=True,
+ label="PostgreSQL version",
+ widget=forms.Select(choices=_version_choices()))
+ os = forms.CharField(max_length=50, required=True,
+ label="Operating system")
+ shortdesc = forms.CharField(max_length=100, required=True,
+ label="Short description")
+ details = forms.CharField(required=True, widget=forms.Textarea)
- def clean_pgversion(self):
- if self.cleaned_data.get('pgversion') == '-1':
- raise forms.ValidationError('You must select a version')
- return self.cleaned_data.get('pgversion')
+ def clean_pgversion(self):
+ if self.cleaned_data.get('pgversion') == '-1':
+ raise forms.ValidationError('You must select a version')
+ return self.cleaned_data.get('pgversion')
diff --git a/pgweb/misc/models.py b/pgweb/misc/models.py
index 90c7e240..58148e70 100644
--- a/pgweb/misc/models.py
+++ b/pgweb/misc/models.py
@@ -1,7 +1,7 @@
from django.db import models
class BugIdMap(models.Model):
- # Explicit id field because we don't want a SERIAL here, since we generate
- # the actual bug IDs externally.
- id = models.IntegerField(null=False, blank=False, primary_key=True)
- messageid = models.CharField(max_length=500, null=False, blank=False)
+ # Explicit id field because we don't want a SERIAL here, since we generate
+ # the actual bug IDs externally.
+ id = models.IntegerField(null=False, blank=False, primary_key=True)
+ messageid = models.CharField(max_length=500, null=False, blank=False)
diff --git a/pgweb/misc/views.py b/pgweb/misc/views.py
index aca74eed..9656da81 100644
--- a/pgweb/misc/views.py
+++ b/pgweb/misc/views.py
@@ -18,76 +18,76 @@ from pgweb.misc.models import BugIdMap
from forms import SubmitBugForm
def _make_bugs_messageid(bugid):
- return "<{0}-{1}@postgresql.org>".format(
- bugid,
- hashlib.md5("{0}-{1}".format(os.getpid(), time.time())).hexdigest()[:16],
- )
+ return "<{0}-{1}@postgresql.org>".format(
+ bugid,
+ hashlib.md5("{0}-{1}".format(os.getpid(), time.time())).hexdigest()[:16],
+ )
@login_required
def submitbug(request):
- if request.method == 'POST':
- form = SubmitBugForm(request.POST)
- if form.is_valid():
- with transaction.atomic():
- c = connection.cursor()
- c.execute("SELECT nextval('bug_id_seq')")
- bugid = c.fetchall()[0][0]
+ if request.method == 'POST':
+ form = SubmitBugForm(request.POST)
+ if form.is_valid():
+ with transaction.atomic():
+ c = connection.cursor()
+ c.execute("SELECT nextval('bug_id_seq')")
+ bugid = c.fetchall()[0][0]
- messageid = _make_bugs_messageid(bugid)
+ messageid = _make_bugs_messageid(bugid)
- BugIdMap(id=bugid, messageid=messageid.strip('<>')).save()
+ BugIdMap(id=bugid, messageid=messageid.strip('<>')).save()
- send_template_mail(
- settings.BUGREPORT_NOREPLY_EMAIL,
- settings.BUGREPORT_EMAIL,
- 'BUG #%s: %s' % (bugid, form.cleaned_data['shortdesc']),
- 'misc/bugmail.txt',
- {
- 'bugid': bugid,
- 'bug': form.cleaned_data,
- },
- usergenerated=True,
- cc=form.cleaned_data['email'],
- replyto='%s, %s' % (form.cleaned_data['email'], settings.BUGREPORT_EMAIL),
- sendername="PG Bug reporting form",
- messageid=messageid,
- )
+ send_template_mail(
+ settings.BUGREPORT_NOREPLY_EMAIL,
+ settings.BUGREPORT_EMAIL,
+ 'BUG #%s: %s' % (bugid, form.cleaned_data['shortdesc']),
+ 'misc/bugmail.txt',
+ {
+ 'bugid': bugid,
+ 'bug': form.cleaned_data,
+ },
+ usergenerated=True,
+ cc=form.cleaned_data['email'],
+ replyto='%s, %s' % (form.cleaned_data['email'], settings.BUGREPORT_EMAIL),
+ sendername="PG Bug reporting form",
+ messageid=messageid,
+ )
- return HttpResponseRedirect("/account/submitbug/{0}/".format(bugid))
- else:
- form = SubmitBugForm(initial={
- 'name': '%s %s' % (request.user.first_name, request.user.last_name),
- 'email': request.user.email,
- })
+ return HttpResponseRedirect("/account/submitbug/{0}/".format(bugid))
+ else:
+ form = SubmitBugForm(initial={
+ 'name': '%s %s' % (request.user.first_name, request.user.last_name),
+ 'email': request.user.email,
+ })
- versions = Version.objects.filter(supported=True)
+ versions = Version.objects.filter(supported=True)
- return render_pgweb(request, 'support', 'base/form.html', {
- 'form': form,
- 'formitemtype': 'bug report',
- 'formtitle': 'Submit Bug Report ',
- 'operation': 'Submit',
- 'form_intro': template_to_string('misc/bug_header.html', {
- 'supportedversions': versions,
- }),
- 'savebutton': 'Submit and Send Email',
- })
+ return render_pgweb(request, 'support', 'base/form.html', {
+ 'form': form,
+ 'formitemtype': 'bug report',
+ 'formtitle': 'Submit Bug Report ',
+ 'operation': 'Submit',
+ 'form_intro': template_to_string('misc/bug_header.html', {
+ 'supportedversions': versions,
+ }),
+ 'savebutton': 'Submit and Send Email',
+ })
@login_required
def submitbug_done(request, bugid):
- return render_pgweb(request, 'support', 'misc/bug_completed.html', {
- 'bugid': bugid,
- })
+ return render_pgweb(request, 'support', 'misc/bug_completed.html', {
+ 'bugid': bugid,
+ })
def bugs_redir(request, bugid):
- r = get_object_or_404(BugIdMap, id=bugid)
+ r = get_object_or_404(BugIdMap, id=bugid)
- return HttpResponseRedirect("{0}/message-id/{1}".format(settings.SITE_ROOT, r.messageid))
+ return HttpResponseRedirect("{0}/message-id/{1}".format(settings.SITE_ROOT, r.messageid))
# A crash testing URL. If the file /tmp/crashtest exists, raise a http 500
# error. Otherwise, just return a fixed text response
def crashtest(request):
- if os.path.exists('/tmp/crashtest'):
- raise Exception('This is a manual test of a crash!')
- else:
- return HttpResponse('Crash testing disabled', content_type='text/plain')
+ if os.path.exists('/tmp/crashtest'):
+ raise Exception('This is a manual test of a crash!')
+ else:
+ return HttpResponse('Crash testing disabled', content_type='text/plain')
diff --git a/pgweb/news/admin.py b/pgweb/news/admin.py
index 93ca0dfc..44734b7c 100644
--- a/pgweb/news/admin.py
+++ b/pgweb/news/admin.py
@@ -4,21 +4,21 @@ from pgweb.util.admin import PgwebAdmin
from models import NewsArticle, NewsTag
class NewsArticleAdmin(PgwebAdmin):
- list_display = ('title', 'org', 'date', 'approved', )
- list_filter = ('approved', )
- filter_horizontal = ('tags', )
- search_fields = ('content', 'title', )
- change_form_template = 'admin/news/newsarticle/change_form.html'
+ list_display = ('title', 'org', 'date', 'approved', )
+ list_filter = ('approved', )
+ filter_horizontal = ('tags', )
+ search_fields = ('content', 'title', )
+ change_form_template = 'admin/news/newsarticle/change_form.html'
- def change_view(self, request, object_id, extra_context=None):
- newsarticle = NewsArticle.objects.get(pk=object_id)
- my_context = {
- 'latest': NewsArticle.objects.filter(org=newsarticle.org)[:10]
- }
- return super(NewsArticleAdmin, self).change_view(request, object_id, extra_context=my_context)
+ def change_view(self, request, object_id, extra_context=None):
+ newsarticle = NewsArticle.objects.get(pk=object_id)
+ my_context = {
+ 'latest': NewsArticle.objects.filter(org=newsarticle.org)[:10]
+ }
+ return super(NewsArticleAdmin, self).change_view(request, object_id, extra_context=my_context)
class NewsTagAdmin(PgwebAdmin):
- list_display = ('urlname', 'name', 'description')
+ list_display = ('urlname', 'name', 'description')
admin.site.register(NewsArticle, NewsArticleAdmin)
admin.site.register(NewsTag, NewsTagAdmin)
diff --git a/pgweb/news/feeds.py b/pgweb/news/feeds.py
index 6c77fe3c..b28ad8c7 100644
--- a/pgweb/news/feeds.py
+++ b/pgweb/news/feeds.py
@@ -5,23 +5,23 @@ from models import NewsArticle
from datetime import datetime, time
class NewsFeed(Feed):
- title = description = "PostgreSQL news"
- link = "https://www.postgresql.org/"
+ title = description = "PostgreSQL news"
+ link = "https://www.postgresql.org/"
- description_template = 'news/rss_description.html'
- title_template = 'news/rss_title.html'
+ description_template = 'news/rss_description.html'
+ title_template = 'news/rss_title.html'
- def get_object(self, request, tagurl=None):
- return tagurl
+ def get_object(self, request, tagurl=None):
+ return tagurl
- def items(self, obj):
- if obj:
- return NewsArticle.objects.filter(approved=True, tags__urlname=obj)[:10]
- else:
- return NewsArticle.objects.filter(approved=True)[:10]
+ def items(self, obj):
+ if obj:
+ return NewsArticle.objects.filter(approved=True, tags__urlname=obj)[:10]
+ else:
+ return NewsArticle.objects.filter(approved=True)[:10]
- def item_link(self, obj):
- return "https://www.postgresql.org/about/news/%s/" % obj.id
+ def item_link(self, obj):
+ return "https://www.postgresql.org/about/news/%s/" % obj.id
- def item_pubdate(self, obj):
- return datetime.combine(obj.date,time.min)
+ def item_pubdate(self, obj):
+ return datetime.combine(obj.date,time.min)
diff --git a/pgweb/news/forms.py b/pgweb/news/forms.py
index a779db01..a711cfae 100644
--- a/pgweb/news/forms.py
+++ b/pgweb/news/forms.py
@@ -5,25 +5,25 @@ from pgweb.core.models import Organisation
from models import NewsArticle, NewsTag
class NewsArticleForm(forms.ModelForm):
- def __init__(self, *args, **kwargs):
- super(NewsArticleForm, self).__init__(*args, **kwargs)
- def filter_by_user(self, user):
- self.fields['org'].queryset = Organisation.objects.filter(managers=user, approved=True)
- def clean_date(self):
- if self.instance.pk and self.instance.approved:
- if self.cleaned_data['date'] != self.instance.date:
- raise ValidationError("You cannot change the date on an article that has been approved")
- return self.cleaned_data['date']
+ def __init__(self, *args, **kwargs):
+ super(NewsArticleForm, self).__init__(*args, **kwargs)
+ def filter_by_user(self, user):
+ self.fields['org'].queryset = Organisation.objects.filter(managers=user, approved=True)
+ def clean_date(self):
+ if self.instance.pk and self.instance.approved:
+ if self.cleaned_data['date'] != self.instance.date:
+ raise ValidationError("You cannot change the date on an article that has been approved")
+ return self.cleaned_data['date']
- @property
- def described_checkboxes(self):
- return {
- 'tags': {t.id: t.description for t in NewsTag.objects.all()}
- }
+ @property
+ def described_checkboxes(self):
+ return {
+ 'tags': {t.id: t.description for t in NewsTag.objects.all()}
+ }
- class Meta:
- model = NewsArticle
- exclude = ('submitter', 'approved', 'tweeted')
- widgets = {
- 'tags': forms.CheckboxSelectMultiple,
- }
+ class Meta:
+ model = NewsArticle
+ exclude = ('submitter', 'approved', 'tweeted')
+ widgets = {
+ 'tags': forms.CheckboxSelectMultiple,
+ }
diff --git a/pgweb/news/management/commands/twitter_post.py b/pgweb/news/management/commands/twitter_post.py
index 73e5b2c5..ce49b24c 100644
--- a/pgweb/news/management/commands/twitter_post.py
+++ b/pgweb/news/management/commands/twitter_post.py
@@ -16,33 +16,33 @@ from pgweb.news.models import NewsArticle
import requests_oauthlib
class Command(BaseCommand):
- help = 'Post to twitter'
+ help = 'Post to twitter'
- def handle(self, *args, **options):
- curs = connection.cursor()
- curs.execute("SELECT pg_try_advisory_lock(62387372)")
- if not curs.fetchall()[0][0]:
- raise CommandError("Failed to get advisory lock, existing twitter_post process stuck?")
+ def handle(self, *args, **options):
+ curs = connection.cursor()
+ curs.execute("SELECT pg_try_advisory_lock(62387372)")
+ if not curs.fetchall()[0][0]:
+ raise CommandError("Failed to get advisory lock, existing twitter_post process stuck?")
- articles = list(NewsArticle.objects.filter(tweeted=False, approved=True, date__gt=datetime.now()-timedelta(days=7)).order_by('date'))
- if not len(articles):
- return
+ articles = list(NewsArticle.objects.filter(tweeted=False, approved=True, date__gt=datetime.now()-timedelta(days=7)).order_by('date'))
+ if not len(articles):
+ return
- tw = requests_oauthlib.OAuth1Session(settings.TWITTER_CLIENT,
- settings.TWITTER_CLIENTSECRET,
- settings.TWITTER_TOKEN,
- settings.TWITTER_TOKENSECRET)
+ tw = requests_oauthlib.OAuth1Session(settings.TWITTER_CLIENT,
+ settings.TWITTER_CLIENTSECRET,
+ settings.TWITTER_TOKEN,
+ settings.TWITTER_TOKENSECRET)
- for a in articles:
- # We hardcode 30 chars for the URL shortener. And then 10 to cover the intro and spacing.
- statusstr = u"News: {0} {1}/about/news/{2}/".format(a.title[:140-40], settings.SITE_ROOT, a.id)
- r = tw.post('https://api.twitter.com/1.1/statuses/update.json', data={
- 'status': statusstr,
- })
- if r.status_code != 200:
- print("Failed to post to twitter: %s " % r)
- else:
- a.tweeted = True
- a.save()
- # Don't post more often than once / 30 seconds, to not trigger flooding.
- time.sleep(30)
+ for a in articles:
+ # We hardcode 30 chars for the URL shortener. And then 10 to cover the intro and spacing.
+ statusstr = u"News: {0} {1}/about/news/{2}/".format(a.title[:140-40], settings.SITE_ROOT, a.id)
+ r = tw.post('https://api.twitter.com/1.1/statuses/update.json', data={
+ 'status': statusstr,
+ })
+ if r.status_code != 200:
+ print("Failed to post to twitter: %s " % r)
+ else:
+ a.tweeted = True
+ a.save()
+ # Don't post more often than once / 30 seconds, to not trigger flooding.
+ time.sleep(30)
diff --git a/pgweb/news/management/commands/twitter_register.py b/pgweb/news/management/commands/twitter_register.py
index 46c100db..6914d10f 100644
--- a/pgweb/news/management/commands/twitter_register.py
+++ b/pgweb/news/management/commands/twitter_register.py
@@ -10,34 +10,34 @@ from django.conf import settings
import requests_oauthlib
class Command(BaseCommand):
- help = 'Register with twitter oauth'
+ help = 'Register with twitter oauth'
- def handle(self, *args, **options):
- if not hasattr(settings, 'TWITTER_CLIENT'):
- raise CommandError("TWITTER_CLIENT must be set in settings_local.py")
- if not hasattr(settings, 'TWITTER_CLIENTSECRET'):
- raise CommandError("TWITTER_CLIENTSECRET must be set in settings_local.py")
- if hasattr(settings, 'TWITTER_TOKEN'):
- raise CommandError("TWITTER_TOKEN is already set in settings_local.py")
- if hasattr(settings, 'TWITTER_TOKENSECRET'):
- raise CommandError("TWITTER_TOKENSECRET is already set in settings_local.py")
+ def handle(self, *args, **options):
+ if not hasattr(settings, 'TWITTER_CLIENT'):
+ raise CommandError("TWITTER_CLIENT must be set in settings_local.py")
+ if not hasattr(settings, 'TWITTER_CLIENTSECRET'):
+ raise CommandError("TWITTER_CLIENTSECRET must be set in settings_local.py")
+ if hasattr(settings, 'TWITTER_TOKEN'):
+ raise CommandError("TWITTER_TOKEN is already set in settings_local.py")
+ if hasattr(settings, 'TWITTER_TOKENSECRET'):
+ raise CommandError("TWITTER_TOKENSECRET is already set in settings_local.py")
- # OK, now we're good to go :)
- oauth = requests_oauthlib.OAuth1Session(settings.TWITTER_CLIENT, settings.TWITTER_CLIENTSECRET)
- fetch_response = oauth.fetch_request_token('https://api.twitter.com/oauth/request_token')
+ # OK, now we're good to go :)
+ oauth = requests_oauthlib.OAuth1Session(settings.TWITTER_CLIENT, settings.TWITTER_CLIENTSECRET)
+ fetch_response = oauth.fetch_request_token('https://api.twitter.com/oauth/request_token')
- authorization_url = oauth.authorization_url('https://api.twitter.com/oauth/authorize')
- print 'Please go here and authorize: %s' % authorization_url
+ authorization_url = oauth.authorization_url('https://api.twitter.com/oauth/authorize')
+ print 'Please go here and authorize: %s' % authorization_url
- pin = raw_input('Paste the PIN here: ')
+ pin = raw_input('Paste the PIN here: ')
- oauth = requests_oauthlib.OAuth1Session(settings.TWITTER_CLIENT,
- settings.TWITTER_CLIENTSECRET,
- resource_owner_key=fetch_response.get('oauth_token'),
- resource_owner_secret=fetch_response.get('oauth_token_secret'),
- verifier=pin)
- oauth_tokens = oauth.fetch_access_token('https://api.twitter.com/oauth/access_token')
+ oauth = requests_oauthlib.OAuth1Session(settings.TWITTER_CLIENT,
+ settings.TWITTER_CLIENTSECRET,
+ resource_owner_key=fetch_response.get('oauth_token'),
+ resource_owner_secret=fetch_response.get('oauth_token_secret'),
+ verifier=pin)
+ oauth_tokens = oauth.fetch_access_token('https://api.twitter.com/oauth/access_token')
- print("Authorized. Please configure:")
- print("TWITTER_TOKEN='%s'" % oauth_tokens.get('oauth_token'))
- print("TWITTER_TOKENSECRET='%s'" % oauth_tokens.get('oauth_token_secret'))
+ print("Authorized. Please configure:")
+ print("TWITTER_TOKEN='%s'" % oauth_tokens.get('oauth_token'))
+ print("TWITTER_TOKENSECRET='%s'" % oauth_tokens.get('oauth_token_secret'))
diff --git a/pgweb/news/models.py b/pgweb/news/models.py
index 2c25489a..d31f64e8 100644
--- a/pgweb/news/models.py
+++ b/pgweb/news/models.py
@@ -3,51 +3,51 @@ from datetime import date
from pgweb.core.models import Organisation
class NewsTag(models.Model):
- urlname = models.CharField(max_length=20, null=False, blank=False, unique=True)
- name = models.CharField(max_length=32, null=False, blank=False)
- description = models.CharField(max_length=200, null=False, blank=False)
+ urlname = models.CharField(max_length=20, null=False, blank=False, unique=True)
+ name = models.CharField(max_length=32, null=False, blank=False)
+ description = models.CharField(max_length=200, null=False, blank=False)
- def __unicode__(self):
- return self.name
+ def __unicode__(self):
+ return self.name
- class Meta:
- ordering = ('urlname', )
+ class Meta:
+ ordering = ('urlname', )
class NewsArticle(models.Model):
- org = models.ForeignKey(Organisation, null=False, blank=False, verbose_name="Organisation", help_text="If no organisations are listed, please check the organisation list and contact the organisation manager or webmaster@postgresql.org if none are listed.")
- approved = models.BooleanField(null=False, blank=False, default=False)
- date = models.DateField(null=False, blank=False, default=date.today)
- title = models.CharField(max_length=200, null=False, blank=False)
- content = models.TextField(null=False, blank=False)
- tweeted = models.BooleanField(null=False, blank=False, default=False)
- tags = models.ManyToManyField(NewsTag, blank=False, help_text="Hover mouse over tags to view full description")
+ org = models.ForeignKey(Organisation, null=False, blank=False, verbose_name="Organisation", help_text="If no organisations are listed, please check the organisation list and contact the organisation manager or webmaster@postgresql.org if none are listed.")
+ approved = models.BooleanField(null=False, blank=False, default=False)
+ date = models.DateField(null=False, blank=False, default=date.today)
+ title = models.CharField(max_length=200, null=False, blank=False)
+ content = models.TextField(null=False, blank=False)
+ tweeted = models.BooleanField(null=False, blank=False, default=False)
+ tags = models.ManyToManyField(NewsTag, blank=False, help_text="Hover mouse over tags to view full description")
- send_notification = True
- send_m2m_notification = True
- markdown_fields = ('content',)
+ send_notification = True
+ send_m2m_notification = True
+ markdown_fields = ('content',)
- def purge_urls(self):
- yield '/about/news/%s/' % self.pk
- yield '/about/newsarchive/'
- yield '/news.rss'
- yield '/news/.*.rss'
- # FIXME: when to expire the front page?
- yield '/$'
+ def purge_urls(self):
+ yield '/about/news/%s/' % self.pk
+ yield '/about/newsarchive/'
+ yield '/news.rss'
+ yield '/news/.*.rss'
+ # FIXME: when to expire the front page?
+ yield '/$'
- def __unicode__(self):
- return "%s: %s" % (self.date, self.title)
+ def __unicode__(self):
+ return "%s: %s" % (self.date, self.title)
- def verify_submitter(self, user):
- return (len(self.org.managers.filter(pk=user.pk)) == 1)
+ def verify_submitter(self, user):
+ return (len(self.org.managers.filter(pk=user.pk)) == 1)
- def is_migrated(self):
- if self.org.pk == 0:
- return True
- return False
+ def is_migrated(self):
+ if self.org.pk == 0:
+ return True
+ return False
- @property
- def displaydate(self):
- return self.date.strftime("%Y-%m-%d")
+ @property
+ def displaydate(self):
+ return self.date.strftime("%Y-%m-%d")
- class Meta:
- ordering = ('-date',)
+ class Meta:
+ ordering = ('-date',)
diff --git a/pgweb/news/struct.py b/pgweb/news/struct.py
index 2c06c874..4c49a196 100644
--- a/pgweb/news/struct.py
+++ b/pgweb/news/struct.py
@@ -2,16 +2,16 @@ from datetime import date, timedelta
from models import NewsArticle
def get_struct():
- now = date.today()
- fouryearsago = date.today() - timedelta(4*365, 0, 0)
+ now = date.today()
+ fouryearsago = date.today() - timedelta(4*365, 0, 0)
- # We intentionally don't put /about/newsarchive/ in the sitemap,
- # since we don't care about getting it indexed.
- # Also, don't bother indexing anything > 4 years old
+ # We intentionally don't put /about/newsarchive/ in the sitemap,
+ # since we don't care about getting it indexed.
+ # Also, don't bother indexing anything > 4 years old
- for n in NewsArticle.objects.filter(approved=True, date__gt=fouryearsago):
- yearsold = (now - n.date).days / 365
- if yearsold > 4:
- yearsold = 4
- yield ('about/news/%s/' % n.id,
- 0.5-(yearsold/10.0))
+ for n in NewsArticle.objects.filter(approved=True, date__gt=fouryearsago):
+ yearsold = (now - n.date).days / 365
+ if yearsold > 4:
+ yearsold = 4
+ yield ('about/news/%s/' % n.id,
+ 0.5-(yearsold/10.0))
diff --git a/pgweb/news/views.py b/pgweb/news/views.py
index 94ac1e6c..a055cd17 100644
--- a/pgweb/news/views.py
+++ b/pgweb/news/views.py
@@ -11,34 +11,34 @@ from forms import NewsArticleForm
import json
def archive(request, tag=None, paging=None):
- if tag:
- tag = get_object_or_404(NewsTag,urlname=tag.strip('/'))
- news = NewsArticle.objects.filter(approved=True, tags=tag)
- else:
- tag = None
- news = NewsArticle.objects.filter(approved=True)
- return render_pgweb(request, 'about', 'news/newsarchive.html', {
- 'news': news,
- 'tag': tag,
- 'newstags': NewsTag.objects.all(),
- })
+ if tag:
+ tag = get_object_or_404(NewsTag,urlname=tag.strip('/'))
+ news = NewsArticle.objects.filter(approved=True, tags=tag)
+ else:
+ tag = None
+ news = NewsArticle.objects.filter(approved=True)
+ return render_pgweb(request, 'about', 'news/newsarchive.html', {
+ 'news': news,
+ 'tag': tag,
+ 'newstags': NewsTag.objects.all(),
+ })
def item(request, itemid, throwaway=None):
- news = get_object_or_404(NewsArticle, pk=itemid)
- if not news.approved:
- raise Http404
- return render_pgweb(request, 'about', 'news/item.html', {
- 'obj': news,
- 'newstags': NewsTag.objects.all(),
- })
+ news = get_object_or_404(NewsArticle, pk=itemid)
+ if not news.approved:
+ raise Http404
+ return render_pgweb(request, 'about', 'news/item.html', {
+ 'obj': news,
+ 'newstags': NewsTag.objects.all(),
+ })
def taglist_json(request):
- return HttpResponse(json.dumps({
- 'tags': [{'name': t.urlname, 'description': t.description} for t in NewsTag.objects.distinct('urlname')],
- }), content_type='application/json')
+ return HttpResponse(json.dumps({
+ 'tags': [{'name': t.urlname, 'description': t.description} for t in NewsTag.objects.distinct('urlname')],
+ }), content_type='application/json')
@login_required
def form(request, itemid):
- return simple_form(NewsArticle, itemid, request, NewsArticleForm,
- redirect='/account/edit/news/')
+ return simple_form(NewsArticle, itemid, request, NewsArticleForm,
+ redirect='/account/edit/news/')
diff --git a/pgweb/profserv/admin.py b/pgweb/profserv/admin.py
index 8ab8563e..b644146c 100644
--- a/pgweb/profserv/admin.py
+++ b/pgweb/profserv/admin.py
@@ -4,8 +4,8 @@ from pgweb.util.admin import PgwebAdmin
from models import ProfessionalService
class ProfessionalServiceAdmin(PgwebAdmin):
- list_display = ('__unicode__', 'approved',)
- list_filter = ('approved',)
- search_fields = ('org__name',)
+ list_display = ('__unicode__', 'approved',)
+ list_filter = ('approved',)
+ search_fields = ('org__name',)
admin.site.register(ProfessionalService, ProfessionalServiceAdmin)
diff --git a/pgweb/profserv/forms.py b/pgweb/profserv/forms.py
index f61cde67..8df2ff40 100644
--- a/pgweb/profserv/forms.py
+++ b/pgweb/profserv/forms.py
@@ -4,12 +4,12 @@ from pgweb.core.models import Organisation
from models import ProfessionalService
class ProfessionalServiceForm(forms.ModelForm):
- form_intro = """Note that in order to register a new professional service, you must first register an organisation.
+ form_intro = """Note that in order to register a new professional service, you must first register an organisation.
If you have not done so, use this form."""
- def __init__(self, *args, **kwargs):
- super(ProfessionalServiceForm, self).__init__(*args, **kwargs)
- def filter_by_user(self, user):
- self.fields['org'].queryset = Organisation.objects.filter(managers=user, approved=True)
- class Meta:
- model = ProfessionalService
- exclude = ('submitter', 'approved', )
+ def __init__(self, *args, **kwargs):
+ super(ProfessionalServiceForm, self).__init__(*args, **kwargs)
+ def filter_by_user(self, user):
+ self.fields['org'].queryset = Organisation.objects.filter(managers=user, approved=True)
+ class Meta:
+ model = ProfessionalService
+ exclude = ('submitter', 'approved', )
diff --git a/pgweb/profserv/models.py b/pgweb/profserv/models.py
index c72b134d..180e1d43 100644
--- a/pgweb/profserv/models.py
+++ b/pgweb/profserv/models.py
@@ -3,40 +3,40 @@ from django.db import models
from pgweb.core.models import Organisation
class ProfessionalService(models.Model):
- approved = models.BooleanField(null=False, blank=False, default=False)
+ approved = models.BooleanField(null=False, blank=False, default=False)
- org = models.OneToOneField(Organisation, null=False, blank=False,
- db_column="organisation_id",
- verbose_name="organisation",
- help_text="If no organisations are listed, please check the organisation list and contact the organisation manager or webmaster@postgresql.org if none are listed.")
- description = models.TextField(null=False,blank=False)
- employees = models.CharField(max_length=32, null=True, blank=True)
- locations = models.CharField(max_length=128, null=True, blank=True)
- region_africa = models.BooleanField(null=False, default=False, verbose_name="Africa")
- region_asia = models.BooleanField(null=False, default=False, verbose_name="Asia")
- region_europe = models.BooleanField(null=False, default=False, verbose_name="Europe")
- region_northamerica = models.BooleanField(null=False, default=False, verbose_name="North America")
- region_oceania = models.BooleanField(null=False, default=False, verbose_name="Oceania")
- region_southamerica = models.BooleanField(null=False, default=False, verbose_name="South America")
- hours = models.CharField(max_length=128, null=True, blank=True)
- languages = models.CharField(max_length=128, null=True, blank=True)
- customerexample = models.TextField(blank=True, null=True, verbose_name="Customer Example")
- experience = models.TextField(blank=True, null=True)
- contact = models.TextField(null=True, blank=True)
- url = models.URLField(max_length=128, null=True, blank=True, verbose_name="URL")
- provides_support = models.BooleanField(null=False, default=False)
- provides_hosting = models.BooleanField(null=False, default=False)
- interfaces = models.CharField(max_length=512, null=True, blank=True, verbose_name="Interfaces (for hosting)")
+ org = models.OneToOneField(Organisation, null=False, blank=False,
+ db_column="organisation_id",
+ verbose_name="organisation",
+ help_text="If no organisations are listed, please check the organisation list and contact the organisation manager or webmaster@postgresql.org if none are listed.")
+ description = models.TextField(null=False,blank=False)
+ employees = models.CharField(max_length=32, null=True, blank=True)
+ locations = models.CharField(max_length=128, null=True, blank=True)
+ region_africa = models.BooleanField(null=False, default=False, verbose_name="Africa")
+ region_asia = models.BooleanField(null=False, default=False, verbose_name="Asia")
+ region_europe = models.BooleanField(null=False, default=False, verbose_name="Europe")
+ region_northamerica = models.BooleanField(null=False, default=False, verbose_name="North America")
+ region_oceania = models.BooleanField(null=False, default=False, verbose_name="Oceania")
+ region_southamerica = models.BooleanField(null=False, default=False, verbose_name="South America")
+ hours = models.CharField(max_length=128, null=True, blank=True)
+ languages = models.CharField(max_length=128, null=True, blank=True)
+ customerexample = models.TextField(blank=True, null=True, verbose_name="Customer Example")
+ experience = models.TextField(blank=True, null=True)
+ contact = models.TextField(null=True, blank=True)
+ url = models.URLField(max_length=128, null=True, blank=True, verbose_name="URL")
+ provides_support = models.BooleanField(null=False, default=False)
+ provides_hosting = models.BooleanField(null=False, default=False)
+ interfaces = models.CharField(max_length=512, null=True, blank=True, verbose_name="Interfaces (for hosting)")
- purge_urls = ('/support/professional_', )
+ purge_urls = ('/support/professional_', )
- send_notification = True
+ send_notification = True
- def verify_submitter(self, user):
- return (len(self.org.managers.filter(pk=user.pk)) == 1)
+ def verify_submitter(self, user):
+ return (len(self.org.managers.filter(pk=user.pk)) == 1)
- def __unicode__(self):
- return self.org.name
+ def __unicode__(self):
+ return self.org.name
- class Meta:
- ordering = ('org__name',)
+ class Meta:
+ ordering = ('org__name',)
diff --git a/pgweb/profserv/struct.py b/pgweb/profserv/struct.py
index e3192d2d..659753b2 100644
--- a/pgweb/profserv/struct.py
+++ b/pgweb/profserv/struct.py
@@ -1,6 +1,6 @@
from views import regions
def get_struct():
- for key, name in regions:
- yield ('support/professional_support/%s/' % key, None)
- yield ('support/professional_hosting/%s/' % key, None)
+ for key, name in regions:
+ yield ('support/professional_support/%s/' % key, None)
+ yield ('support/professional_hosting/%s/' % key, None)
diff --git a/pgweb/profserv/views.py b/pgweb/profserv/views.py
index eb08c510..ad135d22 100644
--- a/pgweb/profserv/views.py
+++ b/pgweb/profserv/views.py
@@ -17,44 +17,44 @@ regions = (
)
def root(request, servtype):
- title = servtype=='support' and 'Professional Services' or 'Hosting Providers'
- what = servtype=='support' and 'support' or 'hosting'
- support = servtype=='support'
- return render_pgweb(request, 'support', 'profserv/root.html', {
- 'title': title,
- 'support': support,
- 'regions': regions,
- 'what': what,
- })
+ title = servtype=='support' and 'Professional Services' or 'Hosting Providers'
+ what = servtype=='support' and 'support' or 'hosting'
+ support = servtype=='support'
+ return render_pgweb(request, 'support', 'profserv/root.html', {
+ 'title': title,
+ 'support': support,
+ 'regions': regions,
+ 'what': what,
+ })
def region(request, servtype, regionname):
- regname = [n for r,n in regions if r==regionname]
- if not regname:
- raise Http404
- regname = regname[0]
+ regname = [n for r,n in regions if r==regionname]
+ if not regname:
+ raise Http404
+ regname = regname[0]
- what = servtype=='support' and 'support' or 'hosting'
- whatname = servtype=='support' and 'Professional Services' or 'Hosting Providers'
- title = "%s - %s" % (whatname, regname)
- support = servtype=='support'
+ what = servtype=='support' and 'support' or 'hosting'
+ whatname = servtype=='support' and 'Professional Services' or 'Hosting Providers'
+ title = "%s - %s" % (whatname, regname)
+ support = servtype=='support'
- # DB model is a bit funky here, so use the extra-where functionality to filter properly.
- # Field names are cleaned up earlier, so it's safe against injections.
- services = ProfessionalService.objects.select_related('org').filter(approved=True).extra(where=["region_%s AND provides_%s" % (regionname, what),])
+ # DB model is a bit funky here, so use the extra-where functionality to filter properly.
+ # Field names are cleaned up earlier, so it's safe against injections.
+ services = ProfessionalService.objects.select_related('org').filter(approved=True).extra(where=["region_%s AND provides_%s" % (regionname, what),])
- return render_pgweb(request, 'support', 'profserv/list.html', {
- 'title': title,
- 'support': support,
- 'what': what,
- 'whatname': whatname,
- 'regionname': regname,
- 'services': services,
- })
+ return render_pgweb(request, 'support', 'profserv/list.html', {
+ 'title': title,
+ 'support': support,
+ 'what': what,
+ 'whatname': whatname,
+ 'regionname': regname,
+ 'services': services,
+ })
# Forms to edit
@login_required
def profservform(request, itemid):
- return simple_form(ProfessionalService, itemid, request, ProfessionalServiceForm,
- redirect='/account/edit/services/')
+ return simple_form(ProfessionalService, itemid, request, ProfessionalServiceForm,
+ redirect='/account/edit/services/')
diff --git a/pgweb/pugs/admin.py b/pgweb/pugs/admin.py
index b4743154..35e13e73 100644
--- a/pgweb/pugs/admin.py
+++ b/pgweb/pugs/admin.py
@@ -4,8 +4,8 @@ from pgweb.util.admin import PgwebAdmin
from models import PUG
class PUGAdmin(PgwebAdmin):
- list_display = ('title', 'approved', )
- list_filter = ('approved', )
- search_fields = ('title', )
+ list_display = ('title', 'approved', )
+ list_filter = ('approved', )
+ search_fields = ('title', )
admin.site.register(PUG, PUGAdmin)
diff --git a/pgweb/pugs/models.py b/pgweb/pugs/models.py
index a72e0626..54383f10 100644
--- a/pgweb/pugs/models.py
+++ b/pgweb/pugs/models.py
@@ -1,19 +1,19 @@
from django.db import models
class PUG(models.Model):
- """
- contains information about a local PostgreSQL user group
- """
- country = models.ForeignKey('core.Country')
- org = models.ForeignKey('core.Organisation', null=True, blank=True, help_text='Organisation that manages the PUG and its contents')
- approved = models.BooleanField(null=False, blank=False, default=False)
- locale = models.CharField(max_length=255, help_text="Locale where the PUG meets, e.g. 'New York City'")
- title = models.CharField(max_length=255, help_text="Title/Name of the PUG, e.g. 'NYC PostgreSQL User Group'")
- website_url = models.TextField(null=True, blank=True)
- mailing_list_url = models.TextField(null=True, blank=True)
+ """
+ contains information about a local PostgreSQL user group
+ """
+ country = models.ForeignKey('core.Country')
+ org = models.ForeignKey('core.Organisation', null=True, blank=True, help_text='Organisation that manages the PUG and its contents')
+ approved = models.BooleanField(null=False, blank=False, default=False)
+ locale = models.CharField(max_length=255, help_text="Locale where the PUG meets, e.g. 'New York City'")
+ title = models.CharField(max_length=255, help_text="Title/Name of the PUG, e.g. 'NYC PostgreSQL User Group'")
+ website_url = models.TextField(null=True, blank=True)
+ mailing_list_url = models.TextField(null=True, blank=True)
- purge_urls = ('/community/user-groups/', )
- send_notification = True
+ purge_urls = ('/community/user-groups/', )
+ send_notification = True
- def __unicode__(self):
- return self.title
+ def __unicode__(self):
+ return self.title
diff --git a/pgweb/pugs/views.py b/pgweb/pugs/views.py
index 83414ed7..167fd1f0 100644
--- a/pgweb/pugs/views.py
+++ b/pgweb/pugs/views.py
@@ -3,18 +3,18 @@ from pgweb.util.contexts import render_pgweb
from models import PUG
def index(request):
- """
- contains list of PUGs, in country/locale alphabetical order
- """
- pug_list = []
- for pug in PUG.objects.filter(approved=True).order_by('country__name', 'locale').all():
- if pug_list and pug_list[-1].get('country') == pug.country.name:
- pug_list[-1]['pugs'].append(pug)
- else:
- pug_list.append({
- 'country': pug.country.name,
- 'pugs': [pug]
- })
- return render_pgweb(request, 'community', 'pugs/index.html', {
- 'pug_list': pug_list,
- })
+ """
+ contains list of PUGs, in country/locale alphabetical order
+ """
+ pug_list = []
+ for pug in PUG.objects.filter(approved=True).order_by('country__name', 'locale').all():
+ if pug_list and pug_list[-1].get('country') == pug.country.name:
+ pug_list[-1]['pugs'].append(pug)
+ else:
+ pug_list.append({
+ 'country': pug.country.name,
+ 'pugs': [pug]
+ })
+ return render_pgweb(request, 'community', 'pugs/index.html', {
+ 'pug_list': pug_list,
+ })
diff --git a/pgweb/quotes/admin.py b/pgweb/quotes/admin.py
index 8b5ed8cf..39267e17 100644
--- a/pgweb/quotes/admin.py
+++ b/pgweb/quotes/admin.py
@@ -2,6 +2,6 @@ from django.contrib import admin
from models import Quote
class QuoteAdmin(admin.ModelAdmin):
- list_display = ('quote', 'who', 'org', )
+ list_display = ('quote', 'who', 'org', )
admin.site.register(Quote, QuoteAdmin)
diff --git a/pgweb/quotes/models.py b/pgweb/quotes/models.py
index f390f65e..fa690bcf 100644
--- a/pgweb/quotes/models.py
+++ b/pgweb/quotes/models.py
@@ -1,18 +1,18 @@
from django.db import models
class Quote(models.Model):
- approved = models.BooleanField(null=False, default=False)
- quote = models.TextField(null=False, blank=False)
- who = models.CharField(max_length=100, null=False, blank=False)
- org = models.CharField(max_length=100, null=False, blank=False)
- link = models.URLField(null=False, blank=False)
+ approved = models.BooleanField(null=False, default=False)
+ quote = models.TextField(null=False, blank=False)
+ who = models.CharField(max_length=100, null=False, blank=False)
+ org = models.CharField(max_length=100, null=False, blank=False)
+ link = models.URLField(null=False, blank=False)
- send_notification = True
+ send_notification = True
- purge_urls = ('/about/quotesarchive/', '/$', )
+ purge_urls = ('/about/quotesarchive/', '/$', )
- def __unicode__(self):
- if len(self.quote) > 75:
- return "%s..." % self.quote[:75]
- else:
- return self.quote
+ def __unicode__(self):
+ if len(self.quote) > 75:
+ return "%s..." % self.quote[:75]
+ else:
+ return self.quote
diff --git a/pgweb/search/views.py b/pgweb/search/views.py
index c6061a78..07f5ded1 100644
--- a/pgweb/search/views.py
+++ b/pgweb/search/views.py
@@ -17,305 +17,305 @@ from pgweb.lists.models import MailingList
# Conditionally import memcached library. Everything will work without
# it, so we allow development installs to run without it...
try:
- import pylibmc
- has_memcached=True
+ import pylibmc
+ has_memcached=True
except:
- has_memcached=False
+ has_memcached=False
def generate_pagelinks(pagenum, totalpages, querystring):
- # Generate a list of links to page through a search result
- # We generate these in HTML from the python code because it's
- # simply too ugly to try to do it in the template.
- if totalpages < 2:
- return
+ # Generate a list of links to page through a search result
+ # We generate these in HTML from the python code because it's
+ # simply too ugly to try to do it in the template.
+ if totalpages < 2:
+ return
- if pagenum > 1:
- # Prev link
- yield 'Prev' % (querystring, pagenum-1)
+ if pagenum > 1:
+ # Prev link
+ yield 'Prev' % (querystring, pagenum-1)
- if pagenum > 10:
- start = pagenum - 10
- else:
- start = 1
+ if pagenum > 10:
+ start = pagenum - 10
+ else:
+ start = 1
- for i in range(start, min(start+20, totalpages + 1)):
- if i == pagenum:
- yield "%s" % i
- else:
- yield '%s' % (querystring, i, i)
+ for i in range(start, min(start+20, totalpages + 1)):
+ if i == pagenum:
+ yield "%s" % i
+ else:
+ yield '%s' % (querystring, i, i)
- if pagenum != min(start+20, totalpages):
- yield 'Next' % (querystring, pagenum+1)
+ if pagenum != min(start+20, totalpages):
+ yield 'Next' % (querystring, pagenum+1)
@csrf_exempt
@cache(minutes=15)
def search(request):
- # Perform a general web search
- # Since this lives in a different database, we open a direct
- # connection with psycopg, thus bypassing everything that has to do
- # with django.
+ # Perform a general web search
+ # Since this lives in a different database, we open a direct
+ # connection with psycopg, thus bypassing everything that has to do
+ # with django.
- # constants that we might eventually want to make configurable
- hitsperpage = 20
+ # constants that we might eventually want to make configurable
+ hitsperpage = 20
- if request.GET.has_key('m') and request.GET['m'] == '1':
- searchlists = True
+ if request.GET.has_key('m') and request.GET['m'] == '1':
+ searchlists = True
- if request.GET.has_key('l'):
- if request.GET['l'] != '':
- try:
- listid = int(request.GET['l'])
- except:
- listid = None
- else:
- listid = None
- else:
- # Listid not specified. But do we have the name?
- if request.GET.has_key('ln'):
- try:
- ll = MailingList.objects.get(listname=request.GET['ln'])
- listid = ll.id
- except MailingList.DoesNotExist:
- # Invalid list name just resets the default of the form,
- # no need to throw an error.
- listid = None
- else:
- listid = None
+ if request.GET.has_key('l'):
+ if request.GET['l'] != '':
+ try:
+ listid = int(request.GET['l'])
+ except:
+ listid = None
+ else:
+ listid = None
+ else:
+ # Listid not specified. But do we have the name?
+ if request.GET.has_key('ln'):
+ try:
+ ll = MailingList.objects.get(listname=request.GET['ln'])
+ listid = ll.id
+ except MailingList.DoesNotExist:
+ # Invalid list name just resets the default of the form,
+ # no need to throw an error.
+ listid = None
+ else:
+ listid = None
- if request.GET.has_key('d'):
- try:
- dateval = int(request.GET['d'])
- except:
- dateval = None
- else:
- dateval = None
+ if request.GET.has_key('d'):
+ try:
+ dateval = int(request.GET['d'])
+ except:
+ dateval = None
+ else:
+ dateval = None
- if request.GET.has_key('s'):
- listsort = request.GET['s']
- if not listsort in ('r', 'd', 'i'):
- listsort = 'r'
- else:
- listsort = 'r'
+ if request.GET.has_key('s'):
+ listsort = request.GET['s']
+ if not listsort in ('r', 'd', 'i'):
+ listsort = 'r'
+ else:
+ listsort = 'r'
- if not dateval:
- dateval = 365
+ if not dateval:
+ dateval = 365
- sortoptions = (
- {'val':'r', 'text': 'Rank', 'selected': not (request.GET.has_key('s') and request.GET['s'] == 'd')},
- {'val':'d', 'text': 'Date', 'selected': request.GET.has_key('s') and request.GET['s'] == 'd'},
- {'val':'i', 'text': 'Reverse date', 'selected': request.GET.has_key('s') and request.GET['s'] == 'i'},
- )
- dateoptions = (
- {'val': -1, 'text': 'anytime'},
- {'val': 1, 'text': 'within last day'},
- {'val': 7, 'text': 'within last week'},
- {'val': 31, 'text': 'within last month'},
- {'val': 186, 'text': 'within last 6 months'},
- {'val': 365, 'text': 'within last year'},
- )
- else:
- searchlists = False
- if request.GET.has_key('u'):
- suburl = request.GET['u']
- else:
- suburl = None
+ sortoptions = (
+ {'val':'r', 'text': 'Rank', 'selected': not (request.GET.has_key('s') and request.GET['s'] == 'd')},
+ {'val':'d', 'text': 'Date', 'selected': request.GET.has_key('s') and request.GET['s'] == 'd'},
+ {'val':'i', 'text': 'Reverse date', 'selected': request.GET.has_key('s') and request.GET['s'] == 'i'},
+ )
+ dateoptions = (
+ {'val': -1, 'text': 'anytime'},
+ {'val': 1, 'text': 'within last day'},
+ {'val': 7, 'text': 'within last week'},
+ {'val': 31, 'text': 'within last month'},
+ {'val': 186, 'text': 'within last 6 months'},
+ {'val': 365, 'text': 'within last year'},
+ )
+ else:
+ searchlists = False
+ if request.GET.has_key('u'):
+ suburl = request.GET['u']
+ else:
+ suburl = None
- if request.GET.has_key('a'):
- allsites = (request.GET['a'] == "1")
- else:
- allsites = False
+ if request.GET.has_key('a'):
+ allsites = (request.GET['a'] == "1")
+ else:
+ allsites = False
- # Check that we actually have something to search for
- if not request.GET.has_key('q') or request.GET['q'] == '':
- if searchlists:
- return render(request, 'search/listsearch.html', {
- 'search_error': "No search term specified.",
- 'sortoptions': sortoptions,
- 'lists': MailingList.objects.all().order_by("group__sortkey"),
- 'listid': listid,
- 'dates': dateoptions,
- 'dateval': dateval,
- })
- else:
- return render(request, 'search/sitesearch.html', {
- 'search_error': "No search term specified.",
- })
- query = request.GET['q'].strip()
+ # Check that we actually have something to search for
+ if not request.GET.has_key('q') or request.GET['q'] == '':
+ if searchlists:
+ return render(request, 'search/listsearch.html', {
+ 'search_error': "No search term specified.",
+ 'sortoptions': sortoptions,
+ 'lists': MailingList.objects.all().order_by("group__sortkey"),
+ 'listid': listid,
+ 'dates': dateoptions,
+ 'dateval': dateval,
+ })
+ else:
+ return render(request, 'search/sitesearch.html', {
+ 'search_error': "No search term specified.",
+ })
+ query = request.GET['q'].strip()
- # Anti-stefan prevention
- if len(query) > 1000:
- return render(request, 'search/sitesearch.html', {
- 'search_error': "Search term too long.",
- })
+ # Anti-stefan prevention
+ if len(query) > 1000:
+ return render(request, 'search/sitesearch.html', {
+ 'search_error': "Search term too long.",
+ })
- # Is the request being paged?
- if request.GET.has_key('p'):
- try:
- pagenum = int(request.GET['p'])
- except:
- pagenum = 1
- else:
- pagenum = 1
+ # Is the request being paged?
+ if request.GET.has_key('p'):
+ try:
+ pagenum = int(request.GET['p'])
+ except:
+ pagenum = 1
+ else:
+ pagenum = 1
- firsthit = (pagenum - 1) * hitsperpage + 1
+ firsthit = (pagenum - 1) * hitsperpage + 1
- if searchlists:
- # Lists are searched by passing the work down using a http
- # API. In the future, we probably want to do everything
- # through a http API and merge hits, but that's for later
- p = {
- 'q': query.encode('utf-8'),
- 's': listsort,
- }
- if listid:
- if listid < 0:
- # This is a list group, we expand that on the web server
- p['ln'] = ','.join([x.listname for x in MailingList.objects.filter(group=-listid)])
- else:
- p['ln'] = MailingList.objects.get(pk=listid).listname
- if dateval:
- p['d'] = dateval
- urlstr = urllib.urlencode(p)
- # If memcached is available, let's try it
- hits = None
- if has_memcached:
- memc = pylibmc.Client(['127.0.0.1',], binary=True)
- # behavior not supported on pylibmc in squeeze:: behaviors={'tcp_nodelay':True})
- try:
- hits = memc.get(urlstr)
- except Exception:
- # If we had an exception, don't try to store either
- memc = None
- if not hits:
- # No hits found - so try to get them from the search server
- if settings.ARCHIVES_SEARCH_PLAINTEXT:
- c = httplib.HTTPConnection(settings.ARCHIVES_SEARCH_SERVER, strict=True, timeout=5)
- else:
- c = httplib.HTTPSConnection(settings.ARCHIVES_SEARCH_SERVER, strict=True, timeout=5)
- c.request('POST', '/archives-search/', urlstr, {'Content-type': 'application/x-www-form-urlencoded; charset=utf-8'})
- c.sock.settimeout(20) # Set a 20 second timeout
- try:
- r = c.getresponse()
- except (socket.timeout, ssl.SSLError):
- return render(request, 'search/listsearch.html', {
- 'search_error': 'Timeout when talking to search server. Please try your search again later, or with a more restrictive search terms.',
- })
- if r.status != 200:
- memc = None
- return render(request, 'search/listsearch.html', {
- 'search_error': 'Error talking to search server: %s' % r.reason,
- })
- hits = json.loads(r.read())
- if has_memcached and memc:
- # Store them in memcached too! But only for 10 minutes...
- # And always compress it, just because we can
- memc.set(urlstr, hits, 60*10, 1)
- memc = None
+ if searchlists:
+ # Lists are searched by passing the work down using a http
+ # API. In the future, we probably want to do everything
+ # through a http API and merge hits, but that's for later
+ p = {
+ 'q': query.encode('utf-8'),
+ 's': listsort,
+ }
+ if listid:
+ if listid < 0:
+ # This is a list group, we expand that on the web server
+ p['ln'] = ','.join([x.listname for x in MailingList.objects.filter(group=-listid)])
+ else:
+ p['ln'] = MailingList.objects.get(pk=listid).listname
+ if dateval:
+ p['d'] = dateval
+ urlstr = urllib.urlencode(p)
+ # If memcached is available, let's try it
+ hits = None
+ if has_memcached:
+ memc = pylibmc.Client(['127.0.0.1',], binary=True)
+ # behavior not supported on pylibmc in squeeze:: behaviors={'tcp_nodelay':True})
+ try:
+ hits = memc.get(urlstr)
+ except Exception:
+ # If we had an exception, don't try to store either
+ memc = None
+ if not hits:
+ # No hits found - so try to get them from the search server
+ if settings.ARCHIVES_SEARCH_PLAINTEXT:
+ c = httplib.HTTPConnection(settings.ARCHIVES_SEARCH_SERVER, strict=True, timeout=5)
+ else:
+ c = httplib.HTTPSConnection(settings.ARCHIVES_SEARCH_SERVER, strict=True, timeout=5)
+ c.request('POST', '/archives-search/', urlstr, {'Content-type': 'application/x-www-form-urlencoded; charset=utf-8'})
+ c.sock.settimeout(20) # Set a 20 second timeout
+ try:
+ r = c.getresponse()
+ except (socket.timeout, ssl.SSLError):
+ return render(request, 'search/listsearch.html', {
+                    'search_error': 'Timeout when talking to search server. Please try your search again later, or with a more restrictive search term.',
+ })
+ if r.status != 200:
+ memc = None
+ return render(request, 'search/listsearch.html', {
+ 'search_error': 'Error talking to search server: %s' % r.reason,
+ })
+ hits = json.loads(r.read())
+ if has_memcached and memc:
+ # Store them in memcached too! But only for 10 minutes...
+ # And always compress it, just because we can
+ memc.set(urlstr, hits, 60*10, 1)
+ memc = None
- if isinstance(hits, dict):
- # This is not just a list of hits.
- # Right now the only supported dict result is a messageid
- # match, but make sure that's what it is.
- if hits['messageidmatch'] == 1:
- return HttpResponseRedirect("/message-id/%s" % query)
+ if isinstance(hits, dict):
+ # This is not just a list of hits.
+ # Right now the only supported dict result is a messageid
+ # match, but make sure that's what it is.
+ if hits['messageidmatch'] == 1:
+ return HttpResponseRedirect("/message-id/%s" % query)
- totalhits = len(hits)
- querystr = "?m=1&q=%s&l=%s&d=%s&s=%s" % (
- urllib.quote_plus(query.encode('utf-8')),
- listid or '',
- dateval,
- listsort
- )
+ totalhits = len(hits)
+ querystr = "?m=1&q=%s&l=%s&d=%s&s=%s" % (
+ urllib.quote_plus(query.encode('utf-8')),
+ listid or '',
+ dateval,
+ listsort
+ )
- return render(request, 'search/listsearch.html', {
- 'hitcount': totalhits,
- 'firsthit': firsthit,
- 'lasthit': min(totalhits, firsthit+hitsperpage-1),
- 'query': request.GET['q'],
- 'pagelinks': " ".join(
- generate_pagelinks(pagenum,
- totalhits / hitsperpage + 1,
- querystr)),
- 'hits': [{
- 'date': h['d'],
- 'subject': h['s'],
- 'author': h['f'],
- 'messageid': h['m'],
- 'abstract': h['a'],
- 'rank': h['r'],
- } for h in hits[firsthit-1:firsthit+hitsperpage-1]],
- 'sortoptions': sortoptions,
- 'lists': MailingList.objects.all().order_by("group__sortkey"),
- 'listid': listid,
- 'dates': dateoptions,
- 'dateval': dateval,
- })
+ return render(request, 'search/listsearch.html', {
+ 'hitcount': totalhits,
+ 'firsthit': firsthit,
+ 'lasthit': min(totalhits, firsthit+hitsperpage-1),
+ 'query': request.GET['q'],
+ 'pagelinks': " ".join(
+ generate_pagelinks(pagenum,
+ totalhits / hitsperpage + 1,
+ querystr)),
+ 'hits': [{
+ 'date': h['d'],
+ 'subject': h['s'],
+ 'author': h['f'],
+ 'messageid': h['m'],
+ 'abstract': h['a'],
+ 'rank': h['r'],
+ } for h in hits[firsthit-1:firsthit+hitsperpage-1]],
+ 'sortoptions': sortoptions,
+ 'lists': MailingList.objects.all().order_by("group__sortkey"),
+ 'listid': listid,
+ 'dates': dateoptions,
+ 'dateval': dateval,
+ })
- else:
- # Website search is still done by making a regular pgsql connection
- # to the search server.
- try:
- conn = psycopg2.connect(settings.SEARCH_DSN)
- curs = conn.cursor()
- except:
- return render(request, 'search/sitesearch.html', {
- 'search_error': 'Could not connect to search database.'
- })
+ else:
+ # Website search is still done by making a regular pgsql connection
+ # to the search server.
+ try:
+ conn = psycopg2.connect(settings.SEARCH_DSN)
+ curs = conn.cursor()
+ except:
+ return render(request, 'search/sitesearch.html', {
+ 'search_error': 'Could not connect to search database.'
+ })
- # This is kind of a hack, but... Some URLs are flagged as internal
- # and should as such only be included in searches that explicitly
- # reference the suburl that they are in.
- if suburl and suburl.startswith('/docs/devel'):
- include_internal = True
- else:
- include_internal = False
+ # This is kind of a hack, but... Some URLs are flagged as internal
+ # and should as such only be included in searches that explicitly
+ # reference the suburl that they are in.
+ if suburl and suburl.startswith('/docs/devel'):
+ include_internal = True
+ else:
+ include_internal = False
- # perform the query for general web search
- try:
- curs.execute("SELECT * FROM site_search(%(query)s, %(firsthit)s, %(hitsperpage)s, %(allsites)s, %(suburl)s, %(internal)s)", {
- 'query': query,
- 'firsthit': firsthit - 1,
- 'hitsperpage': hitsperpage,
- 'allsites': allsites,
- 'suburl': suburl,
- 'internal': include_internal,
- })
- except psycopg2.ProgrammingError:
- return render(request, 'search/sitesearch.html', {
- 'search_error': 'Error executing search query.'
- })
+ # perform the query for general web search
+ try:
+ curs.execute("SELECT * FROM site_search(%(query)s, %(firsthit)s, %(hitsperpage)s, %(allsites)s, %(suburl)s, %(internal)s)", {
+ 'query': query,
+ 'firsthit': firsthit - 1,
+ 'hitsperpage': hitsperpage,
+ 'allsites': allsites,
+ 'suburl': suburl,
+ 'internal': include_internal,
+ })
+ except psycopg2.ProgrammingError:
+ return render(request, 'search/sitesearch.html', {
+ 'search_error': 'Error executing search query.'
+ })
- hits = curs.fetchall()
- conn.close()
- totalhits = int(hits[-1][5])
- try:
- if suburl:
- quoted_suburl = urllib.quote_plus(suburl)
- else:
- quoted_suburl = ''
- except:
- quoted_suburl = ''
- querystr = "?q=%s&a=%s&u=%s" % (
- urllib.quote_plus(query.encode('utf-8')),
- allsites and "1" or "0",
- quoted_suburl,
- )
+ hits = curs.fetchall()
+ conn.close()
+ totalhits = int(hits[-1][5])
+ try:
+ if suburl:
+ quoted_suburl = urllib.quote_plus(suburl)
+ else:
+ quoted_suburl = ''
+ except:
+ quoted_suburl = ''
+ querystr = "?q=%s&a=%s&u=%s" % (
+ urllib.quote_plus(query.encode('utf-8')),
+ allsites and "1" or "0",
+ quoted_suburl,
+ )
- return render(request, 'search/sitesearch.html', {
- 'suburl': suburl,
- 'allsites': allsites,
- 'hitcount': totalhits,
- 'firsthit': firsthit,
- 'lasthit': min(totalhits, firsthit+hitsperpage-1),
- 'query': request.GET['q'],
- 'pagelinks': " ".join(
- generate_pagelinks(pagenum,
- totalhits / hitsperpage + 1,
- querystr)),
- 'hits': [{
- 'title': h[3],
- 'url': "%s%s" % (h[1], h[2]),
- 'abstract': h[4].replace("[[[[[[", "").replace("]]]]]]",""),
- 'rank': h[5]} for h in hits[:-1]],
- })
+ return render(request, 'search/sitesearch.html', {
+ 'suburl': suburl,
+ 'allsites': allsites,
+ 'hitcount': totalhits,
+ 'firsthit': firsthit,
+ 'lasthit': min(totalhits, firsthit+hitsperpage-1),
+ 'query': request.GET['q'],
+ 'pagelinks': " ".join(
+ generate_pagelinks(pagenum,
+ totalhits / hitsperpage + 1,
+ querystr)),
+ 'hits': [{
+ 'title': h[3],
+ 'url': "%s%s" % (h[1], h[2]),
+ 'abstract': h[4].replace("[[[[[[", "").replace("]]]]]]",""),
+ 'rank': h[5]} for h in hits[:-1]],
+ })
diff --git a/pgweb/security/admin.py b/pgweb/security/admin.py
index 9863a764..977a407b 100644
--- a/pgweb/security/admin.py
+++ b/pgweb/security/admin.py
@@ -7,59 +7,59 @@ from pgweb.news.models import NewsArticle
from models import SecurityPatch, SecurityPatchVersion
class VersionChoiceField(forms.ModelChoiceField):
- def label_from_instance(self, obj):
- return obj.numtree
+ def label_from_instance(self, obj):
+ return obj.numtree
class SecurityPatchVersionAdminForm(forms.ModelForm):
- model = SecurityPatchVersion
- version = VersionChoiceField(queryset=Version.objects.filter(tree__gt=0), required=True)
+ model = SecurityPatchVersion
+ version = VersionChoiceField(queryset=Version.objects.filter(tree__gt=0), required=True)
class SecurityPatchVersionAdmin(admin.TabularInline):
- model = SecurityPatchVersion
- extra = 2
- form = SecurityPatchVersionAdminForm
+ model = SecurityPatchVersion
+ extra = 2
+ form = SecurityPatchVersionAdminForm
class SecurityPatchForm(forms.ModelForm):
- model = SecurityPatch
- newspost = forms.ModelChoiceField(queryset=NewsArticle.objects.filter(org=settings.PGDG_ORG_ID), required=False)
+ model = SecurityPatch
+ newspost = forms.ModelChoiceField(queryset=NewsArticle.objects.filter(org=settings.PGDG_ORG_ID), required=False)
- def clean(self):
- d = super(SecurityPatchForm, self).clean()
- vecs = [v for k,v in d.items() if k.startswith('vector_')]
- empty = [v for v in vecs if v == '']
- if len(empty) != len(vecs) and len(empty) != 0:
- for k in d.keys():
- if k.startswith('vector_'):
- self.add_error(k, 'Either specify all vector values or none')
- return d
+ def clean(self):
+ d = super(SecurityPatchForm, self).clean()
+ vecs = [v for k,v in d.items() if k.startswith('vector_')]
+ empty = [v for v in vecs if v == '']
+ if len(empty) != len(vecs) and len(empty) != 0:
+ for k in d.keys():
+ if k.startswith('vector_'):
+ self.add_error(k, 'Either specify all vector values or none')
+ return d
class SecurityPatchAdmin(admin.ModelAdmin):
- form = SecurityPatchForm
- exclude = ['cvenumber', ]
- inlines = (SecurityPatchVersionAdmin, )
- list_display = ('cve', 'public', 'cvssscore', 'legacyscore', 'cvssvector', 'description')
- actions = ['make_public', 'make_unpublic']
+ form = SecurityPatchForm
+ exclude = ['cvenumber', ]
+ inlines = (SecurityPatchVersionAdmin, )
+ list_display = ('cve', 'public', 'cvssscore', 'legacyscore', 'cvssvector', 'description')
+ actions = ['make_public', 'make_unpublic']
- def cvssvector(self, obj):
- if not obj.cvssvector:
- return ''
- return '{0}'.format(
- obj.cvssvector)
- cvssvector.allow_tags = True
- cvssvector.short_description = "CVSS vector link"
+ def cvssvector(self, obj):
+ if not obj.cvssvector:
+ return ''
+ return '{0}'.format(
+ obj.cvssvector)
+ cvssvector.allow_tags = True
+ cvssvector.short_description = "CVSS vector link"
- def cvssscore(self, obj):
- return obj.cvssscore
- cvssscore.short_description = "CVSS score"
+ def cvssscore(self, obj):
+ return obj.cvssscore
+ cvssscore.short_description = "CVSS score"
- def make_public(self, request, queryset):
- self.do_public(queryset, True)
- def make_unpublic(self, request, queryset):
- self.do_public(queryset, False)
- def do_public(self, queryset, val):
- # Intentionally loop and do manually, so we generate change notices
- for p in queryset.all():
- p.public=val
- p.save()
+ def make_public(self, request, queryset):
+ self.do_public(queryset, True)
+ def make_unpublic(self, request, queryset):
+ self.do_public(queryset, False)
+ def do_public(self, queryset, val):
+ # Intentionally loop and do manually, so we generate change notices
+ for p in queryset.all():
+ p.public=val
+ p.save()
admin.site.register(SecurityPatch, SecurityPatchAdmin)
diff --git a/pgweb/security/management/commands/update_cve_links.py b/pgweb/security/management/commands/update_cve_links.py
index 159b124e..e74c072a 100644
--- a/pgweb/security/management/commands/update_cve_links.py
+++ b/pgweb/security/management/commands/update_cve_links.py
@@ -14,24 +14,24 @@ from pgweb.util.misc import varnish_purge
import requests
class Command(BaseCommand):
- help = 'Update CVE links'
+ help = 'Update CVE links'
- def handle(self, *args, **options):
- with transaction.atomic():
- newly_visible = []
- for s in SecurityPatch.objects.filter(cve_visible=False):
- r = requests.get(s.cvelink, timeout=10)
- if r.status_code == 200:
- newly_visible.append(s.cve)
- s.cve_visible = True
- s.save()
- if newly_visible:
- send_simple_mail(settings.NOTIFICATION_FROM,
- settings.NOTIFICATION_EMAIL,
- "CVE entries made public",
- """The following CVE entries are now public upstream,
+ def handle(self, *args, **options):
+ with transaction.atomic():
+ newly_visible = []
+ for s in SecurityPatch.objects.filter(cve_visible=False):
+ r = requests.get(s.cvelink, timeout=10)
+ if r.status_code == 200:
+ newly_visible.append(s.cve)
+ s.cve_visible = True
+ s.save()
+ if newly_visible:
+ send_simple_mail(settings.NOTIFICATION_FROM,
+ settings.NOTIFICATION_EMAIL,
+ "CVE entries made public",
+ """The following CVE entries are now public upstream,
and have been made visible on the website.
{0}
""".format("\n".join(newly_visible)))
- map(varnish_purge, SecurityPatch.purge_urls)
+ map(varnish_purge, SecurityPatch.purge_urls)
diff --git a/pgweb/security/migrations/0002_cve_visible.py b/pgweb/security/migrations/0002_cve_visible.py
index 03661226..c6bc6ad0 100644
--- a/pgweb/security/migrations/0002_cve_visible.py
+++ b/pgweb/security/migrations/0002_cve_visible.py
@@ -16,7 +16,7 @@ class Migration(migrations.Migration):
name='cve_visible',
field=models.BooleanField(default=True),
),
- migrations.AlterField(
+ migrations.AlterField(
model_name='securitypatch',
name='cve_visible',
field=models.BooleanField(default=False),
diff --git a/pgweb/security/models.py b/pgweb/security/models.py
index 0c6e7bab..34166fca 100644
--- a/pgweb/security/models.py
+++ b/pgweb/security/models.py
@@ -11,106 +11,106 @@ import cvss
vector_choices = {k:list(v.items()) for k,v in cvss.constants3.METRICS_VALUE_NAMES.items()}
component_choices = (
- ('core server', 'Core server product'),
- ('client', 'Client library or application only'),
- ('contrib module', 'Contrib module only'),
- ('client contrib module', 'Client contrib module only'),
- ('packaging', 'Packaging, e.g. installers or RPM'),
- ('other', 'Other'),
+ ('core server', 'Core server product'),
+ ('client', 'Client library or application only'),
+ ('contrib module', 'Contrib module only'),
+ ('client contrib module', 'Client contrib module only'),
+ ('packaging', 'Packaging, e.g. installers or RPM'),
+ ('other', 'Other'),
)
re_cve = re.compile('^(\d{4})-(\d{4,5})$')
def cve_validator(val):
- if not re_cve.match(val):
- raise ValidationError("Enter CVE in format 0000-0000 without the CVE text")
+ if not re_cve.match(val):
+ raise ValidationError("Enter CVE in format 0000-0000 without the CVE text")
def other_vectors_validator(val):
- if val != val.upper():
- raise ValidationError("Vector must be uppercase")
+ if val != val.upper():
+ raise ValidationError("Vector must be uppercase")
- try:
- for vector in val.split('/'):
- k,v = vector.split(':')
- if not cvss.constants3.METRICS_VALUES.has_key(k):
- raise ValidationError("Metric {0} is unknown".format(k))
- if k in ('AV', 'AC', 'PR', 'UI', 'S', 'C', 'I', 'A'):
- raise ValidationError("Metric {0} must be specified in the dropdowns".format(k))
- if not cvss.constants3.METRICS_VALUES[k].has_key(v):
- raise ValidationError("Metric {0} has unknown value {1}. Valind ones are: {2}".format(
- k,v,
- ", ".join(cvss.constants3.METRICS_VALUES[k].keys()),
- ))
- except ValidationError:
- raise
- except Exception, e:
- raise ValidationError("Failed to parse vectors: %s" % e)
+ try:
+ for vector in val.split('/'):
+ k,v = vector.split(':')
+ if not cvss.constants3.METRICS_VALUES.has_key(k):
+ raise ValidationError("Metric {0} is unknown".format(k))
+ if k in ('AV', 'AC', 'PR', 'UI', 'S', 'C', 'I', 'A'):
+ raise ValidationError("Metric {0} must be specified in the dropdowns".format(k))
+ if not cvss.constants3.METRICS_VALUES[k].has_key(v):
+                raise ValidationError("Metric {0} has unknown value {1}. Valid ones are: {2}".format(
+ k,v,
+ ", ".join(cvss.constants3.METRICS_VALUES[k].keys()),
+ ))
+ except ValidationError:
+ raise
+ except Exception, e:
+ raise ValidationError("Failed to parse vectors: %s" % e)
class SecurityPatch(models.Model):
- public = models.BooleanField(null=False, blank=False, default=False)
- newspost = models.ForeignKey(NewsArticle, null=True, blank=True)
- cve = models.CharField(max_length=32, null=False, blank=True, validators=[cve_validator,])
- cve_visible = models.BooleanField(null=False, blank=False, default=False)
- cvenumber = models.IntegerField(null=False, blank=False, db_index=True)
- detailslink = models.URLField(null=False, blank=True)
- description = models.TextField(null=False, blank=False)
- component = models.CharField(max_length=32, null=False, blank=False, help_text="If multiple components, choose the most critical one", choices=component_choices)
+ public = models.BooleanField(null=False, blank=False, default=False)
+ newspost = models.ForeignKey(NewsArticle, null=True, blank=True)
+ cve = models.CharField(max_length=32, null=False, blank=True, validators=[cve_validator,])
+ cve_visible = models.BooleanField(null=False, blank=False, default=False)
+ cvenumber = models.IntegerField(null=False, blank=False, db_index=True)
+ detailslink = models.URLField(null=False, blank=True)
+ description = models.TextField(null=False, blank=False)
+ component = models.CharField(max_length=32, null=False, blank=False, help_text="If multiple components, choose the most critical one", choices=component_choices)
- versions = models.ManyToManyField(Version, through='SecurityPatchVersion')
+ versions = models.ManyToManyField(Version, through='SecurityPatchVersion')
- vector_av = models.CharField(max_length=1, null=False, blank=True, verbose_name="Attack Vector", choices=vector_choices['AV'])
- vector_ac = models.CharField(max_length=1, null=False, blank=True, verbose_name="Attack Complexity", choices=vector_choices['AC'])
- vector_pr = models.CharField(max_length=1, null=False, blank=True, verbose_name="Privileges Required", choices=vector_choices['PR'])
- vector_ui = models.CharField(max_length=1, null=False, blank=True, verbose_name="User Interaction", choices=vector_choices['UI'])
- vector_s = models.CharField(max_length=1, null=False, blank=True, verbose_name="Scope", choices=vector_choices['S'])
- vector_c = models.CharField(max_length=1, null=False, blank=True, verbose_name="Confidentiality Impact", choices=vector_choices['C'])
- vector_i = models.CharField(max_length=1, null=False, blank=True, verbose_name="Integrity Impact", choices=vector_choices['I'])
- vector_a = models.CharField(max_length=1, null=False, blank=True, verbose_name="Availability Impact", choices=vector_choices['A'])
- legacyscore = models.CharField(max_length=1, null=False, blank=True, verbose_name='Legacy score', choices=(('A', 'A'),('B','B'),('C','C'),('D','D')))
+ vector_av = models.CharField(max_length=1, null=False, blank=True, verbose_name="Attack Vector", choices=vector_choices['AV'])
+ vector_ac = models.CharField(max_length=1, null=False, blank=True, verbose_name="Attack Complexity", choices=vector_choices['AC'])
+ vector_pr = models.CharField(max_length=1, null=False, blank=True, verbose_name="Privileges Required", choices=vector_choices['PR'])
+ vector_ui = models.CharField(max_length=1, null=False, blank=True, verbose_name="User Interaction", choices=vector_choices['UI'])
+ vector_s = models.CharField(max_length=1, null=False, blank=True, verbose_name="Scope", choices=vector_choices['S'])
+ vector_c = models.CharField(max_length=1, null=False, blank=True, verbose_name="Confidentiality Impact", choices=vector_choices['C'])
+ vector_i = models.CharField(max_length=1, null=False, blank=True, verbose_name="Integrity Impact", choices=vector_choices['I'])
+ vector_a = models.CharField(max_length=1, null=False, blank=True, verbose_name="Availability Impact", choices=vector_choices['A'])
+ legacyscore = models.CharField(max_length=1, null=False, blank=True, verbose_name='Legacy score', choices=(('A', 'A'),('B','B'),('C','C'),('D','D')))
- purge_urls = ('/support/security/', )
+ purge_urls = ('/support/security/', )
- def save(self, force_insert=False, force_update=False):
- # Calculate a number from the CVE, that we can use to sort by. We need to
- # do this, because CVEs can have 4 or 5 digit second parts...
- if self.cve == '':
- self.cvenumber = 0
- else:
- m = re_cve.match(self.cve)
- if not m:
- raise ValidationError("Invalid CVE, should not get here!")
- self.cvenumber = 100000 * int(m.groups(0)[0]) + int(m.groups(0)[1])
- super(SecurityPatch, self).save(force_insert, force_update)
+ def save(self, force_insert=False, force_update=False):
+ # Calculate a number from the CVE, that we can use to sort by. We need to
+ # do this, because CVEs can have 4 or 5 digit second parts...
+ if self.cve == '':
+ self.cvenumber = 0
+ else:
+ m = re_cve.match(self.cve)
+ if not m:
+ raise ValidationError("Invalid CVE, should not get here!")
+ self.cvenumber = 100000 * int(m.groups(0)[0]) + int(m.groups(0)[1])
+ super(SecurityPatch, self).save(force_insert, force_update)
- def __unicode__(self):
- return self.cve
+ def __unicode__(self):
+ return self.cve
- @property
- def cvssvector(self):
- if not self.vector_av:
- return None
- s = 'AV:{0}/AC:{1}/PR:{2}/UI:{3}/S:{4}/C:{5}/I:{6}/A:{7}'.format(
- self.vector_av, self.vector_ac, self.vector_pr, self.vector_ui,
- self.vector_s, self.vector_c, self.vector_i, self.vector_a)
- return s
+ @property
+ def cvssvector(self):
+ if not self.vector_av:
+ return None
+ s = 'AV:{0}/AC:{1}/PR:{2}/UI:{3}/S:{4}/C:{5}/I:{6}/A:{7}'.format(
+ self.vector_av, self.vector_ac, self.vector_pr, self.vector_ui,
+ self.vector_s, self.vector_c, self.vector_i, self.vector_a)
+ return s
- @property
- def cvssscore(self):
- try:
- c = cvss.CVSS3("CVSS:3.0/" + self.cvssvector)
- return c.base_score
- except Exception:
- return -1
+ @property
+ def cvssscore(self):
+ try:
+ c = cvss.CVSS3("CVSS:3.0/" + self.cvssvector)
+ return c.base_score
+ except Exception:
+ return -1
- @property
- def cvelink(self):
- return "https://access.redhat.com/security/cve/CVE-{0}".format(self.cve)
+ @property
+ def cvelink(self):
+ return "https://access.redhat.com/security/cve/CVE-{0}".format(self.cve)
- class Meta:
- verbose_name_plural = 'Security patches'
- ordering = ('-cvenumber',)
+ class Meta:
+ verbose_name_plural = 'Security patches'
+ ordering = ('-cvenumber',)
class SecurityPatchVersion(models.Model):
- patch = models.ForeignKey(SecurityPatch, null=False, blank=False)
- version = models.ForeignKey(Version, null=False, blank=False)
- fixed_minor = models.IntegerField(null=False, blank=False)
+ patch = models.ForeignKey(SecurityPatch, null=False, blank=False)
+ version = models.ForeignKey(Version, null=False, blank=False)
+ fixed_minor = models.IntegerField(null=False, blank=False)
diff --git a/pgweb/security/views.py b/pgweb/security/views.py
index 86dfa3e4..eeba3663 100644
--- a/pgweb/security/views.py
+++ b/pgweb/security/views.py
@@ -6,25 +6,25 @@ from pgweb.core.models import Version
from models import SecurityPatch
def GetPatchesList(filt):
- return SecurityPatch.objects.raw("SELECT p.*, array_agg(CASE WHEN v.tree >= 10 THEN v.tree::int ELSE v.tree END ORDER BY v.tree) AS affected, array_agg(CASE WHEN v.tree >= 10 THEN v.tree::int ELSE v.tree END || '.' || fixed_minor ORDER BY v.tree) AS fixed FROM security_securitypatch p INNER JOIN security_securitypatchversion sv ON p.id=sv.patch_id INNER JOIN core_version v ON v.id=sv.version_id WHERE p.public AND {0} GROUP BY p.id ORDER BY cvenumber DESC".format(filt))
+ return SecurityPatch.objects.raw("SELECT p.*, array_agg(CASE WHEN v.tree >= 10 THEN v.tree::int ELSE v.tree END ORDER BY v.tree) AS affected, array_agg(CASE WHEN v.tree >= 10 THEN v.tree::int ELSE v.tree END || '.' || fixed_minor ORDER BY v.tree) AS fixed FROM security_securitypatch p INNER JOIN security_securitypatchversion sv ON p.id=sv.patch_id INNER JOIN core_version v ON v.id=sv.version_id WHERE p.public AND {0} GROUP BY p.id ORDER BY cvenumber DESC".format(filt))
def _list_patches(request, filt):
- patches = GetPatchesList(filt)
+ patches = GetPatchesList(filt)
- return render_pgweb(request, 'support', 'security/security.html', {
- 'patches': patches,
- 'supported': Version.objects.filter(supported=True),
- 'unsupported': Version.objects.filter(supported=False, tree__gt=0).extra(
- where=["EXISTS (SELECT 1 FROM security_securitypatchversion pv WHERE pv.version_id=core_version.id)"],
- ),
- })
+ return render_pgweb(request, 'support', 'security/security.html', {
+ 'patches': patches,
+ 'supported': Version.objects.filter(supported=True),
+ 'unsupported': Version.objects.filter(supported=False, tree__gt=0).extra(
+ where=["EXISTS (SELECT 1 FROM security_securitypatchversion pv WHERE pv.version_id=core_version.id)"],
+ ),
+ })
def index(request):
- # Show all supported versions
- return _list_patches(request, "v.supported")
+ # Show all supported versions
+ return _list_patches(request, "v.supported")
def version(request, numtree):
- version = get_object_or_404(Version, tree=numtree)
- # It's safe to pass in the value since we get it from the module, not from
- # the actual querystring.
- return _list_patches(request, "EXISTS (SELECT 1 FROM security_securitypatchversion svv WHERE svv.version_id={0} AND svv.patch_id=p.id)".format(version.id))
+ version = get_object_or_404(Version, tree=numtree)
+ # It's safe to pass in the value since we get it from the module, not from
+ # the actual querystring.
+ return _list_patches(request, "EXISTS (SELECT 1 FROM security_securitypatchversion svv WHERE svv.version_id={0} AND svv.patch_id=p.id)".format(version.id))
diff --git a/pgweb/settings.py b/pgweb/settings.py
index 11789ecc..68a86f06 100644
--- a/pgweb/settings.py
+++ b/pgweb/settings.py
@@ -3,17 +3,17 @@
DEBUG = False
ADMINS = (
- ('PostgreSQL Webmaster', 'webmaster@postgresql.org'),
+ ('PostgreSQL Webmaster', 'webmaster@postgresql.org'),
)
MANAGERS = ADMINS
DATABASES={
- 'default': {
- 'ENGINE': 'django.db.backends.postgresql_psycopg2',
- 'NAME': 'pgweb',
- }
- }
+ 'default': {
+ 'ENGINE': 'django.db.backends.postgresql_psycopg2',
+ 'NAME': 'pgweb',
+ }
+ }
# Local time zone for this installation. Choices can be found here:
# http://en.wikipedia.org/wiki/List_of_tz_zones_by_name
@@ -43,7 +43,7 @@ MEDIA_URL = ''
STATIC_URL = '/media/'
STATICFILES_DIRS = (
- 'media/',
+ 'media/',
)
# Make this unique, and don't share it with anybody.
@@ -52,7 +52,7 @@ SECRET_KEY = 'REALLYCHANGETHISINSETTINGS_LOCAL.PY'
MIDDLEWARE_CLASSES = [
'django.middleware.common.CommonMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
- 'django.contrib.messages.middleware.MessageMiddleware',
+ 'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'pgweb.util.middleware.PgMiddleware',
@@ -63,21 +63,21 @@ CSRF_FAILURE_VIEW='pgweb.core.views.csrf_failure'
ROOT_URLCONF = 'pgweb.urls'
TEMPLATES = [{
- 'BACKEND': 'django.template.backends.django.DjangoTemplates',
- 'DIRS': ['templates', ],
- 'OPTIONS': {
- 'context_processors': [
- 'django.contrib.auth.context_processors.auth',
- 'django.contrib.messages.context_processors.messages',
- 'django.template.context_processors.media',
- 'pgweb.util.contexts.PGWebContextProcessor',
- ],
- 'loaders': [
- 'pgweb.util.templateloader.TrackingTemplateLoader',
- 'django.template.loaders.filesystem.Loader',
- 'django.template.loaders.app_directories.Loader',
- ],
- },
+ 'BACKEND': 'django.template.backends.django.DjangoTemplates',
+ 'DIRS': ['templates', ],
+ 'OPTIONS': {
+ 'context_processors': [
+ 'django.contrib.auth.context_processors.auth',
+ 'django.contrib.messages.context_processors.messages',
+ 'django.template.context_processors.media',
+ 'pgweb.util.contexts.PGWebContextProcessor',
+ ],
+ 'loaders': [
+ 'pgweb.util.templateloader.TrackingTemplateLoader',
+ 'django.template.loaders.filesystem.Loader',
+ 'django.template.loaders.app_directories.Loader',
+ ],
+ },
}]
LOGIN_URL='/account/login/'
@@ -95,7 +95,7 @@ INSTALLED_APPS = [
'django.contrib.sessions',
'django.contrib.admin',
'django_markwhat',
- 'django.contrib.staticfiles',
+ 'django.contrib.staticfiles',
'pgweb.selectable',
'pgweb.core',
'pgweb.mailqueue',
@@ -113,7 +113,7 @@ INSTALLED_APPS = [
'pgweb.survey',
'pgweb.misc',
'pgweb.featurematrix',
- 'pgweb.search',
+ 'pgweb.search',
'pgweb.pugs',
]
@@ -151,7 +151,7 @@ DOCSREPORT_EMAIL="someone@example.com" # Address to pgsql-docs l
DOCSREPORT_NOREPLY_EMAIL="someone-noreply@example.com" # Address to no-reply pgsql-docs address
FRONTEND_SERVERS=() # A tuple containing the *IP addresses* of all the
# varnish frontend servers in use.
-FTP_MASTERS=() # A tuple containing the *IP addresses* of all machines
+FTP_MASTERS=() # A tuple containing the *IP addresses* of all machines
# trusted to upload ftp structure data
VARNISH_PURGERS=() # Extra servers that can do varnish purges through our queue
DO_ESI=False # Generate ESI tags
diff --git a/pgweb/sponsors/models.py b/pgweb/sponsors/models.py
index 6c0031ab..e5d48d83 100644
--- a/pgweb/sponsors/models.py
+++ b/pgweb/sponsors/models.py
@@ -3,47 +3,47 @@ from django.db import models
from pgweb.core.models import Country
class SponsorType(models.Model):
- typename = models.CharField(max_length=32, null=False, blank=False)
- description = models.TextField(null=False, blank=False)
- sortkey = models.IntegerField(null=False, default=10)
- # sortkey==0 --> do not show in list
+ typename = models.CharField(max_length=32, null=False, blank=False)
+ description = models.TextField(null=False, blank=False)
+ sortkey = models.IntegerField(null=False, default=10)
+ # sortkey==0 --> do not show in list
- purge_urls = ('/about/servers/', '/about/sponsors/', )
+ purge_urls = ('/about/servers/', '/about/sponsors/', )
- def __unicode__(self):
- return self.typename
+ def __unicode__(self):
+ return self.typename
- class Meta:
- ordering = ('sortkey', )
+ class Meta:
+ ordering = ('sortkey', )
class Sponsor(models.Model):
- sponsortype = models.ForeignKey(SponsorType, null=False)
- name = models.CharField(max_length=128, null=False, blank=False)
- url = models.URLField(null=False, blank=False)
- logoname = models.CharField(max_length=64, null=False, blank=False)
- country = models.ForeignKey(Country, null=False)
+ sponsortype = models.ForeignKey(SponsorType, null=False)
+ name = models.CharField(max_length=128, null=False, blank=False)
+ url = models.URLField(null=False, blank=False)
+ logoname = models.CharField(max_length=64, null=False, blank=False)
+ country = models.ForeignKey(Country, null=False)
- purge_urls = ('/about/sponsors/', '/about/servers/', )
+ purge_urls = ('/about/sponsors/', '/about/servers/', )
- def __unicode__(self):
- return self.name
+ def __unicode__(self):
+ return self.name
- class Meta:
- ordering = ('name', )
+ class Meta:
+ ordering = ('name', )
class Server(models.Model):
- name = models.CharField(max_length=32, null=False, blank=False)
- sponsors = models.ManyToManyField(Sponsor)
- dedicated = models.BooleanField(null=False, default=True)
- performance = models.CharField(max_length=128, null=False, blank=False)
- os = models.CharField(max_length=32, null=False, blank=False)
- location = models.CharField(max_length=128, null=False, blank=False)
- usage = models.TextField(null=False, blank=False)
+ name = models.CharField(max_length=32, null=False, blank=False)
+ sponsors = models.ManyToManyField(Sponsor)
+ dedicated = models.BooleanField(null=False, default=True)
+ performance = models.CharField(max_length=128, null=False, blank=False)
+ os = models.CharField(max_length=32, null=False, blank=False)
+ location = models.CharField(max_length=128, null=False, blank=False)
+ usage = models.TextField(null=False, blank=False)
- purge_urls = ('/about/servers/', )
+ purge_urls = ('/about/servers/', )
- def __unicode__(self):
- return self.name
+ def __unicode__(self):
+ return self.name
- class Meta:
- ordering = ('name', )
+ class Meta:
+ ordering = ('name', )
diff --git a/pgweb/sponsors/struct.py b/pgweb/sponsors/struct.py
index 99ce243c..d128f35d 100644
--- a/pgweb/sponsors/struct.py
+++ b/pgweb/sponsors/struct.py
@@ -1,3 +1,3 @@
def get_struct():
- yield ('about/sponsors/', None)
- yield ('about/servers/', None)
+ yield ('about/sponsors/', None)
+ yield ('about/servers/', None)
diff --git a/pgweb/sponsors/views.py b/pgweb/sponsors/views.py
index 7b676f76..15bef9d4 100644
--- a/pgweb/sponsors/views.py
+++ b/pgweb/sponsors/views.py
@@ -5,13 +5,13 @@ from models import Sponsor, Server
@cache(minutes=30)
def sponsors(request):
- sponsors = Sponsor.objects.select_related().filter(sponsortype__sortkey__gt=0).order_by('sponsortype__sortkey' ,'?')
- return render_pgweb(request, 'about', 'sponsors/sponsors.html', {
- 'sponsors': sponsors,
- })
+ sponsors = Sponsor.objects.select_related().filter(sponsortype__sortkey__gt=0).order_by('sponsortype__sortkey' ,'?')
+ return render_pgweb(request, 'about', 'sponsors/sponsors.html', {
+ 'sponsors': sponsors,
+ })
def servers(request):
- servers = Server.objects.select_related().all()
- return render_pgweb(request, 'about', 'sponsors/servers.html', {
- 'servers': servers,
- })
+ servers = Server.objects.select_related().all()
+ return render_pgweb(request, 'about', 'sponsors/servers.html', {
+ 'servers': servers,
+ })
diff --git a/pgweb/survey/admin.py b/pgweb/survey/admin.py
index 23d62319..cdd9c4cd 100644
--- a/pgweb/survey/admin.py
+++ b/pgweb/survey/admin.py
@@ -2,12 +2,12 @@ from django.contrib import admin
from models import Survey, SurveyLock, SurveyAnswer
class SurveyAdmin(admin.ModelAdmin):
- list_display = ('question','posted','current',)
- ordering = ('-posted',)
+ list_display = ('question','posted','current',)
+ ordering = ('-posted',)
class SurveyAnswerAdmin(admin.ModelAdmin):
- list_display = ('survey','tot1','tot2','tot3','tot4','tot5','tot6','tot7','tot8')
- ordering = ('-survey__posted',)
+ list_display = ('survey','tot1','tot2','tot3','tot4','tot5','tot6','tot7','tot8')
+ ordering = ('-survey__posted',)
admin.site.register(Survey, SurveyAdmin)
admin.site.register(SurveyLock)
diff --git a/pgweb/survey/models.py b/pgweb/survey/models.py
index 9ea171ae..72cfe7c7 100644
--- a/pgweb/survey/models.py
+++ b/pgweb/survey/models.py
@@ -2,94 +2,94 @@ from django.db import models
# internal text/value object
class SurveyQuestion(object):
- def __init__(self, value, text):
- self.value = value
- self.text = text
+ def __init__(self, value, text):
+ self.value = value
+ self.text = text
class SurveyAnswerValues(object):
- def __init__(self, option, votes, votespercent):
- self.option = option
- self.votes = votes
- self.votespercent = votespercent
+ def __init__(self, option, votes, votespercent):
+ self.option = option
+ self.votes = votes
+ self.votespercent = votespercent
class Survey(models.Model):
- question = models.CharField(max_length=500, null=False, blank=False)
- opt1 = models.CharField(max_length=500, null=False, blank=False)
- opt2 = models.CharField(max_length=500, null=False, blank=False)
- opt3 = models.CharField(max_length=500, null=False, blank=True)
- opt4 = models.CharField(max_length=500, null=False, blank=True)
- opt5 = models.CharField(max_length=500, null=False, blank=True)
- opt6 = models.CharField(max_length=500, null=False, blank=True)
- opt7 = models.CharField(max_length=500, null=False, blank=True)
- opt8 = models.CharField(max_length=500, null=False, blank=True)
- posted = models.DateTimeField(null=False, auto_now_add=True)
- current = models.BooleanField(null=False, default=False)
+ question = models.CharField(max_length=500, null=False, blank=False)
+ opt1 = models.CharField(max_length=500, null=False, blank=False)
+ opt2 = models.CharField(max_length=500, null=False, blank=False)
+ opt3 = models.CharField(max_length=500, null=False, blank=True)
+ opt4 = models.CharField(max_length=500, null=False, blank=True)
+ opt5 = models.CharField(max_length=500, null=False, blank=True)
+ opt6 = models.CharField(max_length=500, null=False, blank=True)
+ opt7 = models.CharField(max_length=500, null=False, blank=True)
+ opt8 = models.CharField(max_length=500, null=False, blank=True)
+ posted = models.DateTimeField(null=False, auto_now_add=True)
+ current = models.BooleanField(null=False, default=False)
- purge_urls = ('/community/survey', '/community/$')
+ purge_urls = ('/community/survey', '/community/$')
- def __unicode__(self):
- return self.question
+ def __unicode__(self):
+ return self.question
- @property
- def questions(self):
- for i in range (1,9):
- v = getattr(self, "opt%s" % i)
- if not v: break
- yield SurveyQuestion(i, v)
+ @property
+ def questions(self):
+ for i in range (1,9):
+ v = getattr(self, "opt%s" % i)
+ if not v: break
+ yield SurveyQuestion(i, v)
- @property
- def answers(self):
- if not hasattr(self, "_answers"):
- self._answers = SurveyAnswer.objects.get_or_create(survey=self)[0]
- return self._answers
+ @property
+ def answers(self):
+ if not hasattr(self, "_answers"):
+ self._answers = SurveyAnswer.objects.get_or_create(survey=self)[0]
+ return self._answers
- @property
- def completeanswers(self):
- for a in self._get_complete_answers():
- yield SurveyAnswerValues(a[0], a[1], self.totalvotes>0 and (100*a[1]/self.totalvotes) or 0)
+ @property
+ def completeanswers(self):
+ for a in self._get_complete_answers():
+ yield SurveyAnswerValues(a[0], a[1], self.totalvotes>0 and (100*a[1]/self.totalvotes) or 0)
- @property
- def totalvotes(self):
- if not hasattr(self,"_totalvotes"):
- self._totalvotes = 0
- for a in self._get_complete_answers():
- self._totalvotes = self._totalvotes + a[1]
- return self._totalvotes
+ @property
+ def totalvotes(self):
+ if not hasattr(self,"_totalvotes"):
+ self._totalvotes = 0
+ for a in self._get_complete_answers():
+ self._totalvotes = self._totalvotes + a[1]
+ return self._totalvotes
- def _get_complete_answers(self):
- for i in range(1,9):
- q = getattr(self, "opt%s" % i)
- if not q: break
- n = getattr(self.answers, "tot%s" % i)
- yield (q,n)
+ def _get_complete_answers(self):
+ for i in range(1,9):
+ q = getattr(self, "opt%s" % i)
+ if not q: break
+ n = getattr(self.answers, "tot%s" % i)
+ yield (q,n)
- def save(self):
- # Make sure only one survey at a time can be the current one
- # (there may be some small race conditions here, but the likelihood
- # that two admins are editing the surveys at the same time...)
- if self.current:
- previous = Survey.objects.filter(current=True)
- for p in previous:
- if not p == self:
- p.current = False
- p.save() # primary key check avoids recursion
+ def save(self):
+ # Make sure only one survey at a time can be the current one
+ # (there may be some small race conditions here, but the likelihood
+ # that two admins are editing the surveys at the same time...)
+ if self.current:
+ previous = Survey.objects.filter(current=True)
+ for p in previous:
+ if not p == self:
+ p.current = False
+ p.save() # primary key check avoids recursion
- # Now that we've made any previously current ones non-current, we are
- # free to save this one.
- super(Survey, self).save()
+ # Now that we've made any previously current ones non-current, we are
+ # free to save this one.
+ super(Survey, self).save()
class SurveyAnswer(models.Model):
- survey = models.OneToOneField(Survey, null=False, blank=False, primary_key=True)
- tot1 = models.IntegerField(null=False, default=0)
- tot2 = models.IntegerField(null=False, default=0)
- tot3 = models.IntegerField(null=False, default=0)
- tot4 = models.IntegerField(null=False, default=0)
- tot5 = models.IntegerField(null=False, default=0)
- tot6 = models.IntegerField(null=False, default=0)
- tot7 = models.IntegerField(null=False, default=0)
- tot8 = models.IntegerField(null=False, default=0)
+ survey = models.OneToOneField(Survey, null=False, blank=False, primary_key=True)
+ tot1 = models.IntegerField(null=False, default=0)
+ tot2 = models.IntegerField(null=False, default=0)
+ tot3 = models.IntegerField(null=False, default=0)
+ tot4 = models.IntegerField(null=False, default=0)
+ tot5 = models.IntegerField(null=False, default=0)
+ tot6 = models.IntegerField(null=False, default=0)
+ tot7 = models.IntegerField(null=False, default=0)
+ tot8 = models.IntegerField(null=False, default=0)
- purge_urls = ('/community/survey', )
+ purge_urls = ('/community/survey', )
class SurveyLock(models.Model):
- ipaddr = models.GenericIPAddressField(null=False, blank=False)
- time = models.DateTimeField(null=False, auto_now_add=True)
+ ipaddr = models.GenericIPAddressField(null=False, blank=False)
+ time = models.DateTimeField(null=False, auto_now_add=True)
diff --git a/pgweb/survey/views.py b/pgweb/survey/views.py
index 09dd0738..ba3c09a3 100644
--- a/pgweb/survey/views.py
+++ b/pgweb/survey/views.py
@@ -11,52 +11,52 @@ from pgweb.util.helpers import HttpServerError
from models import Survey, SurveyAnswer, SurveyLock
def results(request, surveyid, junk=None):
- survey = get_object_or_404(Survey, pk=surveyid)
- surveylist = Survey.objects.all().order_by('-posted')
+ survey = get_object_or_404(Survey, pk=surveyid)
+ surveylist = Survey.objects.all().order_by('-posted')
- return render_pgweb(request, 'community', 'survey/results.html', {
- 'survey': survey,
- 'surveylist': surveylist,
- })
+ return render_pgweb(request, 'community', 'survey/results.html', {
+ 'survey': survey,
+ 'surveylist': surveylist,
+ })
# Served over insecure HTTP, the Varnish proxy strips cookies
@csrf_exempt
def vote(request, surveyid):
- surv = get_object_or_404(Survey, pk=surveyid)
+ surv = get_object_or_404(Survey, pk=surveyid)
- # Check that we have a valid answer number
- try:
- ansnum = int(request.POST['answer'])
- if ansnum < 1 or ansnum > 8:
- return HttpServerError(request, "Invalid answer")
- except:
- # When no answer is given, redirect to results instead
- return HttpResponseRedirect("/community/survey/%s-%s" % (surv.id, slugify(surv.question)))
- attrname = "tot%s" % ansnum
+ # Check that we have a valid answer number
+ try:
+ ansnum = int(request.POST['answer'])
+ if ansnum < 1 or ansnum > 8:
+ return HttpServerError(request, "Invalid answer")
+ except:
+ # When no answer is given, redirect to results instead
+ return HttpResponseRedirect("/community/survey/%s-%s" % (surv.id, slugify(surv.question)))
+ attrname = "tot%s" % ansnum
- # Do IP based locking...
- addr = get_client_ip(request)
+ # Do IP based locking...
+ addr = get_client_ip(request)
- # Clean out any old junk
- curs = connection.cursor()
- curs.execute("DELETE FROM survey_surveylock WHERE (\"time\" + '15 minutes') < now()")
+ # Clean out any old junk
+ curs = connection.cursor()
+ curs.execute("DELETE FROM survey_surveylock WHERE (\"time\" + '15 minutes') < now()")
- # Check if we are locked
- lock = SurveyLock.objects.filter(ipaddr=addr)
- if len(lock) > 0:
- return HttpServerError(request, "Too many requests from your IP in the past 15 minutes")
+ # Check if we are locked
+ lock = SurveyLock.objects.filter(ipaddr=addr)
+ if len(lock) > 0:
+ return HttpServerError(request, "Too many requests from your IP in the past 15 minutes")
- # Generate a new lock item, and store it
- lock = SurveyLock(ipaddr=addr)
- lock.save()
+ # Generate a new lock item, and store it
+ lock = SurveyLock(ipaddr=addr)
+ lock.save()
- answers = SurveyAnswer.objects.get_or_create(survey=surv)[0]
- setattr(answers, attrname, getattr(answers, attrname)+1)
- answers.save()
+ answers = SurveyAnswer.objects.get_or_create(survey=surv)[0]
+ setattr(answers, attrname, getattr(answers, attrname)+1)
+ answers.save()
- # Do explicit varnish purge, since it seems that the model doesn't
- # do it properly. Possibly because of the cute stuff we do with
- # getattr/setattr above.
- varnish_purge("/community/survey/%s/" % surveyid)
+ # Do explicit varnish purge, since it seems that the model doesn't
+ # do it properly. Possibly because of the cute stuff we do with
+ # getattr/setattr above.
+ varnish_purge("/community/survey/%s/" % surveyid)
- return HttpResponseRedirect("/community/survey/%s/" % surveyid)
+ return HttpResponseRedirect("/community/survey/%s/" % surveyid)
diff --git a/pgweb/urls.py b/pgweb/urls.py
index 4f89f6b6..c0b290d7 100644
--- a/pgweb/urls.py
+++ b/pgweb/urls.py
@@ -31,131 +31,131 @@ from pgweb.news.feeds import NewsFeed
from pgweb.events.feeds import EventFeed
urlpatterns = [
- url(r'^$', pgweb.core.views.home),
- url(r'^dyncss/(?P(.*?)(.*?)(.*?)
', unicode(m, 'utf8'), re.DOTALL)
- if r2:
- s = u"%s%s%s" % r2.groups()
- print "Source: %s" % hp.unescape(s).encode('utf-8')
- print ""
- else:
- print "Unknown status: %s" % headers['x-w3c-validator-status']
- print headers
- sys.exit(1)
-
-
+ r2 = re.search('(.*?)(.*?)(.*?)
', unicode(m, 'utf8'), re.DOTALL)
+ if r2:
+ s = u"%s%s%s" % r2.groups()
+ print "Source: %s" % hp.unescape(s).encode('utf-8')
+ print ""
+ else:
+ print "Unknown status: %s" % headers['x-w3c-validator-status']
+ print headers
+ sys.exit(1)
+
+
diff --git a/tools/purgehook/purgehook.py b/tools/purgehook/purgehook.py
index 36d259f3..bf2e9f9e 100755
--- a/tools/purgehook/purgehook.py
+++ b/tools/purgehook/purgehook.py
@@ -14,27 +14,27 @@ import psycopg2
# Templates that we don't want to ban automatically
BANNED_TEMPLATES=(
- 'base/base.html',
+ 'base/base.html',
)
if __name__ == "__main__":
- config = ConfigParser()
- config.read(os.path.join(os.path.abspath(os.path.dirname(__file__)), 'purgehook.ini'))
- conn = psycopg2.connect(config.get('db', 'dsn'))
- curs = conn.cursor()
+ config = ConfigParser()
+ config.read(os.path.join(os.path.abspath(os.path.dirname(__file__)), 'purgehook.ini'))
+ conn = psycopg2.connect(config.get('db', 'dsn'))
+ curs = conn.cursor()
- for l in sys.stdin:
- if l.startswith('templates/'):
- tmpl = l[len('templates/'):].strip()
- if not tmpl in BANNED_TEMPLATES:
- curs.execute("SELECT varnish_purge_xkey(%(key)s)", {
- 'key': 'pgwt_{0}'.format(hashlib.md5(tmpl).hexdigest()),
- })
- elif l.startswith('media/'):
- # For media we can't xkey, but the URL is exact so we can
- # use a classic single-url purge.
- curs.execute("SELECT varnish_purge('^/' || %(u)s || '$')", {
- 'u': l.strip(),
- })
- conn.commit()
- conn.close()
+ for l in sys.stdin:
+ if l.startswith('templates/'):
+ tmpl = l[len('templates/'):].strip()
+ if not tmpl in BANNED_TEMPLATES:
+ curs.execute("SELECT varnish_purge_xkey(%(key)s)", {
+ 'key': 'pgwt_{0}'.format(hashlib.md5(tmpl).hexdigest()),
+ })
+ elif l.startswith('media/'):
+ # For media we can't xkey, but the URL is exact so we can
+ # use a classic single-url purge.
+ curs.execute("SELECT varnish_purge('^/' || %(u)s || '$')", {
+ 'u': l.strip(),
+ })
+ conn.commit()
+ conn.close()
diff --git a/tools/search/crawler/lib/archives.py b/tools/search/crawler/lib/archives.py
index d566264c..7a2014ab 100644
--- a/tools/search/crawler/lib/archives.py
+++ b/tools/search/crawler/lib/archives.py
@@ -9,159 +9,159 @@ from lib.log import log
from lib.parsers import ArchivesParser
class MultiListCrawler(object):
- def __init__(self, lists, conn, status_interval=30, commit_interval=500):
- self.lists = lists
- self.conn = conn
- self.status_interval = status_interval
- self.commit_interval = commit_interval
+ def __init__(self, lists, conn, status_interval=30, commit_interval=500):
+ self.lists = lists
+ self.conn = conn
+ self.status_interval = status_interval
+ self.commit_interval = commit_interval
- self.queue = Queue()
- self.counter = 0
- self.counterlock = threading.RLock()
- self.stopevent = threading.Event()
+ self.queue = Queue()
+ self.counter = 0
+ self.counterlock = threading.RLock()
+ self.stopevent = threading.Event()
- def crawl(self, full=False, month=None):
- # Each thread can independently run on one month, so we can get
- # a reasonable spread. Therefor, submit them as separate jobs
- # to the queue.
- for listid, listname in self.lists:
- if full:
- # Generate a sequence of everything to index
- for year in range(1997, datetime.datetime.now().year+1):
- for month in range(1,13):
- self.queue.put((listid, listname, year, month, -1))
- elif month:
- # Do one specific month
- pieces = month.split("-")
- if len(pieces) != 2:
- print "Month format is