Fix whitespace and indentation, per pep8
author: Magnus Hagander <magnus@hagander.net>
Thu, 17 Jan 2019 19:47:43 +0000 (20:47 +0100)
committer: Magnus Hagander <magnus@hagander.net>
Thu, 17 Jan 2019 19:47:43 +0000 (20:47 +0100)
114 files changed:
pgweb/account/admin.py
pgweb/account/forms.py
pgweb/account/models.py
pgweb/account/oauthclient.py
pgweb/account/recaptcha.py
pgweb/account/views.py
pgweb/contributors/admin.py
pgweb/contributors/models.py
pgweb/contributors/views.py
pgweb/core/admin.py
pgweb/core/feeds.py
pgweb/core/forms.py
pgweb/core/lookups.py
pgweb/core/management/commands/cleanup_old_records.py
pgweb/core/management/commands/fetch_rss_feeds.py
pgweb/core/management/commands/moderation_report.py
pgweb/core/management/commands/sessioninfo.py
pgweb/core/migrations/0001_initial.py
pgweb/core/models.py
pgweb/core/struct.py
pgweb/core/templatetags/pgfilters.py
pgweb/core/views.py
pgweb/docs/forms.py
pgweb/docs/models.py
pgweb/docs/struct.py
pgweb/docs/views.py
pgweb/downloads/admin.py
pgweb/downloads/forms.py
pgweb/downloads/models.py
pgweb/downloads/struct.py
pgweb/downloads/views.py
pgweb/events/admin.py
pgweb/events/feeds.py
pgweb/events/forms.py
pgweb/events/models.py
pgweb/events/struct.py
pgweb/events/views.py
pgweb/featurematrix/admin.py
pgweb/featurematrix/models.py
pgweb/featurematrix/views.py
pgweb/legacyurl/models.py
pgweb/legacyurl/views.py
pgweb/lists/forms.py
pgweb/lists/management/commands/sync_lists.py
pgweb/lists/models.py
pgweb/lists/views.py
pgweb/mailqueue/admin.py
pgweb/mailqueue/management/commands/send_queued_mail.py
pgweb/mailqueue/models.py
pgweb/mailqueue/util.py
pgweb/misc/forms.py
pgweb/misc/models.py
pgweb/misc/views.py
pgweb/news/admin.py
pgweb/news/feeds.py
pgweb/news/forms.py
pgweb/news/management/commands/twitter_post.py
pgweb/news/management/commands/twitter_register.py
pgweb/news/models.py
pgweb/news/struct.py
pgweb/news/views.py
pgweb/profserv/admin.py
pgweb/profserv/forms.py
pgweb/profserv/models.py
pgweb/profserv/struct.py
pgweb/profserv/views.py
pgweb/pugs/admin.py
pgweb/pugs/models.py
pgweb/pugs/views.py
pgweb/quotes/admin.py
pgweb/quotes/models.py
pgweb/search/models.py
pgweb/search/views.py
pgweb/security/admin.py
pgweb/security/management/commands/update_cve_links.py
pgweb/security/models.py
pgweb/security/views.py
pgweb/settings.py
pgweb/sponsors/models.py
pgweb/sponsors/views.py
pgweb/survey/admin.py
pgweb/survey/models.py
pgweb/survey/views.py
pgweb/util/admin.py
pgweb/util/auth.py
pgweb/util/contexts.py
pgweb/util/decorators.py
pgweb/util/helpers.py
pgweb/util/middleware.py
pgweb/util/misc.py
pgweb/util/moderation.py
pgweb/util/signals.py
pgweb/util/sitestruct.py
pgweb/util/templateloader.py
tools/communityauth/generate_cryptkey.py
tools/communityauth/sample/django/auth.py
tools/communityauth/test_auth.py
tools/docs/docload.py
tools/ftp/spider_ftp.py
tools/ftp/spider_yum.py
tools/localhtmlvalidate/localhtmlvalidate.py
tools/purgehook/purgehook.py
tools/search/crawler/lib/archives.py
tools/search/crawler/lib/basecrawler.py
tools/search/crawler/lib/genericsite.py
tools/search/crawler/lib/log.py
tools/search/crawler/lib/parsers.py
tools/search/crawler/lib/sitemapsite.py
tools/search/crawler/lib/threadwrapper.py
tools/search/crawler/listcrawler.py
tools/search/crawler/listsync.py
tools/search/crawler/webcrawler.py
tools/varnishqueue/nagios_check.py
tools/varnishqueue/varnish_queue.py

index 6127db3c9e16ece54802f477670477cc90fc4d6f..3c1771c09cedab7110b5524e78c9dd952b75cb65 100644 (file)
@@ -8,6 +8,7 @@ import base64
 
 from models import CommunityAuthSite, CommunityAuthOrg
 
+
 class CommunityAuthSiteAdminForm(forms.ModelForm):
     class Meta:
         model = CommunityAuthSite
@@ -24,9 +25,11 @@ class CommunityAuthSiteAdminForm(forms.ModelForm):
             raise forms.ValidationError("Crypto key must be 16, 24 or 32 bytes before being base64-encoded")
         return self.cleaned_data['cryptkey']
 
+
 class CommunityAuthSiteAdmin(admin.ModelAdmin):
     form = CommunityAuthSiteAdminForm
 
+
 class PGUserChangeForm(UserChangeForm):
     """just like UserChangeForm, butremoves "username" requirement"""
     def __init__(self, *args, **kwargs):
@@ -38,6 +41,7 @@ class PGUserChangeForm(UserChangeForm):
         if self.fields.get('username'):
             del self.fields['username']
 
+
 class PGUserAdmin(UserAdmin):
     """overrides default Django user admin"""
     form = PGUserChangeForm
@@ -48,7 +52,8 @@ class PGUserAdmin(UserAdmin):
             return self.readonly_fields + ('username',)
         return self.readonly_fields
 
+
 admin.site.register(CommunityAuthSite, CommunityAuthSiteAdmin)
 admin.site.register(CommunityAuthOrg)
-admin.site.unregister(User) # have to unregister default User Admin...
-admin.site.register(User, PGUserAdmin) # ...in order to add overrides
+admin.site.unregister(User)  # have to unregister default User Admin...
+admin.site.register(User, PGUserAdmin)  # ...in order to add overrides
index d7b67d3258f642f9a45732e141227d58bdb6ecf3..a1652c71bebe26da141c027669c34d688362e09f 100644 (file)
@@ -12,6 +12,7 @@ from recaptcha import ReCaptchaField
 import logging
 log = logging.getLogger(__name__)
 
+
 def _clean_username(username):
     username = username.lower()
 
@@ -23,6 +24,7 @@ def _clean_username(username):
         return username
     raise forms.ValidationError("This username is already in use")
 
+
 # Override some error handling only in the default authentication form
 class PgwebAuthenticationForm(AuthenticationForm):
     def clean(self):
@@ -38,6 +40,7 @@ class PgwebAuthenticationForm(AuthenticationForm):
                 return self.cleaned_data
             raise e
 
+
 class CommunityAuthConsentForm(forms.Form):
     consent = forms.BooleanField(help_text='Consent to sharing this data')
     next = forms.CharField(widget=forms.widgets.HiddenInput())
@@ -48,6 +51,7 @@ class CommunityAuthConsentForm(forms.Form):
 
         self.fields['consent'].label = 'Consent to sharing data with {0}'.format(self.orgname)
 
+
 class SignupForm(forms.Form):
     username = forms.CharField(max_length=30)
     first_name = forms.CharField(max_length=30)
@@ -84,6 +88,7 @@ class SignupForm(forms.Form):
             return email
         raise forms.ValidationError("A user with this email address is already registered")
 
+
 class SignupOauthForm(forms.Form):
     username = forms.CharField(max_length=30)
     first_name = forms.CharField(max_length=30, required=False)
@@ -106,25 +111,30 @@ class SignupOauthForm(forms.Form):
     def clean_email(self):
         return self.cleaned_data['email'].lower()
 
+
 class UserProfileForm(forms.ModelForm):
     class Meta:
         model = UserProfile
         exclude = ('user',)
 
+
 class UserForm(forms.ModelForm):
     def __init__(self, *args, **kwargs):
         super(UserForm, self).__init__(*args, **kwargs)
         self.fields['first_name'].required = True
         self.fields['last_name'].required = True
+
     class Meta:
         model = User
         fields = ('first_name', 'last_name', )
 
+
 class ContributorForm(forms.ModelForm):
     class Meta:
         model = Contributor
         exclude = ('ctype', 'lastname', 'firstname', 'user', )
 
+
 class ChangeEmailForm(forms.Form):
     email = forms.EmailField()
     email2 = forms.EmailField(label="Repeat email")
@@ -156,5 +166,6 @@ class ChangeEmailForm(forms.Form):
             raise forms.ValidationError("Email addresses don't match")
         return email2
 
+
 class PgwebPasswordResetForm(forms.Form):
     email = forms.EmailField()
index 0c4de48b4f51db99cc48c65b8041177280fcf47f..8db9c5f7c00ea31e57d7a49512d1d934126f26ea 100644 (file)
@@ -1,6 +1,7 @@
 from django.db import models
 from django.contrib.auth.models import User
 
+
 class CommunityAuthOrg(models.Model):
     orgname = models.CharField(max_length=100, null=False, blank=False,
                                help_text="Name of the organisation")
@@ -9,6 +10,7 @@ class CommunityAuthOrg(models.Model):
     def __unicode__(self):
         return self.orgname
 
+
 class CommunityAuthSite(models.Model):
     name = models.CharField(max_length=100, null=False, blank=False,
                             help_text="Note that the value in this field is shown on the login page, so make sure it's user-friendly!")
@@ -23,6 +25,7 @@ class CommunityAuthSite(models.Model):
     def __unicode__(self):
         return self.name
 
+
 class CommunityAuthConsent(models.Model):
     user = models.ForeignKey(User, null=False, blank=False)
     org = models.ForeignKey(CommunityAuthOrg, null=False, blank=False)
@@ -31,6 +34,7 @@ class CommunityAuthConsent(models.Model):
     class Meta:
         unique_together = (('user', 'org'), )
 
+
 class EmailChangeToken(models.Model):
     user = models.OneToOneField(User, null=False, blank=False)
     email = models.EmailField(max_length=75, null=False, blank=False)
index 27c798486e9950309eb483be83c32d0dea6a9f98..432a5188efcfaeee5ebdebd2b6e0767a0ac860d1 100644 (file)
@@ -14,6 +14,7 @@ log = logging.getLogger(__name__)
 class OAuthException(Exception):
     pass
 
+
 #
 # Generic OAuth login for multiple providers
 #
@@ -97,10 +98,13 @@ def oauth_login_google(request):
         'google',
         'https://accounts.google.com/o/oauth2/v2/auth',
         'https://accounts.google.com/o/oauth2/token',
-        ['https://www.googleapis.com/auth/userinfo.email',
-             'https://www.googleapis.com/auth/userinfo.profile'],
+        [
+            'https://www.googleapis.com/auth/userinfo.email',
+            'https://www.googleapis.com/auth/userinfo.profile'
+        ],
         _google_auth_data)
 
+
 #
 # Github login
 #  Registration: https://github.com/settings/developers
@@ -119,7 +123,7 @@ def oauth_login_github(request):
         else:
             # Some github accounts have no name on them, so we can just
             # let the user fill it out manually in that case.
-            n = ['','']
+            n = ['', '']
         # Email is at a separate endpoint
         r = oa.get('https://api.github.com/user/emails').json()
         for e in r:
@@ -139,6 +143,7 @@ def oauth_login_github(request):
         ['user:email', ],
         _github_auth_data)
 
+
 #
 # Facebook login
 #  Registration: https://developers.facebook.com/apps
@@ -181,7 +186,7 @@ def oauth_login_microsoft(request):
         'microsoft',
         'https://login.live.com/oauth20_authorize.srf',
         'https://login.live.com/oauth20_token.srf',
-        ['wl.basic', 'wl.emails' ],
+        ['wl.basic', 'wl.emails', ],
         _microsoft_auth_data)
 
 
index 9b66795553e97b71a30f807051c45972bf6de9f6..e86bd2a836e6c1e5a527ffff998905d1a305b2ff 100644 (file)
@@ -14,6 +14,7 @@ import json
 import logging
 log = logging.getLogger(__name__)
 
+
 class ReCaptchaWidget(forms.widgets.Widget):
     def render(self, name, value, attrs=None):
         if settings.NOCAPTCHA:
index f210013ec75cd4fa24989e2eae437523c9e8ee05..a3f37e648d8cec7fce7b6662c5a3f05b769a71a4 100644 (file)
@@ -44,7 +44,8 @@ log = logging.getLogger(__name__)
 
 # The value we store in user.password for oauth logins. This is
 # a value that must not match any hashers.
-OAUTH_PASSWORD_STORE='oauth_signin_account_no_password'
+OAUTH_PASSWORD_STORE = 'oauth_signin_account_no_password'
+
 
 @login_required
 def home(request):
@@ -61,6 +62,7 @@ def home(request):
         'profservs': myprofservs,
     })
 
+
 objtypes = {
     'news': {
         'title': 'News Article',
@@ -85,6 +87,7 @@ objtypes = {
     },
 }
 
+
 @login_required
 @transaction.atomic
 def profile(request):
@@ -128,11 +131,12 @@ def profile(request):
             contribform = ContributorForm(instance=contrib)
 
     return render_pgweb(request, 'account', 'account/userprofileform.html', {
-            'userform': userform,
-            'profileform': profileform,
-            'contribform': contribform,
-            'can_change_email': can_change_email,
-            })
+        'userform': userform,
+        'profileform': profileform,
+        'contribform': contribform,
+        'can_change_email': can_change_email,
+    })
+
 
 @login_required
 @transaction.atomic
@@ -158,12 +162,13 @@ def change_email(request):
                                      token=generate_random_token())
             token.save()
 
-            send_template_mail(settings.ACCOUNTS_NOREPLY_FROM,
-                               form.cleaned_data['email'],
-                               'Your postgresql.org community account',
-                               'account/email_change_email.txt',
-                               { 'token': token , 'user': request.user, }
-                           )
+            send_template_mail(
+                settings.ACCOUNTS_NOREPLY_FROM,
+                form.cleaned_data['email'],
+                'Your postgresql.org community account',
+                'account/email_change_email.txt',
+                {'token': token, 'user': request.user, }
+            )
             return HttpResponseRedirect('done/')
     else:
         form = ChangeEmailForm(request.user)
@@ -171,7 +176,8 @@ def change_email(request):
     return render_pgweb(request, 'account', 'account/emailchangeform.html', {
         'form': form,
         'token': token,
-        })
+    })
+
 
 @login_required
 @transaction.atomic
@@ -193,7 +199,8 @@ def confirm_change_email(request, tokenhash):
     return render_pgweb(request, 'account', 'account/emailchangecompleted.html', {
         'token': tokenhash,
         'success': token and True or False,
-        })
+    })
+
 
 @login_required
 def listobjects(request, objtype):
@@ -211,24 +218,28 @@ def listobjects(request, objtype):
         'suburl': objtype,
     })
 
+
 @login_required
 def orglist(request):
     orgs = Organisation.objects.filter(approved=True)
 
     return render_pgweb(request, 'account', 'account/orglist.html', {
-            'orgs': orgs,
+        'orgs': orgs,
     })
 
+
 def login(request):
     return authviews.login(request, template_name='account/login.html',
                            authentication_form=PgwebAuthenticationForm,
                            extra_context={
-                               'oauth_providers': [(k,v) for k,v in sorted(settings.OAUTH.items())],
+                               'oauth_providers': [(k, v) for k, v in sorted(settings.OAUTH.items())],
                            })
 
+
 def logout(request):
     return authviews.logout_then_login(request, login_url='/')
 
+
 def changepwd(request):
     if hasattr(request.user, 'password') and request.user.password == OAUTH_PASSWORD_STORE:
         return HttpServerError(request, "This account cannot change password as it's connected to a third party login site.")
@@ -238,6 +249,7 @@ def changepwd(request):
                                      template_name='account/password_change.html',
                                      post_change_redirect='/account/changepwd/done/')
 
+
 def resetpwd(request):
     # Basic django password reset feature is completely broken. For example, it does not support
     # resetting passwords for users with "old hashes", which means they have no way to ever
@@ -255,32 +267,36 @@ def resetpwd(request):
         if form.is_valid():
             log.info("Initiating password set from {0} for {1}".format(get_client_ip(request), form.cleaned_data['email']))
             token = default_token_generator.make_token(u)
-            send_template_mail(settings.ACCOUNTS_NOREPLY_FROM,
-                               form.cleaned_data['email'],
-                               'Password reset for your postgresql.org account',
-                               'account/password_reset_email.txt',
-                               {
-                                   'user': u,
-                                   'uid': urlsafe_base64_encode(force_bytes(u.pk)),
-                                   'token': token,
-                               },
+            send_template_mail(
+                settings.ACCOUNTS_NOREPLY_FROM,
+                form.cleaned_data['email'],
+                'Password reset for your postgresql.org account',
+                'account/password_reset_email.txt',
+                {
+                    'user': u,
+                    'uid': urlsafe_base64_encode(force_bytes(u.pk)),
+                    'token': token,
+                },
             )
             return HttpResponseRedirect('/account/reset/done/')
     else:
         form = PgwebPasswordResetForm()
 
     return render_pgweb(request, 'account', 'account/password_reset.html', {
-            'form': form,
+        'form': form,
     })
 
+
 def change_done(request):
     log.info("Password change done from {0}".format(get_client_ip(request)))
     return authviews.password_change_done(request, template_name='account/password_change_done.html')
 
+
 def reset_done(request):
     log.info("Password reset done from {0}".format(get_client_ip(request)))
     return authviews.password_reset_done(request, template_name='account/password_reset_done.html')
 
+
 def reset_confirm(request, uidb64, token):
     log.info("Confirming password reset for uidb {0}, token {1} from {2}".format(uidb64, token, get_client_ip(request)))
     return authviews.password_reset_confirm(request,
@@ -289,10 +305,12 @@ def reset_confirm(request, uidb64, token):
                                             template_name='account/password_reset_confirm.html',
                                             post_reset_redirect='/account/reset/complete/')
 
+
 def reset_complete(request):
     log.info("Password reset completed for user from {0}".format(get_client_ip(request)))
     return authviews.password_reset_complete(request, template_name='account/password_reset_complete.html')
 
+
 @script_sources('https://www.google.com/recaptcha/')
 @script_sources('https://www.gstatic.com/recaptcha/')
 @frame_sources('https://www.google.com/')
@@ -326,7 +344,7 @@ def signup(request):
                                form.cleaned_data['email'],
                                'Your new postgresql.org community account',
                                'account/new_account_email.txt',
-                               { 'uid': urlsafe_base64_encode(force_bytes(user.id)), 'token': token, 'user': user}
+                               {'uid': urlsafe_base64_encode(force_bytes(user.id)), 'token': token, 'user': user}
                                )
 
             return HttpResponseRedirect('/account/signup/complete/')
@@ -334,16 +352,16 @@ def signup(request):
         form = SignupForm(get_client_ip(request))
 
     return render_pgweb(request, 'account', 'base/form.html', {
-            'form': form,
-            'formitemtype': 'Account',
-            'form_intro': """
+        'form': form,
+        'formitemtype': 'Account',
+        'form_intro': """
 To sign up for a free community account, enter your preferred userid and email address.
 Note that a community account is only needed if you want to submit information - all
 content is available for reading without an account.
 """,
-            'savebutton': 'Sign up',
-            'operation': 'New',
-            'recaptcha': True,
+        'savebutton': 'Sign up',
+        'operation': 'New',
+        'recaptcha': True,
     })
 
 
@@ -429,12 +447,12 @@ def signup_oauth(request):
         'operation': 'New account',
         'savebutton': 'Sign up for new account',
         'recaptcha': True,
-        })
+    })
+
 
 ####
-## Community authentication endpoint
+# Community authentication endpoint
 ####
-
 def communityauth(request, siteid):
     # Get whatever site the user is trying to log in to.
     site = get_object_or_404(CommunityAuthSite, pk=siteid)
@@ -476,23 +494,24 @@ def communityauth(request, siteid):
             nexturl = request.POST['next']
         else:
             nexturl = '/account/auth/%s/%s' % (siteid, urldata)
-        return authviews.login(request, template_name='account/login.html',
-                               authentication_form=PgwebAuthenticationForm,
-                               extra_context={
-                                   'sitename': site.name,
-                                   'next': nexturl,
-                                   'oauth_providers': [(k,v) for k,v in sorted(settings.OAUTH.items())],
-                               },
-                           )
+        return authviews.login(
+            request, template_name='account/login.html',
+            authentication_form=PgwebAuthenticationForm,
+            extra_context={
+                'sitename': site.name,
+                'next': nexturl,
+                'oauth_providers': [(k, v) for k, v in sorted(settings.OAUTH.items())],
+            },
+        )
 
     # When we reach this point, the user *has* already been authenticated.
     # The request variable "su" *may* contain a suburl and should in that
     # case be passed along to the site we're authenticating for. And of
     # course, we fill a structure with information about the user.
 
-    if request.user.first_name=='' or request.user.last_name=='' or request.user.email=='':
+    if request.user.first_name == '' or request.user.last_name == '' or request.user.email == '':
         return render_pgweb(request, 'account', 'account/communityauth_noinfo.html', {
-                })
+        })
 
     # Check for cooloff period
     if site.cooloff_hours > 0:
@@ -501,7 +520,7 @@ def communityauth(request, siteid):
                 request.user.username, site.name))
             return render_pgweb(request, 'account', 'account/communityauth_cooloff.html', {
                 'site': site,
-                })
+            })
 
     if site.org.require_consent:
         if not CommunityAuthConsent.objects.filter(org=site.org, user=request.user).exists():
@@ -513,7 +532,7 @@ def communityauth(request, siteid):
         'f': request.user.first_name.encode('utf-8'),
         'l': request.user.last_name.encode('utf-8'),
         'e': request.user.email.encode('utf-8'),
-        }
+    }
     if d:
         info['d'] = d.encode('utf-8')
     elif su:
@@ -525,16 +544,16 @@ def communityauth(request, siteid):
 
     # Encrypt it with the shared key (and IV!)
     r = Random.new()
-    iv = r.read(16) # Always 16 bytes for AES
+    iv = r.read(16)  # Always 16 bytes for AES
     encryptor = AES.new(base64.b64decode(site.cryptkey), AES.MODE_CBC, iv)
-    cipher = encryptor.encrypt(s + ' ' * (16-(len(s) % 16))) #Pad to even 16 bytes
+    cipher = encryptor.encrypt(s + ' ' * (16 - (len(s) % 16)))  # Pad to even 16 bytes
 
     # Generate redirect
     return HttpResponseRedirect("%s?i=%s&d=%s" % (
-            site.redirecturl,
-            base64.b64encode(iv, "-_"),
-            base64.b64encode(cipher, "-_"),
-            ))
+        site.redirecturl,
+        base64.b64encode(iv, "-_"),
+        base64.b64encode(cipher, "-_"),
+    ))
 
 
 def communityauth_logout(request, siteid):
@@ -547,6 +566,7 @@ def communityauth_logout(request, siteid):
     # Redirect user back to the specified suburl
     return HttpResponseRedirect("%s?s=logout" % site.redirecturl)
 
+
 @login_required
 def communityauth_consent(request, siteid):
     org = get_object_or_404(CommunityAuthSite, id=siteid).org
@@ -554,7 +574,7 @@ def communityauth_consent(request, siteid):
         form = CommunityAuthConsentForm(org.orgname, data=request.POST)
         if form.is_valid():
             CommunityAuthConsent.objects.get_or_create(user=request.user, org=org,
-                                                       defaults={'consentgiven':datetime.now()},
+                                                       defaults={'consentgiven': datetime.now()},
                                                        )
             return HttpResponseRedirect(form.cleaned_data['next'])
     else:
@@ -571,9 +591,9 @@ def communityauth_consent(request, siteid):
 def _encrypt_site_response(site, s):
     # Encrypt it with the shared key (and IV!)
     r = Random.new()
-    iv = r.read(16) # Always 16 bytes for AES
+    iv = r.read(16)  # Always 16 bytes for AES
     encryptor = AES.new(base64.b64decode(site.cryptkey), AES.MODE_CBC, iv)
-    cipher = encryptor.encrypt(s + ' ' * (16-(len(s) % 16))) #Pad to even 16 bytes
+    cipher = encryptor.encrypt(s + ' ' * (16 - (len(s) % 16)))  # Pad to even 16 bytes
 
     # Base64-encode the response, just to be consistent
     return "%s&%s" % (
@@ -581,6 +601,7 @@ def _encrypt_site_response(site, s):
         base64.b64encode(cipher, '-_'),
     )
 
+
 def communityauth_search(request, siteid):
     # Perform a search for users. The response will be encrypted with the site
     # key to prevent abuse, therefor we need the site.
@@ -605,6 +626,7 @@ def communityauth_search(request, siteid):
 
     return HttpResponse(_encrypt_site_response(site, j))
 
+
 def communityauth_getkeys(request, siteid, since=None):
     # Get any updated ssh keys for community accounts.
     # The response will be encrypted with the site key to prevent abuse,
index 2f0ec4f21bd30b0714e9a2d5379158a5d27c93ed..e8a3107e07edfee388c6f1af753ceacfdd0b07f5 100644 (file)
@@ -7,6 +7,7 @@ from pgweb.core.lookups import UserLookup
 
 from models import Contributor, ContributorType
 
+
 class ContributorAdminForm(forms.ModelForm):
     class Meta:
         model = Contributor
@@ -20,8 +21,10 @@ class ContributorAdminForm(forms.ModelForm):
         self.fields['user'].widget.can_add_related = False
         self.fields['user'].widget.can_change_related = False
 
+
 class ContributorAdmin(admin.ModelAdmin):
     form = ContributorAdminForm
 
+
 admin.site.register(ContributorType)
 admin.site.register(Contributor, ContributorAdmin)
index 43cd3baed615c3a860234cab5e5a0604327b8e6e..f75f8c85988402c0975a4491fedcea505e447f7d 100644 (file)
@@ -1,6 +1,7 @@
 from django.db import models
 from django.contrib.auth.models import User
 
+
 class ContributorType(models.Model):
     typename = models.CharField(max_length=32, null=False, blank=False)
     sortorder = models.IntegerField(null=False, default=100)
@@ -16,6 +17,7 @@ class ContributorType(models.Model):
     class Meta:
         ordering = ('sortorder',)
 
+
 class Contributor(models.Model):
     ctype = models.ForeignKey(ContributorType)
     lastname = models.CharField(max_length=100, null=False, blank=False)
@@ -27,7 +29,7 @@ class Contributor(models.Model):
     contribution = models.TextField(null=True, blank=True)
     user = models.ForeignKey(User, null=True, blank=True)
 
-    send_notification=True
+    send_notification = True
     purge_urls = ('/community/contributors/', )
 
     def __unicode__(self):
index 0be074a42c7d480ed6b727e8c1d503047a93a6d8..da068aa55206197e4585c2052eaddd5fb476f95f 100644 (file)
@@ -2,6 +2,7 @@ from pgweb.util.contexts import render_pgweb
 
 from models import ContributorType
 
+
 def completelist(request):
     contributortypes = list(ContributorType.objects.all())
     return render_pgweb(request, 'community', 'contributors/list.html', {
index b03f1de0009d3b7aad94137a3dbf69feefc731f8..0bfc57da738754409ccde2c22ac3fc70841c4504 100644 (file)
@@ -9,6 +9,7 @@ from pgweb.core.models import ModerationNotification
 
 from pgweb.core.lookups import UserLookup
 
+
 class OrganisationAdminForm(forms.ModelForm):
     class Meta:
         model = Organisation
@@ -23,6 +24,7 @@ class OrganisationAdminForm(forms.ModelForm):
         self.fields['managers'].widget.can_change_related = False
         self.fields['managers'].widget.can_delete_related = False
 
+
 class OrganisationAdmin(admin.ModelAdmin):
     form = OrganisationAdminForm
     list_display = ('name', 'approved', 'lastconfirmed',)
@@ -30,13 +32,14 @@ class OrganisationAdmin(admin.ModelAdmin):
     ordering = ('name', )
     search_fields = ('name', )
 
+
 class VersionAdmin(admin.ModelAdmin):
     list_display = ('versionstring', 'reldate', 'supported', 'current', )
 
+
 admin.site.register(Version, VersionAdmin)
 admin.site.register(OrganisationType)
 admin.site.register(Organisation, OrganisationAdmin)
 admin.site.register(ImportedRSSFeed)
 admin.site.register(ImportedRSSItem)
 admin.site.register(ModerationNotification)
-
index 45a0ee67476d8a6dd2e36ddad392c7a2d20451d8..99b268a9f461a296b9ae8c1b4af3c2e8ec945a61 100644 (file)
@@ -4,6 +4,7 @@ from models import Version
 
 from datetime import datetime, time
 
+
 class VersionFeed(Feed):
     title = "PostgreSQL latest versions"
     link = "https://www.postgresql.org/"
@@ -19,4 +20,4 @@ class VersionFeed(Feed):
         return "https://www.postgresql.org/docs/%s/%s" % (obj.numtree, obj.relnotes)
 
     def item_pubdate(self, obj):
-        return datetime.combine(obj.reldate,time.min)
+        return datetime.combine(obj.reldate, time.min)
index 80e41876de5eca6eee68bce8a73243f6cd6c39f5..ac0716c43cc6173d38cd5d635572adfb6c36ec89 100644 (file)
@@ -4,6 +4,7 @@ from django.forms import ValidationError
 from models import Organisation
 from django.contrib.auth.models import User
 
+
 class OrganisationForm(forms.ModelForm):
     remove_manager = forms.ModelMultipleChoiceField(required=False, queryset=None, label="Current manager(s)", help_text="Select one or more managers to remove")
     add_manager = forms.EmailField(required=False)
@@ -54,6 +55,7 @@ class OrganisationForm(forms.ModelForm):
     def apply_submitter(self, model, User):
         model.managers.add(User)
 
+
 class MergeOrgsForm(forms.Form):
     merge_into = forms.ModelChoiceField(queryset=Organisation.objects.all())
     merge_from = forms.ModelChoiceField(queryset=Organisation.objects.all())
index 3f64cb2e22989a563b633420392be95a415d396c..3d42ffe5027fa2ae49ed7624cb1b98246574b8de 100644 (file)
@@ -22,4 +22,5 @@ class UserLookup(ModelLookup):
         # Display for choice listings
         return u"%s (%s)" % (item.username, item.get_full_name())
 
+
 registry.register(UserLookup)
index 4fda869358dda7b5de0ecc1d17578d3a99720f2c..27d3bc13b0ccbf944f50b9017398ffb621dd7d9a 100644 (file)
@@ -17,6 +17,7 @@ from datetime import datetime, timedelta
 
 from pgweb.account.models import EmailChangeToken
 
+
 class Command(BaseCommand):
     help = 'Cleanup old records'
 
@@ -32,4 +33,4 @@ class Command(BaseCommand):
 
         # Clean up old email change tokens
         with transaction.atomic():
-                EmailChangeToken.objects.filter(sentat__lt=datetime.now()-timedelta(hours=24)).delete()
+                EmailChangeToken.objects.filter(sentat__lt=datetime.now() - timedelta(hours=24)).delete()
index 44c6ee7cc26d9002352d976e2e821913b2c019d4..39bfc9c7cf636a0801a9c56d63fa64842342f274 100644 (file)
@@ -13,6 +13,7 @@ from datetime import datetime
 
 from pgweb.core.models import ImportedRSSFeed, ImportedRSSItem
 
+
 class Command(BaseCommand):
     help = 'Fetch RSS feeds'
 
@@ -27,7 +28,7 @@ class Command(BaseCommand):
                     if not hasattr(feed, 'status'):
                         # bozo_excpetion can seemingly be set when there is no error as well,
                         # so make sure we only check if we didn't get a status.
-                        if hasattr(feed,'bozo_exception'):
+                        if hasattr(feed, 'bozo_exception'):
                             raise Exception('Feed load error %s' % feed.bozo_exception)
                         raise Exception('Feed load error with no exception!')
                     if feed.status != 200:
@@ -38,10 +39,11 @@ class Command(BaseCommand):
                         try:
                             item = ImportedRSSItem.objects.get(feed=importfeed, url=entry.link)
                         except ImportedRSSItem.DoesNotExist:
-                            item = ImportedRSSItem(feed=importfeed,
-                                                   title=entry.title[:100],
-                                                   url=entry.link,
-                                                   posttime=datetime(*(entry.published_parsed[0:6])),
+                            item = ImportedRSSItem(
+                                feed=importfeed,
+                                title=entry.title[:100],
+                                url=entry.link,
+                                posttime=datetime(*(entry.published_parsed[0:6])),
                             )
                             item.save()
                             fetchedsomething = True
index cdc3f28ab4fc7e50baf2ac1313a12f1a9afb7a06..c1be3ffb8fa203d341844e010895c2500de8845b 100644 (file)
@@ -14,6 +14,7 @@ from datetime import datetime
 from pgweb.util.moderation import get_all_pending_moderations
 from pgweb.util.misc import send_template_mail
 
+
 class Command(BaseCommand):
     help = 'Send moderation report'
 
@@ -28,4 +29,4 @@ class Command(BaseCommand):
                                    "core/moderation_report.txt",
                                    {
                                        'items': counts,
-                })
+                                   })
index aa5aaa3e24a9c056c977b5cc0291f77817c0f9be..0b8e2fdb8d11edb7177092b0ec319eaceae4911e 100644 (file)
@@ -6,6 +6,7 @@ from django.core.management.base import BaseCommand, CommandError
 from django.contrib.sessions.models import Session
 from django.contrib.auth.models import User
 
+
 class Command(BaseCommand):
     help = 'Dump interesting information about a session'
 
@@ -34,9 +35,8 @@ class Command(BaseCommand):
                 session.pop(k, None)
             if session:
                 print " -- Other session values --"
-                for k,v in session.items():
-                    print u"{0:20} {1}".format(k,v)
+                for k, v in session.items():
+                    print u"{0:20} {1}".format(k, v)
 
         except Session.DoesNotExist:
             raise CommandError('Session not found')
-
index 65fb2bab2275e6e3b85f85f203dd41d5a0c4d384..2f493d30744f202c9208c4ed6a0b394a2938c8b6 100644 (file)
@@ -6,6 +6,7 @@ from django.conf import settings
 
 import pgweb.core.models
 
+
 class Migration(migrations.Migration):
 
     dependencies = [
index d1d74bd92fed75fdfcce81f9874dbc192135c6b9..93a8b8f9eccefe2d97b12b4e07d42ff0d54a1dca 100644 (file)
@@ -10,9 +10,10 @@ TESTING_CHOICES = (
     (1, 'Release candidate'),
     (2, 'Beta'),
     (3, 'Alpha'),
-    )
+)
 TESTING_SHORTSTRING = ('', 'rc', 'beta', 'alpha')
 
+
 class Version(models.Model):
     tree = models.DecimalField(max_digits=3, decimal_places=1, null=False, blank=False, unique=True)
     latestminor = models.IntegerField(null=False, blank=False, default=0, help_text="For testing versions, latestminor means latest beta/rc number. For other releases, it's the latest minor release number in the tree.")
@@ -63,7 +64,7 @@ class Version(models.Model):
             for p in previous:
                 if not p == self:
                     p.current = False
-                    p.save() # primary key check avoids recursion
+                    p.save()  # primary key check avoids recursion
 
         # Now that we've made any previously current ones non-current, we are
         # free to save this one.
@@ -95,6 +96,7 @@ class Country(models.Model):
     def __unicode__(self):\r
         return self.name
 
+
 class Language(models.Model):
     # Import data from http://www.loc.gov/standards/iso639-2/ISO-639-2_utf-8.txt
     # (yes, there is a UTF16 BOM in the UTF8 file)
@@ -111,12 +113,14 @@ class Language(models.Model):
     def __unicode__(self):
         return self.name
 
+
 class OrganisationType(models.Model):
     typename = models.CharField(max_length=32, null=False, blank=False)
 
     def __unicode__(self):
         return self.typename
 
+
 class Organisation(models.Model):
     name = models.CharField(max_length=100, null=False, blank=False, unique=True)
     approved = models.BooleanField(null=False, default=False)
@@ -151,6 +155,7 @@ class ImportedRSSFeed(models.Model):
     def __unicode__(self):
         return self.internalname
 
+
 class ImportedRSSItem(models.Model):
     feed = models.ForeignKey(ImportedRSSFeed)
     title = models.CharField(max_length=100, null=False, blank=False)
@@ -167,6 +172,8 @@ class ImportedRSSItem(models.Model):
 
 # From man sshd, except for ssh-dss
 _valid_keytypes = ['ecdsa-sha2-nistp256', 'ecdsa-sha2-nistp384', 'ecdsa-sha2-nistp521', 'ssh-ed25519', 'ssh-rsa']
+
+
 # Options, keytype, key, comment. But we don't support options.
 def validate_sshkey(key):
     lines = key.splitlines()
@@ -185,12 +192,14 @@ def validate_sshkey(key):
         except:
             raise ValidationError("Incorrect base64 encoded key!")
 
+
 # Extra attributes for users (if they have them)
 class UserProfile(models.Model):
     user = models.OneToOneField(User, null=False, blank=False, primary_key=True)
-    sshkey = models.TextField(null=False, blank=True, verbose_name="SSH key", help_text= "Paste one or more public keys in OpenSSH format, one per line.", validators=[validate_sshkey, ])
+    sshkey = models.TextField(null=False, blank=True, verbose_name="SSH key", help_text="Paste one or more public keys in OpenSSH format, one per line.", validators=[validate_sshkey, ])
     lastmodified = models.DateTimeField(null=False, blank=False, auto_now=True)
 
+
 # Notifications sent for any moderated content.
 # Yes, we uglify it by storing the type of object as a string, so we don't
 # end up with a bazillion fields being foreign keys. Ugly, but works.
@@ -198,7 +207,7 @@ class ModerationNotification(models.Model):
     objectid = models.IntegerField(null=False, blank=False, db_index=True)
     objecttype = models.CharField(null=False, blank=False, max_length=100)
     text = models.TextField(null=False, blank=False)
-    author = models.CharField(null=False,  blank=False, max_length=100)
+    author = models.CharField(null=False, blank=False, max_length=100)
     date = models.DateTimeField(null=False, blank=False, auto_now=True)
 
     def __unicode__(self):
index 2829d1a5e357d981a9c632bc4b139bf6d8036426..e2b65a6a3c5fcba71ba8f31131fc905c188e7606 100644 (file)
@@ -1,5 +1,6 @@
 import os
 
+
 def get_struct():
     yield ('', None)
     yield ('community/', None)
index 2d8cce99fd23414cf18a8bd0d60ebb3ace390fec..a166c6ffd09907641743b6762ed7878c85307652 100644 (file)
@@ -5,10 +5,12 @@ import json
 
 register = template.Library()
 
+
 @register.filter(name='class_name')
 def class_name(ob):
     return ob.__class__.__name__
 
+
 @register.filter(is_safe=True)
 def field_class(value, arg):
     if 'class' in value.field.widget.attrs:
@@ -17,45 +19,54 @@ def field_class(value, arg):
         c = arg
     return value.as_widget(attrs={"class": c})
 
+
 @register.filter(name='hidemail')
 @stringfilter
 def hidemail(value):
     return value.replace('@', ' at ')
 
+
 @register.filter(is_safe=True)
 def ischeckbox(obj):
     return obj.field.widget.__class__.__name__ in ["CheckboxInput", "CheckboxSelectMultiple"] and not getattr(obj.field, 'regular_field', False)
 
+
 @register.filter(is_safe=True)
 def ismultiplecheckboxes(obj):
     return obj.field.widget.__class__.__name__ == "CheckboxSelectMultiple" and not getattr(obj.field, 'regular_field', False)
 
+
 @register.filter(is_safe=True)
 def isrequired_error(obj):
     if obj.errors and obj.errors[0] == u"This field is required.":
         return True
     return False
 
+
 @register.filter(is_safe=True)
 def label_class(value, arg):
     return value.label_tag(attrs={'class': arg})
 
+
 @register.filter()
 def planet_author(obj):
     # takes a ImportedRSSItem object from a Planet feed and extracts the author
     # information from the title
     return obj.title.split(':')[0]
 
+
 @register.filter()
 def planet_title(obj):
     # takes a ImportedRSSItem object from a Planet feed and extracts the info
     # specific to the title of the Planet entry
     return ":".join(obj.title.split(':')[1:])
 
+
 @register.filter(name='dictlookup')
 def dictlookup(value, key):
     return value.get(key, None)
 
+
 @register.filter(name='json')
 def tojson(value):
     return json.dumps(value)
index 509a52bc21daaf26a47eb635a2749bae4985578b..f307dd6092481307613b6ef73973769aa3ed4215 100644 (file)
@@ -37,6 +37,7 @@ from pgweb.survey.models import Survey
 from models import Organisation
 from forms import OrganisationForm, MergeOrgsForm
 
+
 # Front page view
 @cache(minutes=10)
 def home(request):
@@ -68,6 +69,7 @@ def home(request):
         'planet': planet,
     })
 
+
 # About page view (contains information about PostgreSQL + random quotes)
 @cache(minutes=10)
 def about(request):
@@ -77,6 +79,7 @@ def about(request):
         'quotes': quotes,
     })
 
+
 # Community main page (contains surveys and potentially more)
 def community(request):
     s = Survey.objects.filter(current=True)
@@ -90,13 +93,17 @@ def community(request):
         'planet': planet,
     })
 
+
 # List of supported versions
 def versions(request):
     return render_pgweb(request, 'support', 'support/versioning.html', {
-            'versions': Version.objects.filter(tree__gt=0).filter(testing=0),
+        'versions': Version.objects.filter(tree__gt=0).filter(testing=0),
     })
 
+
 re_staticfilenames = re.compile("^[0-9A-Z/_-]+$", re.IGNORECASE)
+
+
 # Generic fallback view for static pages
 def fallback(request, url):
     if url.find('..') > -1:
@@ -116,13 +123,14 @@ def fallback(request, url):
     # Guestimate the nav section by looking at the URL and taking the first
     # piece of it.
     try:
-        navsect = url.split('/',2)[0]
+        navsect = url.split('/', 2)[0]
     except:
         navsect = ''
     c = PGWebContextProcessor(request)
     c.update({'navmenu': get_nav_menu(navsect)})
     return HttpResponse(t.render(c))
 
+
 # Edit-forms for core objects
 @login_required
 def organisationform(request, itemid):
@@ -132,6 +140,7 @@ def organisationform(request, itemid):
     return simple_form(Organisation, itemid, request, OrganisationForm,
                        redirect='/account/edit/organisations/')
 
+
 # robots.txt
 def robots(request):
     return HttpResponse("""User-agent: *
@@ -154,7 +163,7 @@ def _make_sitemap(pagelist):
     x.startElement('urlset', {'xmlns': 'http://www.sitemaps.org/schemas/sitemap/0.9'})
     pages = 0
     for p in pagelist:
-        pages+=1
+        pages += 1
         x.startElement('url', {})
         x.add_xml_element('loc', 'https://www.postgresql.org/%s' % urllib.quote(p[0]))
         if len(p) > 1 and p[1]:
@@ -166,11 +175,13 @@ def _make_sitemap(pagelist):
     x.endDocument()
     return resp
 
+
 # Sitemap (XML format)
 @cache(hours=6)
 def sitemap(request):
     return _make_sitemap(get_all_pages_struct())
 
+
 # Internal sitemap (only for our own search engine)
 # Note! Still served up to anybody who wants it, so don't
 # put anything secret in it...
@@ -178,17 +189,19 @@ def sitemap(request):
 def sitemap_internal(request):
     return _make_sitemap(get_all_pages_struct(method='get_internal_struct'))
 
+
 # dynamic CSS serving, meaning we merge a number of different CSS into a
 # single one, making sure it turns into a single http response. We do this
 # dynamically, since the output will be cached.
 _dynamic_cssmap = {
     'base': ['media/css/main.css',
-             'media/css/normalize.css',],
+             'media/css/normalize.css', ],
     'docs': ['media/css/global.css',
              'media/css/table.css',
              'media/css/text.css',
              'media/css/docs.css'],
-    }
+}
+
 
 @cache(hours=6)
 def dynamic_css(request, css):
@@ -228,41 +241,46 @@ def dynamic_css(request, css):
 
     return resp
 
+
 @nocache
 def csrf_failure(request, reason=''):
     resp = render(request, 'errors/csrf_failure.html', {
-            'reason': reason,
-            })
-    resp.status_code = 403 # Forbidden
+        'reason': reason,
+    })
+    resp.status_code = 403  # Forbidden
     return resp
 
+
 # Basic information about the connection
 @cache(seconds=30)
 def system_information(request):
-    return render(request,'core/system_information.html', {
-            'server': os.uname()[1],
-            'cache_server': request.META['REMOTE_ADDR'] or None,
-            'client_ip': get_client_ip(request),
-            'django_version': django.get_version(),
+    return render(request, 'core/system_information.html', {
+        'server': os.uname()[1],
+        'cache_server': request.META['REMOTE_ADDR'] or None,
+        'client_ip': get_client_ip(request),
+        'django_version': django.get_version(),
     })
 
+
 # Sync timestamp for automirror. Keep it around for 30 seconds
 # Basically just a check that we can access the backend still...
 @cache(seconds=30)
 def sync_timestamp(request):
     s = datetime.now().strftime("%Y-%m-%d %H:%M:%S\n")
-    r = HttpResponse(s,    content_type='text/plain')
+    r = HttpResponse(s, content_type='text/plain')
     r['Content-Length'] = len(s)
     return r
 
+
 # List of all unapproved objects, for the special admin page
 @login_required
 @user_passes_test(lambda u: u.is_staff)
 @user_passes_test(lambda u: u.groups.filter(name='pgweb moderators').exists())
 def admin_pending(request):
     return render(request, 'core/admin_pending.html', {
-            'app_list': get_all_pending_moderations(),
-            })
+        'app_list': get_all_pending_moderations(),
+    })
+
 
 # Purge objects from varnish, for the admin pages
 @login_required
@@ -297,8 +315,9 @@ def admin_purge(request):
     latest = curs.fetchall()
 
     return render(request, 'core/admin_purge.html', {
-            'latest_purges': latest,
-            })
+        'latest_purges': latest,
+    })
+
 
 @csrf_exempt
 def api_varnish_purge(request):
@@ -313,6 +332,7 @@ def api_varnish_purge(request):
         curs.execute("SELECT varnish_purge_expr(%s)", (expr, ))
     return HttpResponse("Purged %s entries\n" % n)
 
+
 # Merge two organisations
 @login_required
 @user_passes_test(lambda u: u.is_superuser)
@@ -346,5 +366,5 @@ def admin_mergeorg(request):
         form = MergeOrgsForm()
 
     return render(request, 'core/admin_mergeorg.html', {
-            'form': form,
+        'form': form,
     })
index c26695ff846d8fbc1e6b1eeb5a25f64b2e81ee48..96df3a7db4d498e81f4534b83f3f604712773106 100644 (file)
@@ -1,8 +1,9 @@
 from django import forms
 
+
 class DocCommentForm(forms.Form):
     name = forms.CharField(max_length=100, required=True, label='Your Name')
     email = forms.EmailField(max_length=100, required=True, label='Your Email')
     shortdesc = forms.CharField(max_length=100, required=True, label="Subject")
     details = forms.CharField(required=True, widget=forms.Textarea,
-        label="What is your comment?")
+                              label="What is your comment?")
index 7a522147457cfb7cdb1e3dd5a03e8b3d7c6dc8c4..2e6124177291bc798cd8a3b6a5e1806631af7a55 100644 (file)
@@ -1,6 +1,7 @@
 from django.db import models
 from pgweb.core.models import Version
 
+
 class DocPage(models.Model):
     id = models.AutoField(null=False, primary_key=True)
     file = models.CharField(max_length=64, null=False, blank=False)
@@ -20,6 +21,7 @@ class DocPage(models.Model):
         # Index file first, because we want to list versions by file
         unique_together = [('file', 'version')]
 
+
 class DocPageAlias(models.Model):
     file1 = models.CharField(max_length=64, null=False, blank=False, unique=True)
     file2 = models.CharField(max_length=64, null=False, blank=False, unique=True)
@@ -30,4 +32,4 @@ class DocPageAlias(models.Model):
     # XXX: needs a unique functional index as well, see the migration!
     class Meta:
         db_table = 'docsalias'
-        verbose_name_plural='Doc page aliases'
+        verbose_name_plural = 'Doc page aliases'
index 942d05733988c809cfef957d2a4c96ac8547b727..78c3f9331b6fb7bd65653fe981449a8df7c5a90d 100644 (file)
@@ -1,6 +1,7 @@
 from django.db import connection
 from pgweb.core.models import Version
 
+
 def get_struct():
     currentversion = Version.objects.get(current=True)
 
@@ -31,7 +32,7 @@ def get_struct():
             version = int(version)
 
         yield ('docs/%s/%s' % (version, filename),
-               testing and 0.1 or docprio, # beta/rc versions always get 0.1 in prio
+               testing and 0.1 or docprio,  # beta/rc versions always get 0.1 in prio
                loaded)
 
         # Also yield the current version urls, with the highest
@@ -40,6 +41,7 @@ def get_struct():
             yield ('docs/current/%s' % filename,
                    1.0, loaded)
 
+
 # For our internal sitemap (used only by our own search engine),
 # include the devel version of the docs (and only those, since the
 # other versions are already included)
index d0936272861e6d32db87b4c4cdd8f3e948c8bc1c..3934e568a03b9ed5da9411104e6b907e5b0f82ba 100644 (file)
@@ -17,6 +17,7 @@ from pgweb.core.models import Version
 from models import DocPage
 from forms import DocCommentForm
 
+
 @allow_frames
 @content_sources('style', "'unsafe-inline'")
 def docpage(request, version, filename):
@@ -57,9 +58,9 @@ def docpage(request, version, filename):
         where=["file=%s OR file IN (SELECT file2 FROM docsalias WHERE file1=%s) OR file IN (SELECT file1 FROM docsalias WHERE file2=%s)"],
         params=[fullname, fullname, fullname],
         select={
-            'supported':"COALESCE((SELECT supported FROM core_version v WHERE v.tree=version), 'f')",
-            'testing':"COALESCE((SELECT testing FROM core_version v WHERE v.tree=version),0)",
-    }).order_by('-supported', 'version').only('version', 'file')
+            'supported': "COALESCE((SELECT supported FROM core_version v WHERE v.tree=version), 'f')",
+            'testing': "COALESCE((SELECT testing FROM core_version v WHERE v.tree=version),0)",
+        }).order_by('-supported', 'version').only('version', 'file')
 
     return render(request, 'docs/docspage.html', {
         'page': page,
@@ -71,6 +72,7 @@ def docpage(request, version, filename):
         'loaddate': loaddate,
     })
 
+
 def docspermanentredirect(request, version, typ, page, *args):
     """Provides a permanent redirect from the old static/interactive pages to
     the modern pages that do not have said keywords.
@@ -80,18 +82,22 @@ def docspermanentredirect(request, version, typ, page, *args):
         url += page
     return HttpResponsePermanentRedirect(url)
 
+
 def docsrootpage(request, version):
     return docpage(request, version, 'index')
 
+
 def redirect_root(request, version):
     return HttpResponsePermanentRedirect("/docs/%s/" % version)
 
+
 def root(request):
-    versions = Version.objects.filter(Q(supported=True) | Q(testing__gt=0,tree__gt=0)).order_by('-tree')
+    versions = Version.objects.filter(Q(supported=True) | Q(testing__gt=0, tree__gt=0)).order_by('-tree')
     return render_pgweb(request, 'docs', 'docs/index.html', {
         'versions': versions,
     })
 
+
 class _VersionPdfWrapper(object):
     """
     A wrapper around a version that knows to look for PDF files, and
@@ -110,26 +116,31 @@ class _VersionPdfWrapper(object):
             self.indexname = 'postgres.html'
         else:
             self.indexname = 'index.html'
+
     def __getattr__(self, name):
         return getattr(self.__version, name)
+
     def _find_pdf(self, pagetype):
         try:
             return os.stat('%s/documentation/pdf/%s/postgresql-%s-%s.pdf' % (settings.STATIC_CHECKOUT, self.__version.numtree, self.__version.numtree, pagetype)).st_size
         except:
             return 0
 
+
 def manuals(request):
-    versions = Version.objects.filter(Q(supported=True) | Q(testing__gt=0,tree__gt=0)).order_by('-tree')
+    versions = Version.objects.filter(Q(supported=True) | Q(testing__gt=0, tree__gt=0)).order_by('-tree')
     return render_pgweb(request, 'docs', 'docs/manuals.html', {
         'versions': [_VersionPdfWrapper(v) for v in versions],
     })
 
+
 def manualarchive(request):
-    versions = Version.objects.filter(testing=0,supported=False,tree__gt=0).order_by('-tree')
+    versions = Version.objects.filter(testing=0, supported=False, tree__gt=0).order_by('-tree')
     return render_pgweb(request, 'docs', 'docs/archive.html', {
         'versions': [_VersionPdfWrapper(v) for v in versions],
     })
 
+
 @login_required
 def commentform(request, itemid, version, filename):
     v = get_object_or_404(Version, tree=version)
index 2449acbdd91a66bfceec84f6c115e8525716532b..9b5ed3ef8e33803fba573e3a0dd745ebe727ea53 100644 (file)
@@ -7,23 +7,27 @@ import re
 from pgweb.util.admin import PgwebAdmin
 from models import StackBuilderApp, Category, Product, LicenceType
 
+
 class ProductAdmin(PgwebAdmin):
     list_display = ('name', 'org', 'approved', 'lastconfirmed',)
     list_filter = ('approved',)
     search_fields = ('name', 'description', )
     ordering = ('name', )
 
+
 def duplicate_stackbuilderapp(modeladmin, request, queryset):
     # Duplicate each individual selected object, but turn off
     # the active flag if it's on.
     for o in queryset:
-        o.id = None # Triggers creation of a new object
+        o.id = None  # Triggers creation of a new object
         o.active = False
         o.textid = o.textid + "_new"
         o.save()
 
+
 duplicate_stackbuilderapp.short_description = "Duplicate application"
 
+
 class StackBuilderAppAdminForm(forms.ModelForm):
     class Meta:
         model = StackBuilderApp
@@ -47,11 +51,13 @@ class StackBuilderAppAdminForm(forms.ModelForm):
                 raise ValidationError("Dependency '%s' does not exist!" % d)
         return self.cleaned_data['txtdependencies']
 
+
 class StackBuilderAppAdmin(admin.ModelAdmin):
     list_display = ('textid', 'active', 'name', 'platform', 'version', )
     actions = [duplicate_stackbuilderapp, ]
     form = StackBuilderAppAdminForm
 
+
 admin.site.register(Category)
 admin.site.register(LicenceType)
 admin.site.register(Product, ProductAdmin)
index 150e0b701e5f2e352d49c88b5a17addeedb3c960..2faf63ec342692ec2eaa00ff4c3d8902313164f7 100644 (file)
@@ -3,13 +3,17 @@ from django import forms
 from pgweb.core.models import Organisation
 from models import Product
 
+
 class ProductForm(forms.ModelForm):
     form_intro = """Note that in order to register a new product, you must first register an organisation.
 If you have not done so, use <a href="/account/organisations/new/">this form</a>."""
+
     def __init__(self, *args, **kwargs):
         super(ProductForm, self).__init__(*args, **kwargs)
+
     def filter_by_user(self, user):
         self.fields['org'].queryset = Organisation.objects.filter(managers=user, approved=True)
+
     class Meta:
         model = Product
         exclude = ('lastconfirmed', 'approved', )
index 682fdd9272bd651390813d43f429d87860156f35..003f0364f0ec6647b779e2834565f07e2abc61bc 100644 (file)
@@ -13,6 +13,7 @@ class Category(models.Model):
     class Meta:
         ordering = ('catname',)
 
+
 class LicenceType(models.Model):
     typename = models.CharField(max_length=100, null=False, blank=False)
 
@@ -22,6 +23,7 @@ class LicenceType(models.Model):
     class Meta:
         ordering = ('typename',)
 
+
 class Product(models.Model):
     name = models.CharField(max_length=100, null=False, blank=False, unique=True)
     approved = models.BooleanField(null=False, default=False)
@@ -45,17 +47,27 @@ class Product(models.Model):
     class Meta:
         ordering = ('name',)
 
+
 class StackBuilderApp(models.Model):
     textid = models.CharField(max_length=100, null=False, blank=False)
     version = models.CharField(max_length=20, null=False, blank=False)
     platform = models.CharField(max_length=20, null=False, blank=False,
-        choices= (('windows', 'Windows (32-bit)'), ('windows-x64', 'Windows (64-bit)'), ('osx', 'Mac OS X'),
-            ('linux', 'Linux (32-bit)'), ('linux-x64', 'Linux (64-bit)'))
-    )
+                                choices=(
+                                    ('windows', 'Windows (32-bit)'),
+                                    ('windows-x64', 'Windows (64-bit)'),
+                                    ('osx', 'Mac OS X'),
+                                    ('linux', 'Linux (32-bit)'),
+                                    ('linux-x64', 'Linux (64-bit)'),
+                                ))
     secondaryplatform = models.CharField(max_length=20, null=False, blank=True,
-        choices= (('', 'None'), ('windows', 'Windows (32-bit)'), ('windows-x64', 'Windows (64-bit)'),
-            ('osx', 'Mac OS X'), ('linux', 'Linux (32-bit)'), ('linux-x64', 'Linux (64-bit)'))
-    )
+                                         choices=(
+                                             ('', 'None'),
+                                             ('windows', 'Windows (32-bit)'),
+                                             ('windows-x64', 'Windows (64-bit)'),
+                                             ('osx', 'Mac OS X'),
+                                             ('linux', 'Linux (32-bit)'),
+                                             ('linux-x64', 'Linux (64-bit)')
+                                         ))
     name = models.CharField(max_length=500, null=False, blank=False)
     active = models.BooleanField(null=False, blank=False, default=True)
     description = models.TextField(null=False, blank=False)
@@ -63,10 +75,14 @@ class StackBuilderApp(models.Model):
     pgversion = models.CharField(max_length=5, null=False, blank=True)
     edbversion = models.CharField(max_length=5, null=False, blank=True)
     format = models.CharField(max_length=5, null=False, blank=False,
-        choices = (('bin', 'Linux .bin'), ('app', 'Mac .app'),
-               ('pkg', 'Mac .pkg'), ('mpkg', 'Mac .mpkg'),
-               ('exe', 'Windows .exe'), ('msi', 'Windows .msi'))
-    )
+                              choices=(
+                                  ('bin', 'Linux .bin'),
+                                  ('app', 'Mac .app'),
+                                  ('pkg', 'Mac .pkg'),
+                                  ('mpkg', 'Mac .mpkg'),
+                                  ('exe', 'Windows .exe'),
+                                  ('msi', 'Windows .msi')
+                              ))
     installoptions = models.CharField(max_length=500, null=False, blank=True)
     upgradeoptions = models.CharField(max_length=500, null=False, blank=True)
     checksum = models.CharField(max_length=32, null=False, blank=False)
index 27a92072ef7e99b5fcf816ed3920134920cc6b56..1b3fb8de2da12ef544bd40e1e4c4ba92ff1d0240 100644 (file)
@@ -1,5 +1,6 @@
 from models import Category
 
+
 def get_struct():
     # Products
     for c in Category.objects.all():
index b51f829e099cd11b20b2baa363a2f3801d70dde5..7ccdbb3a555dc418b04d14f41e8f78f330d1f6a3 100644 (file)
@@ -17,6 +17,7 @@ from pgweb.core.models import Version
 from models import Category, Product, StackBuilderApp
 from forms import ProductForm
 
+
 #######
 # FTP browser
 #######
@@ -30,7 +31,7 @@ def ftpbrowser(request, subpath):
             raise Http404
         subpath = subpath.strip('/')
     else:
-        subpath=""
+        subpath = ""
 
     # Pickle up the list of things we need
     try:
@@ -73,19 +74,19 @@ def ftpbrowser(request, subpath):
     del allnodes
 
     # Add all directories
-    directories = [{'link': k, 'url': k, 'type': 'd'} for k,v in node.items() if v['t'] == 'd']
+    directories = [{'link': k, 'url': k, 'type': 'd'} for k, v in node.items() if v['t'] == 'd']
     # Add all symlinks (only directories supported)
-    directories.extend([{'link': k, 'url': v['d'], 'type': 'l'} for k,v in node.items() if v['t'] == 'l'])
+    directories.extend([{'link': k, 'url': v['d'], 'type': 'l'} for k, v in node.items() if v['t'] == 'l'])
 
     # A ittle early sorting wouldn't go amiss, so .. ends up at the top
-    directories.sort(key = version_sort, reverse=True)
+    directories.sort(key=version_sort, reverse=True)
 
     # Add a link to the parent directory
     if subpath:
-        directories.insert(0, {'link':'[Parent Directory]', 'url':'..'})
+        directories.insert(0, {'link': '[Parent Directory]', 'url': '..'})
 
     # Fetch files
-    files = [{'name': k, 'mtime': v['d'], 'size': v['s']} for k,v in node.items() if v['t'] == 'f']
+    files = [{'name': k, 'mtime': v['d'], 'size': v['s']} for k, v in node.items() if v['t'] == 'f']
 
     breadcrumbs = []
     if subpath:
@@ -98,12 +99,12 @@ def ftpbrowser(request, subpath):
                 breadroot = "%s/%s" % (breadroot, pathpiece)
             else:
                 breadroot = pathpiece
-            breadcrumbs.append({'name': pathpiece, 'path': breadroot});
+            breadcrumbs.append({'name': pathpiece, 'path': breadroot})
 
     # Check if there are any "content files" we should render directly on the webpage
-    file_readme = (node.has_key('README') and node['README']['t']=='f') and node['README']['c'] or None;
-    file_message = (node.has_key('.message') and node['.message']['t']=='f') and node['.message']['c'] or None;
-    file_maintainer = (node.has_key('CURRENT_MAINTAINER') and node['CURRENT_MAINTAINER']['t'] == 'f') and node['CURRENT_MAINTAINER']['c'] or None;
+    file_readme = (node.has_key('README') and node['README']['t'] == 'f') and node['README']['c'] or None
+    file_message = (node.has_key('.message') and node['.message']['t'] == 'f') and node['.message']['c'] or None
+    file_maintainer = (node.has_key('CURRENT_MAINTAINER') and node['CURRENT_MAINTAINER']['t'] == 'f') and node['CURRENT_MAINTAINER']['c'] or None
 
     del node
 
@@ -153,6 +154,7 @@ def uploadftp(request):
     # Finally, indicate to the client that we're happy
     return HttpResponse("OK", content_type="text/plain")
 
+
 @csrf_exempt
 def uploadyum(request):
     if request.method != 'PUT':
@@ -182,6 +184,7 @@ def uploadyum(request):
     # Finally, indicate to the client that we're happy
     return HttpResponse("OK", content_type="text/plain")
 
+
 @nocache
 def mirrorselect(request, path):
     # Old access to mirrors will just redirect to the main ftp site.
@@ -197,7 +200,8 @@ def yum_js(request):
     return render(request, 'downloads/js/yum.js', {
         'json': jsonstr,
         'supported_versions': ','.join([str(v.numtree) for v in Version.objects.filter(supported=True)]),
-        }, content_type='application/json')
+    }, content_type='application/json')
+
 
 #######
 # Product catalogue
@@ -208,20 +212,23 @@ def categorylist(request):
         'categories': categories,
     })
 
+
 def productlist(request, catid, junk=None):
     category = get_object_or_404(Category, pk=catid)
-    products = Product.objects.select_related('org','licencetype').filter(category=category, approved=True)
+    products = Product.objects.select_related('org', 'licencetype').filter(category=category, approved=True)
     return render_pgweb(request, 'download', 'downloads/productlist.html', {
         'category': category,
         'products': products,
         'productcount': len(products),
     })
 
+
 @login_required
 def productform(request, itemid):
     return simple_form(Product, itemid, request, ProductForm,
                        redirect='/account/edit/products/')
 
+
 #######
 # Stackbuilder
 #######
index 2ac51df12f247d4f1b80decc5a93cd482eebddb4..343d378f0723741adebc3b9ef81367fe2845ae88 100644 (file)
@@ -4,14 +4,18 @@ from django import forms
 from pgweb.util.admin import PgwebAdmin
 from models import Event
 
+
 def approve_event(modeladmin, request, queryset):
     # We need to do this in a loop even though it's less efficient,
     # since using queryset.update() will not send the moderation messages.
     for e in queryset:
         e.approved = True
         e.save()
+
+
 approve_event.short_description = 'Approve event'
 
+
 class EventAdminForm(forms.ModelForm):
     class Meta:
         model = Event
@@ -28,6 +32,7 @@ class EventAdminForm(forms.ModelForm):
                 del cleaned_data['country']
         return cleaned_data
 
+
 class EventAdmin(PgwebAdmin):
     list_display = ('title', 'org', 'startdate', 'enddate', 'approved',)
     list_filter = ('approved',)
index 21aa0d4bdaa1625496478203ee3faba1afa8fd63..9b8bed489fc33c864570c181e4e1216606328ded 100644 (file)
@@ -4,6 +4,7 @@ from models import Event
 
 from datetime import datetime, time
 
+
 class EventFeed(Feed):
     title = description = "PostgreSQL events"
     link = "https://www.postgresql.org/"
@@ -18,4 +19,4 @@ class EventFeed(Feed):
         return "https://www.postgresql.org/about/event/%s/" % obj.id
 
     def item_pubdate(self, obj):
-        return datetime.combine(obj.startdate,time.min)
+        return datetime.combine(obj.startdate, time.min)
index 460c5a761697f8f43b6fb88ca7fc77ae9a9067bd..6522723776b4de4fb95bccaafbf8bddbcad0e068 100644 (file)
@@ -4,16 +4,19 @@ from django.forms import ValidationError
 from pgweb.core.models import Organisation
 from models import Event
 
+
 class EventForm(forms.ModelForm):
     toggle_fields = [
         {
             'name': 'isonline',
             'invert': True,
-            'fields': ['city', 'state', 'country',]
+            'fields': ['city', 'state', 'country', ]
         },
     ]
+
     def __init__(self, *args, **kwargs):
         super(EventForm, self).__init__(*args, **kwargs)
+
     def filter_by_user(self, user):
         self.fields['org'].queryset = Organisation.objects.filter(managers=user, approved=True)
 
index f956d931ddcc058b3d281bf05f485abb55d4eaa3..fa94c14a3fd9b9a5bc266fdf921c225198d9d459 100644 (file)
@@ -2,6 +2,7 @@ from django.db import models
 
 from pgweb.core.models import Country, Language, Organisation
 
+
 class Event(models.Model):
     approved = models.BooleanField(null=False, blank=False, default=False)
 
@@ -42,13 +43,13 @@ class Event(models.Model):
         mgrs = self.org.managers.all()
         if len(mgrs) == 1:
             if mgrs[0].pk == 0:
-                return False # Migration organisation
+                return False  # Migration organisation
             else:
-                return True # Has an actual organisation
+                return True  # Has an actual organisation
         elif len(mgrs) > 1:
             # More than one manager means it must be new
             return True
-        return False # Has no organisastion at all
+        return False  # Has no organisastion at all
 
     @property
     def displaydate(self):
@@ -67,4 +68,4 @@ class Event(models.Model):
             return "%s, %s" % (self.city, self.country)
 
     class Meta:
-        ordering = ('-startdate','-enddate',)
+        ordering = ('-startdate', '-enddate', )
index e60303dd22ca41da7ef1b5a6c6b001d98720722b..c83c635f04e912de5cbd35e90ebe8b21b6de27e6 100644 (file)
@@ -1,6 +1,7 @@
 from datetime import date
 from models import Event
 
+
 def get_struct():
     now = date.today()
 
@@ -14,4 +15,4 @@ def get_struct():
         if yearsold > 4:
             yearsold = 4
         yield ('about/event/%s/' % n.id,
-               0.5-(yearsold/10.0))
+               0.5 - (yearsold / 10.0))
index d0fa7da8ef5864822a5f421a05ce0880809cb0f6..9c4cdb9ebfe4b5308ebda035a49eaeba6d674918 100644 (file)
@@ -10,31 +10,35 @@ from pgweb.util.helpers import simple_form
 from models import Event
 from forms import EventForm
 
+
 def main(request):
     community_events = Event.objects.select_related('country').filter(approved=True, badged=True).filter(enddate__gt=date.today()).order_by('enddate', 'startdate',)
     other_events = Event.objects.select_related('country').filter(approved=True, badged=False).filter(enddate__gt=date.today()).order_by('enddate', 'startdate',)
     return render_pgweb(request, 'about', 'events/archive.html', {
         'title': 'Upcoming Events',
         'eventblocks': (
-            { 'name': 'Community Events', 'events': community_events, 'link': '',},
-            { 'name': 'Other Events', 'events': other_events, 'link': '',},
+            {'name': 'Community Events', 'events': community_events, 'link': '', },
+            {'name': 'Other Events', 'events': other_events, 'link': '', },
         ),
     })
 
+
 def _eventarchive(request, title):
     # Hardcode to the latest 100 events. Do we need paging too?
     events = Event.objects.select_related('country').filter(approved=True).filter(enddate__lte=date.today()).order_by('-enddate', '-startdate',)[:100]
     return render_pgweb(request, 'about', 'events/archive.html', {
-            'title': '%s Archive' % title,
-            'archive': True,
-            'eventblocks': (
-                {'name': title, 'events': events, },
-                ),
+        'title': '%s Archive' % title,
+        'archive': True,
+        'eventblocks': (
+            {'name': title, 'events': events, },
+        ),
     })
 
+
 def archive(request):
     return _eventarchive(request, 'Event')
 
+
 def item(request, itemid, throwaway=None):
     event = get_object_or_404(Event, pk=itemid)
     if not event.approved:
@@ -43,6 +47,7 @@ def item(request, itemid, throwaway=None):
         'obj': event,
     })
 
+
 @login_required
 def form(request, itemid):
     return simple_form(Event, itemid, request, EventForm,
index 1581c830c21c36b8cbc30cdb90ba9f295dac452e..c5c286c3142ef74e475eafcad09edd80beb0c6b6 100644 (file)
@@ -2,18 +2,22 @@ from django.contrib import admin
 
 from models import Feature, FeatureGroup
 
+
 class FeatureInline(admin.TabularInline):
     model = Feature
 
+
 class FeatureGroupAdmin(admin.ModelAdmin):
     inlines = [FeatureInline, ]
     list_display = ('groupname', 'groupsort')
     ordering = ['groupsort']
 
+
 class FeatureAdmin(admin.ModelAdmin):
     list_display = ('featurename', 'group')
     list_filter = ('group',)
     search_fields = ('featurename',)
 
+
 admin.site.register(FeatureGroup, FeatureGroupAdmin)
 admin.site.register(Feature, FeatureAdmin)
index 9ed6cbd6d5d9df42f9322b20dcd30d68153999a4..033c238f36de732d9321c639dcccdeaa2616d7cc 100644 (file)
@@ -1,12 +1,13 @@
 from django.db import models
 
 choices_map = {
-    0: {'str': 'No',       'class': 'no', 'bgcolor': '#ffdddd'},
-    1: {'str': 'Yes',      'class': 'yes', 'bgcolor': '#ddffdd'},
-    2: {'str': 'Obsolete', 'class': 'obs', 'bgcolor': '#ddddff'},
-    3: {'str': '?',        'class': 'unk', 'bgcolor': '#ffffaa'},
+    0: {'str': 'No', 'class': 'no', 'bgcolor': '#ffdddd'},
+    1: {'str': 'Yes', 'class': 'yes', 'bgcolor': '#ddffdd'},
+    2: {'str': 'Obsolete', 'class': 'obs', 'bgcolor': '#ddddff'},
+    3: {'str': '?', 'class': 'unk', 'bgcolor': '#ffffaa'},
 }
-choices = [(k, v['str']) for k,v in choices_map.items()]
+choices = [(k, v['str']) for k, v in choices_map.items()]
+
 
 class FeatureGroup(models.Model):
     groupname = models.CharField(max_length=100, null=False, blank=False)
@@ -20,13 +21,14 @@ class FeatureGroup(models.Model):
     @property
     def columns(self):
         # Return a list of all the columns for the matrix
-        return [b for a,b in versions]
+        return [b for a, b in versions]
+
 
 class Feature(models.Model):
     group = models.ForeignKey(FeatureGroup, null=False, blank=False)
     featurename = models.CharField(max_length=100, null=False, blank=False)
     featuredescription = models.TextField(null=False, blank=True)
-    #WARNING! All fields that start with "v" will be considered versions!
+    # WARNING! All fields that start with "v" will be considered versions!
     v74 = models.IntegerField(verbose_name="7.4", null=False, blank=False, default=0, choices=choices)
     v74.visible_default = False
     v80 = models.IntegerField(verbose_name="8.0", null=False, blank=False, default=0, choices=choices)
@@ -53,7 +55,7 @@ class Feature(models.Model):
 
     def columns(self):
         # Get a list of column based on all versions that are visible_default
-        return [choices_map[getattr(self, a)] for a,b in versions]
+        return [choices_map[getattr(self, a)] for a, b in versions]
 
     @property
     def featurelink(self):
@@ -62,5 +64,6 @@ class Feature(models.Model):
         else:
             return 'detail/%s/' % self.id
 
-versions = [(f.name,f.verbose_name) for f in Feature()._meta.fields if f.name.startswith('v') and getattr(f, 'visible_default', True)]
+
+versions = [(f.name, f.verbose_name) for f in Feature()._meta.fields if f.name.startswith('v') and getattr(f, 'visible_default', True)]
 versions = sorted(versions, key=lambda f: -float(f[1]))
index a50c9fd049cd1ddaad1aff31622eba975d6e1883..5ad800e3d5983d53b4f8fade92e29b0f3f9329eb 100644 (file)
@@ -5,6 +5,7 @@ from pgweb.util.contexts import render_pgweb
 from pgweb.core.models import Version
 from models import Feature
 
+
 def root(request):
     features = Feature.objects.all().select_related().order_by('group__groupsort', 'group__groupname', 'featurename')
     groups = []
@@ -29,6 +30,7 @@ def root(request):
         'versions': versions,
     })
 
+
 def detail(request, featureid):
     feature = get_object_or_404(Feature, pk=featureid)
     return render_pgweb(request, 'about', 'featurematrix/featuredetail.html', {
index 54f60fe2160bf2f1632a45929af6f2c53fb94c38..0b4331b362b98a4e5947c491612f95a3c2e56686 100644 (file)
@@ -1,3 +1,3 @@
-#from django.db import models
+# from django.db import models
 
 # Create your models here.
index 5fda8fca7cf7ef677b468b4f6551c34a62ccf821..437f2a205ebaaf62f40eabba3c73f72e11757db8 100644 (file)
@@ -1,5 +1,6 @@
 from django.http import HttpResponseRedirect
 
+
 def mailpref(request, listname):
     # Just redirect to the homepage of pglister, don't try specific lists
     return HttpResponseRedirect("https://lists.postgresql.org/")
index 0afb90651af927a12c8a7151ca8c3704d7f4c2db..e79048bfd8cc14e85800470f23f385407bc1f879 100644 (file)
@@ -1,2 +1 @@
 from django import forms
-
index fe2701a6758c9ad6932c6edb360fdeb6b2e7711d..1c6e94f1bec2a26bc420e95267b0c6359b11f737 100644 (file)
@@ -7,6 +7,7 @@ from django.db import connection, transaction
 from django.conf import settings
 import requests
 
+
 class Command(BaseCommand):
     help = 'Synchronize mailinglists'
 
@@ -15,9 +16,9 @@ class Command(BaseCommand):
 
     def handle(self, *args, **options):
         if settings.ARCHIVES_SEARCH_PLAINTEXT:
-            proto="http"
+            proto = "http"
         else:
-            proto="https"
+            proto = "https"
         r = requests.get('{0}://{1}/listinfo/'.format(proto, settings.ARCHIVES_SEARCH_SERVER))
         j = r.json()
         allgroups = list(set([l['group'] for l in j]))
index 0d398c9abebec0881dff53650ac9166de6f649ff..55520e19a916b8b5210175df3a62eaff9bbde563 100644 (file)
@@ -1,5 +1,6 @@
 from django.db import models
 
+
 class MailingListGroup(models.Model):
     groupname = models.CharField(max_length=64, null=False, blank=False)
     sortkey = models.IntegerField(null=False, default=10)
@@ -16,6 +17,7 @@ class MailingListGroup(models.Model):
     class Meta:
         ordering = ('sortkey', )
 
+
 class MailingList(models.Model):
     group = models.ForeignKey(MailingListGroup, null=False)
     listname = models.CharField(max_length=64, null=False, blank=False, unique=True)
index 62db498f2afaa7d275d8a025b5ea635d21d9ca3a..42531554dfea11ddbf6154be972c31239b19e1a4 100644 (file)
@@ -4,20 +4,21 @@ import json
 
 from models import MailingList, MailingListGroup
 
+
 def listinfo(request):
     resp = HttpResponse(content_type='application/json')
-    groupdata = [ {
-            'id': g.id,
-            'name': g.groupname,
-            'sort': g.sortkey,
-            } for g in MailingListGroup.objects.all()]
-    listdata = [ {
-            'id': l.id,
-            'name': l.listname,
-            'groupid': l.group_id,
-            'active': l.active,
-            'shortdesc': l.shortdesc,
-            'description': l.description,
-            } for l in MailingList.objects.all()]
+    groupdata = [{
+        'id': g.id,
+        'name': g.groupname,
+        'sort': g.sortkey,
+    } for g in MailingListGroup.objects.all()]
+    listdata = [{
+        'id': l.id,
+        'name': l.listname,
+        'groupid': l.group_id,
+        'active': l.active,
+        'shortdesc': l.shortdesc,
+        'description': l.description,
+    } for l in MailingList.objects.all()]
     json.dump({'groups': groupdata, 'lists': listdata}, resp)
     return resp
index 801d11635ed581455caff32718b339711940a1c0..6529ea184250742fcb1f015d7844e4b4501a9847 100644 (file)
@@ -4,6 +4,7 @@ from email.parser import Parser
 
 from models import QueuedMail
 
+
 class QueuedMailAdmin(admin.ModelAdmin):
     model = QueuedMail
     readonly_fields = ('parsed_content', )
@@ -27,4 +28,5 @@ class QueuedMailAdmin(admin.ModelAdmin):
 
     parsed_content.short_description = 'Parsed mail'
 
+
 admin.site.register(QueuedMail, QueuedMailAdmin)
index 5e019fb635752e5367647af8fb96657b5c146ca8..361b19f00634a98217fa8aecb27c86fd3a33cc3c 100755 (executable)
@@ -13,6 +13,7 @@ import smtplib
 
 from pgweb.mailqueue.models import QueuedMail
 
+
 class Command(BaseCommand):
     help = 'Send queued mail'
 
index 10c50f3df0da3fa9b2564743ac08d3376406deb1..f7018431a3ac9ad146bba14861eb27d5e11650be 100644 (file)
@@ -1,5 +1,6 @@
 from django.db import models
 
+
 class QueuedMail(models.Model):
     sender = models.EmailField(max_length=100, null=False, blank=False)
     receiver = models.EmailField(max_length=100, null=False, blank=False)
index 71c6335076486014b6d10bdef0d934989d3a6116..8b85982e6b748f2b49786cd0e540e5a8008f513a 100644 (file)
@@ -8,11 +8,13 @@ from email.header import Header
 
 from models import QueuedMail
 
+
 def _encoded_email_header(name, email):
     if name:
         return formataddr((str(Header(name, 'utf-8')), email))
     return email
 
+
 def send_simple_mail(sender, receiver, subject, msgtxt, attachments=None, usergenerated=False, cc=None, replyto=None, sendername=None, receivername=None, messageid=None):
     # attachment format, each is a tuple of (name, mimetype,contents)
     # content should be *binary* and not base64 encoded, since we need to
@@ -36,14 +38,13 @@ def send_simple_mail(sender, receiver, subject, msgtxt, attachments=None, userge
 
     if attachments:
         for filename, contenttype, content in attachments:
-            main,sub = contenttype.split('/')
-            part = MIMENonMultipart(main,sub)
+            main, sub = contenttype.split('/')
+            part = MIMENonMultipart(main, sub)
             part.set_payload(content)
             part.add_header('Content-Disposition', 'attachment; filename="%s"' % filename)
             encoders.encode_base64(part)
             msg.attach(part)
 
-
     # Just write it to the queue, so it will be transactionally rolled back
     QueuedMail(sender=sender, receiver=receiver, fullmsg=msg.as_string(), usergenerated=usergenerated).save()
     if cc:
@@ -52,6 +53,7 @@ def send_simple_mail(sender, receiver, subject, msgtxt, attachments=None, userge
         # message content to extract cc fields).
         QueuedMail(sender=sender, receiver=cc, fullmsg=msg.as_string(), usergenerated=usergenerated).save()
 
+
 def send_mail(sender, receiver, fullmsg, usergenerated=False):
     # Send an email, prepared as the full MIME encoded mail already
     QueuedMail(sender=sender, receiver=receiver, fullmsg=fullmsg, usergenerated=False).save()
index b8b46b8928fad8420b2574edad08762b1104c9ee..5505d82cf1fb34813986e7b4fd89a01a1dd93fcc 100644 (file)
@@ -3,28 +3,30 @@ from django.db.models import Q
 
 from pgweb.core.models import Version
 
+
 class _version_choices():
     def __iter__(self):
         yield ('-1', '** Select version')
         q = Q(supported=True) | Q(testing__gt=0)
         for v in Version.objects.filter(q):
-            for minor in range(v.latestminor,-1,-1):
-                if not v.testing or minor>0:
+            for minor in range(v.latestminor, -1, -1):
+                if not v.testing or minor > 0:
                     # For beta/rc versions, there is no beta0, so exclude it
                     s = v.buildversionstring(minor)
-                    yield (s,s)
+                    yield (s, s)
         yield ('Unsupported/Unknown', 'Unsupported/Unknown')
 
+
 class SubmitBugForm(forms.Form):
     name = forms.CharField(max_length=100, required=True)
     email = forms.EmailField(max_length=100, required=True)
     pgversion = forms.CharField(max_length=20, required=True,
-        label="PostgreSQL version",
-        widget=forms.Select(choices=_version_choices()))
+                                label="PostgreSQL version",
+                                widget=forms.Select(choices=_version_choices()))
     os = forms.CharField(max_length=50, required=True,
-        label="Operating system")
+                         label="Operating system")
     shortdesc = forms.CharField(max_length=100, required=True,
-        label="Short description")
+                                label="Short description")
     details = forms.CharField(required=True, widget=forms.Textarea)
 
     def clean_pgversion(self):
index 58148e7033b965551c9f06ed9d2e9d55edf50ebf..90addd9e2d9e646a5a00201efbb07d9fb67b8d85 100644 (file)
@@ -1,5 +1,6 @@
 from django.db import models
 
+
 class BugIdMap(models.Model):
     # Explicit id field because we don't want a SERIAL here, since we generate
     # the actual bug IDs externally.
index 9656da8192235af291dc1bee696675994e062cea..a0431f9d3ed590ab7e3821f953c6e36db0e6c781 100644 (file)
@@ -17,12 +17,14 @@ from pgweb.misc.models import BugIdMap
 
 from forms import SubmitBugForm
 
+
 def _make_bugs_messageid(bugid):
     return "<{0}-{1}@postgresql.org>".format(
         bugid,
         hashlib.md5("{0}-{1}".format(os.getpid(), time.time())).hexdigest()[:16],
     )
 
+
 @login_required
 def submitbug(request):
     if request.method == 'POST':
@@ -73,17 +75,20 @@ def submitbug(request):
         'savebutton': 'Submit and Send Email',
     })
 
+
 @login_required
 def submitbug_done(request, bugid):
     return render_pgweb(request, 'support', 'misc/bug_completed.html', {
         'bugid': bugid,
     })
 
+
 def bugs_redir(request, bugid):
     r = get_object_or_404(BugIdMap, id=bugid)
 
     return HttpResponseRedirect("{0}/message-id/{1}".format(settings.SITE_ROOT, r.messageid))
 
+
 # A crash testing URL. If the file /tmp/crashtest exists, raise a http 500
 # error. Otherwise, just return a fixed text response
 def crashtest(request):
index 44734b7ce1fcf22bfa9439c5881618df364202ac..545dc4e0726c92eca2e28e714067626c48bf1517 100644 (file)
@@ -3,6 +3,7 @@ from django.contrib import admin
 from pgweb.util.admin import PgwebAdmin
 from models import NewsArticle, NewsTag
 
+
 class NewsArticleAdmin(PgwebAdmin):
     list_display = ('title', 'org', 'date', 'approved', )
     list_filter = ('approved', )
@@ -17,8 +18,10 @@ class NewsArticleAdmin(PgwebAdmin):
         }
         return super(NewsArticleAdmin, self).change_view(request, object_id, extra_context=my_context)
 
+
 class NewsTagAdmin(PgwebAdmin):
     list_display = ('urlname', 'name', 'description')
 
+
 admin.site.register(NewsArticle, NewsArticleAdmin)
 admin.site.register(NewsTag, NewsTagAdmin)
index b28ad8c7a869dbb54095fb2fa571a4a83eba6c0a..aab8585f19bc1a5a77331890b3c9fb90c05798f5 100644 (file)
@@ -4,6 +4,7 @@ from models import NewsArticle
 
 from datetime import datetime, time
 
+
 class NewsFeed(Feed):
     title = description = "PostgreSQL news"
     link = "https://www.postgresql.org/"
@@ -24,4 +25,4 @@ class NewsFeed(Feed):
         return "https://www.postgresql.org/about/news/%s/" % obj.id
 
     def item_pubdate(self, obj):
-        return datetime.combine(obj.date,time.min)
+        return datetime.combine(obj.date, time.min)
index a711cfae74103440e64a8c70befd08c69e088056..d25ac471c739e86ed95f075057480d63b30ece6c 100644 (file)
@@ -4,11 +4,14 @@ from django.forms import ValidationError
 from pgweb.core.models import Organisation
 from models import NewsArticle, NewsTag
 
+
 class NewsArticleForm(forms.ModelForm):
     def __init__(self, *args, **kwargs):
         super(NewsArticleForm, self).__init__(*args, **kwargs)
+
     def filter_by_user(self, user):
         self.fields['org'].queryset = Organisation.objects.filter(managers=user, approved=True)
+
     def clean_date(self):
         if self.instance.pk and self.instance.approved:
             if self.cleaned_data['date'] != self.instance.date:
index ce49b24cb618bf86b679b8f2ea699974e63554d1..5e980fc110bc3a3c029c5579dd1f77167a6f918c 100644 (file)
@@ -15,6 +15,7 @@ from pgweb.news.models import NewsArticle
 
 import requests_oauthlib
 
+
 class Command(BaseCommand):
     help = 'Post to twitter'
 
@@ -24,7 +25,7 @@ class Command(BaseCommand):
         if not curs.fetchall()[0][0]:
             raise CommandError("Failed to get advisory lock, existing twitter_post process stuck?")
 
-        articles = list(NewsArticle.objects.filter(tweeted=False, approved=True, date__gt=datetime.now()-timedelta(days=7)).order_by('date'))
+        articles = list(NewsArticle.objects.filter(tweeted=False, approved=True, date__gt=datetime.now() - timedelta(days=7)).order_by('date'))
         if not len(articles):
             return
 
@@ -35,7 +36,7 @@ class Command(BaseCommand):
 
         for a in articles:
             # We hardcode 30 chars for the URL shortener. And then 10 to cover the intro and spacing.
-            statusstr = u"News: {0} {1}/about/news/{2}/".format(a.title[:140-40], settings.SITE_ROOT, a.id)
+            statusstr = u"News: {0} {1}/about/news/{2}/".format(a.title[:140 - 40], settings.SITE_ROOT, a.id)
             r = tw.post('https://api.twitter.com/1.1/statuses/update.json', data={
                 'status': statusstr,
             })
index 6914d10f08c9f6c9b3a21f7ca12f91b1c23a2a62..0b9113925ac997d7311cc97ba14b08f6ad883b74 100644 (file)
@@ -9,6 +9,7 @@ from django.conf import settings
 
 import requests_oauthlib
 
+
 class Command(BaseCommand):
     help = 'Register with twitter oauth'
 
index d31f64e8b2ed7518df6161ef1e1335255d8d9aaa..ab6ed1487f7cab8c26e826aabd61fc280904c1c3 100644 (file)
@@ -2,6 +2,7 @@ from django.db import models
 from datetime import date
 from pgweb.core.models import Organisation
 
+
 class NewsTag(models.Model):
     urlname = models.CharField(max_length=20, null=False, blank=False, unique=True)
     name = models.CharField(max_length=32, null=False, blank=False)
@@ -13,6 +14,7 @@ class NewsTag(models.Model):
     class Meta:
         ordering = ('urlname', )
 
+
 class NewsArticle(models.Model):
     org = models.ForeignKey(Organisation, null=False, blank=False, verbose_name="Organisation", help_text="If no organisations are listed, please check the <a href=\"/account/orglist/\">organisation list</a> and contact the organisation manager or <a href=\"mailto:webmaster@postgresql.org\">webmaster@postgresql.org</a> if none are listed.")
     approved = models.BooleanField(null=False, blank=False, default=False)
index 4c49a196ae5c263958f3a623cdcb5e16202ad204..42a8de5763be39f5ee8e5472593e21bf992d2649 100644 (file)
@@ -1,9 +1,10 @@
 from datetime import date, timedelta
 from models import NewsArticle
 
+
 def get_struct():
     now = date.today()
-    fouryearsago = date.today() - timedelta(4*365, 0, 0)
+    fouryearsago = date.today() - timedelta(4 * 365, 0, 0)
 
     # We intentionally don't put /about/newsarchive/ in the sitemap,
     # since we don't care about getting it indexed.
@@ -14,4 +15,4 @@ def get_struct():
         if yearsold > 4:
             yearsold = 4
         yield ('about/news/%s/' % n.id,
-               0.5-(yearsold/10.0))
+               0.5 - (yearsold / 10.0))
index a055cd17edf5b51ef632ef93d66156f10e47fd2c..2c3a2a7cbd071c2b1d55d678a5c3a2692c183690 100644 (file)
@@ -10,9 +10,10 @@ from forms import NewsArticleForm
 
 import json
 
+
 def archive(request, tag=None, paging=None):
     if tag:
-        tag = get_object_or_404(NewsTag,urlname=tag.strip('/'))
+        tag = get_object_or_404(NewsTag, urlname=tag.strip('/'))
         news = NewsArticle.objects.filter(approved=True, tags=tag)
     else:
         tag = None
@@ -23,6 +24,7 @@ def archive(request, tag=None, paging=None):
         'newstags': NewsTag.objects.all(),
     })
 
+
 def item(request, itemid, throwaway=None):
     news = get_object_or_404(NewsArticle, pk=itemid)
     if not news.approved:
@@ -32,6 +34,7 @@ def item(request, itemid, throwaway=None):
         'newstags': NewsTag.objects.all(),
     })
 
+
 def taglist_json(request):
     return HttpResponse(json.dumps({
         'tags': [{'name': t.urlname, 'description': t.description} for t in NewsTag.objects.distinct('urlname')],
index b644146ca5f2718af9bc8641b5e787c634c8cc35..722a84510537c5b0b9632c81eaf28500577fb062 100644 (file)
@@ -3,9 +3,11 @@ from django.contrib import admin
 from pgweb.util.admin import PgwebAdmin
 from models import ProfessionalService
 
+
 class ProfessionalServiceAdmin(PgwebAdmin):
     list_display = ('__unicode__', 'approved',)
     list_filter = ('approved',)
     search_fields = ('org__name',)
 
+
 admin.site.register(ProfessionalService, ProfessionalServiceAdmin)
index 8df2ff40b005b75f136a6b9cb90434adf0adf9ef..3ec70a93027e4662e8d0cd9aa97bae02a8778876 100644 (file)
@@ -3,13 +3,17 @@ from django import forms
 from pgweb.core.models import Organisation
 from models import ProfessionalService
 
+
 class ProfessionalServiceForm(forms.ModelForm):
     form_intro = """Note that in order to register a new professional service, you must first register an organisation.
 If you have not done so, use <a href="/account/organisations/new/">this form</a>."""
+
     def __init__(self, *args, **kwargs):
         super(ProfessionalServiceForm, self).__init__(*args, **kwargs)
+
     def filter_by_user(self, user):
         self.fields['org'].queryset = Organisation.objects.filter(managers=user, approved=True)
+
     class Meta:
         model = ProfessionalService
         exclude = ('submitter', 'approved', )
index 180e1d436f780f9647947641423dcf16b613369c..7f5b58db707be5b608ba2ed3a409fa21b61c619b 100644 (file)
@@ -2,14 +2,15 @@ from django.db import models
 
 from pgweb.core.models import Organisation
 
+
 class ProfessionalService(models.Model):
     approved = models.BooleanField(null=False, blank=False, default=False)
 
-    org =  models.OneToOneField(Organisation, null=False, blank=False,
-                            db_column="organisation_id",
-                            verbose_name="organisation",
-                            help_text="If no organisations are listed, please check the <a href=\"/account/orglist/\">organisation list</a> and contact the organisation manager or <a href=\"mailto:webmaster@postgresql.org\">webmaster@postgresql.org</a> if none are listed.")
-    description = models.TextField(null=False,blank=False)
+    org = models.OneToOneField(Organisation, null=False, blank=False,
+                               db_column="organisation_id",
+                               verbose_name="organisation",
+                               help_text="If no organisations are listed, please check the <a href=\"/account/orglist/\">organisation list</a> and contact the organisation manager or <a href=\"mailto:webmaster@postgresql.org\">webmaster@postgresql.org</a> if none are listed.")
+    description = models.TextField(null=False, blank=False)
     employees = models.CharField(max_length=32, null=True, blank=True)
     locations = models.CharField(max_length=128, null=True, blank=True)
     region_africa = models.BooleanField(null=False, default=False, verbose_name="Africa")
index 659753b2d3366688947166953b48775a0bd2f77a..c053d03b140139ab20a7ee014524ac071fe5254b 100644 (file)
@@ -1,5 +1,6 @@
 from views import regions
 
+
 def get_struct():
     for key, name in regions:
         yield ('support/professional_support/%s/' % key, None)
index ad135d22b86d6ce9d1c80376ca702b3b04e9c8cf..e618bee3143064808e2ac3fbd1cff6356f3623a9 100644 (file)
@@ -8,18 +8,19 @@ from models import ProfessionalService
 from forms import ProfessionalServiceForm
 
 regions = (
-   ('africa','Africa'),
-   ('asia','Asia'),
-   ('europe','Europe'),
-   ('northamerica','North America'),
-   ('oceania','Oceania'),
-   ('southamerica','South America'),
+    ('africa', 'Africa'),
+    ('asia', 'Asia'),
+    ('europe', 'Europe'),
+    ('northamerica', 'North America'),
+    ('oceania', 'Oceania'),
+    ('southamerica', 'South America'),
 )
 
+
 def root(request, servtype):
-    title = servtype=='support' and 'Professional Services' or 'Hosting Providers'
-    what = servtype=='support' and 'support' or 'hosting'
-    support = servtype=='support'
+    title = servtype == 'support' and 'Professional Services' or 'Hosting Providers'
+    what = servtype == 'support' and 'support' or 'hosting'
+    support = servtype == 'support'
     return render_pgweb(request, 'support', 'profserv/root.html', {
         'title': title,
         'support': support,
@@ -29,19 +30,19 @@ def root(request, servtype):
 
 
 def region(request, servtype, regionname):
-    regname = [n for r,n in regions if r==regionname]
+    regname = [n for r, n in regions if r == regionname]
     if not regname:
         raise Http404
     regname = regname[0]
 
-    what = servtype=='support' and 'support' or 'hosting'
-    whatname = servtype=='support' and 'Professional Services' or 'Hosting Providers'
+    what = servtype == 'support' and 'support' or 'hosting'
+    whatname = servtype == 'support' and 'Professional Services' or 'Hosting Providers'
     title = "%s - %s" % (whatname, regname)
-    support = servtype=='support'
+    support = servtype == 'support'
 
     # DB model is a bit funky here, so use the extra-where functionality to filter properly.
     # Field names are cleaned up earlier, so it's safe against injections.
-    services = ProfessionalService.objects.select_related('org').filter(approved=True).extra(where=["region_%s AND provides_%s" % (regionname, what),])
+    services = ProfessionalService.objects.select_related('org').filter(approved=True).extra(where=["region_%s AND provides_%s" % (regionname, what), ])
 
     return render_pgweb(request, 'support', 'profserv/list.html', {
         'title': title,
index 35e13e73ec0b8a1c2219c11bb715a90fcdd596af..588714cb04d51170796f608211e4a6b8537b5cbb 100644 (file)
@@ -3,9 +3,11 @@ from django.contrib import admin
 from pgweb.util.admin import PgwebAdmin
 from models import PUG
 
+
 class PUGAdmin(PgwebAdmin):
     list_display = ('title', 'approved', )
     list_filter = ('approved', )
     search_fields = ('title', )
 
+
 admin.site.register(PUG, PUGAdmin)
index 54383f10012055406944201b16dd176e5680d697..acc42e09d35c568d542c05b35a4dbaddfb2fa866 100644 (file)
@@ -1,5 +1,6 @@
 from django.db import models
 
+
 class PUG(models.Model):
     """
     contains information about a local PostgreSQL user group
index 167fd1f097b4af76e4394c733673cd89d3ffb72e..4333e37c1f72e96503c613fc02eb765ac95e5eb5 100644 (file)
@@ -2,6 +2,7 @@ from pgweb.util.contexts import render_pgweb
 
 from models import PUG
 
+
 def index(request):
     """
     contains list of PUGs, in country/locale alphabetical order
index 39267e17406920f14d7097d76d5e0f19d349917e..aa6bb2d504538b30d94597543a470e369cd53c0b 100644 (file)
@@ -1,7 +1,9 @@
 from django.contrib import admin
 from models import Quote
 
+
 class QuoteAdmin(admin.ModelAdmin):
     list_display = ('quote', 'who', 'org', )
 
+
 admin.site.register(Quote, QuoteAdmin)
index fa690bcfdd6b2baaae31fdeddfb718bb7ab682b8..dbfa59ffc022a2c5e31a1105e21f207db78a2197 100644 (file)
@@ -1,5 +1,6 @@
 from django.db import models
 
+
 class Quote(models.Model):
     approved = models.BooleanField(null=False, default=False)
     quote = models.TextField(null=False, blank=False)
index 4ee48cd5d186e81d5a4d1acc8224fbe97de4308d..24e16895d519d493d6e672d153b81507eabe4043 100644 (file)
@@ -1 +1 @@
-#from django.db import models
+# from django.db import models
index 07f5ded17c44ad2f817d3ff420235aae45ef66bb..65add875b7e161aadac930bfc57b819f133f34d7 100644 (file)
@@ -18,9 +18,10 @@ from pgweb.lists.models import MailingList
 # it, so we allow development installs to run without it...
 try:
     import pylibmc
-    has_memcached=True
+    has_memcached = True
 except:
-    has_memcached=False
+    has_memcached = False
+
 
 def generate_pagelinks(pagenum, totalpages, querystring):
     # Generate a list of links to page through a search result
@@ -31,21 +32,21 @@ def generate_pagelinks(pagenum, totalpages, querystring):
 
     if pagenum > 1:
         # Prev link
-        yield '<a href="%s&p=%s">Prev</a>' % (querystring, pagenum-1)
+        yield '<a href="%s&p=%s">Prev</a>' % (querystring, pagenum - 1)
 
     if pagenum > 10:
         start = pagenum - 10
     else:
         start = 1
 
-    for i in range(start, min(start+20, totalpages + 1)):
+    for i in range(start, min(start + 20, totalpages + 1)):
         if i == pagenum:
             yield "%s" % i
         else:
             yield '<a href="%s&p=%s">%s</a>' % (querystring, i, i)
 
-    if pagenum != min(start+20, totalpages):
-        yield '<a href="%s&p=%s">Next</a>' % (querystring, pagenum+1)
+    if pagenum != min(start + 20, totalpages):
+        yield '<a href="%s&p=%s">Next</a>' % (querystring, pagenum + 1)
 
 
 @csrf_exempt
@@ -102,10 +103,10 @@ def search(request):
             dateval = 365
 
         sortoptions = (
-            {'val':'r', 'text': 'Rank', 'selected': not (request.GET.has_key('s') and request.GET['s'] == 'd')},
-            {'val':'d', 'text': 'Date', 'selected': request.GET.has_key('s') and request.GET['s'] == 'd'},
-            {'val':'i', 'text': 'Reverse date', 'selected': request.GET.has_key('s') and request.GET['s'] == 'i'},
-            )
+            {'val': 'r', 'text': 'Rank', 'selected': not (request.GET.has_key('s') and request.GET['s'] == 'd')},
+            {'val': 'd', 'text': 'Date', 'selected': request.GET.has_key('s') and request.GET['s'] == 'd'},
+            {'val': 'i', 'text': 'Reverse date', 'selected': request.GET.has_key('s') and request.GET['s'] == 'i'},
+        )
         dateoptions = (
             {'val': -1, 'text': 'anytime'},
             {'val': 1, 'text': 'within last day'},
@@ -113,7 +114,7 @@ def search(request):
             {'val': 31, 'text': 'within last month'},
             {'val': 186, 'text': 'within last 6 months'},
             {'val': 365, 'text': 'within last year'},
-            )
+        )
     else:
         searchlists = False
         if request.GET.has_key('u'):
@@ -130,24 +131,24 @@ def search(request):
     if not request.GET.has_key('q') or request.GET['q'] == '':
         if searchlists:
             return render(request, 'search/listsearch.html', {
-                    'search_error': "No search term specified.",
-                    'sortoptions': sortoptions,
-                    'lists': MailingList.objects.all().order_by("group__sortkey"),
-                    'listid': listid,
-                    'dates': dateoptions,
-                    'dateval': dateval,
-                    })
+                'search_error': "No search term specified.",
+                'sortoptions': sortoptions,
+                'lists': MailingList.objects.all().order_by("group__sortkey"),
+                'listid': listid,
+                'dates': dateoptions,
+                'dateval': dateval,
+            })
         else:
             return render(request, 'search/sitesearch.html', {
-                    'search_error': "No search term specified.",
-                    })
+                'search_error': "No search term specified.",
+            })
     query = request.GET['q'].strip()
 
     # Anti-stefan prevention
     if len(query) > 1000:
         return render(request, 'search/sitesearch.html', {
             'search_error': "Search term too long.",
-            })
+        })
 
     # Is the request being paged?
     if request.GET.has_key('p'):
@@ -167,7 +168,7 @@ def search(request):
         p = {
             'q': query.encode('utf-8'),
             's': listsort,
-            }
+        }
         if listid:
             if listid < 0:
                 # This is a list group, we expand that on the web server
@@ -180,7 +181,7 @@ def search(request):
         # If memcached is available, let's try it
         hits = None
         if has_memcached:
-            memc = pylibmc.Client(['127.0.0.1',], binary=True)
+            memc = pylibmc.Client(['127.0.0.1', ], binary=True)
             # behavior not supported on pylibmc in squeeze:: behaviors={'tcp_nodelay':True})
             try:
                 hits = memc.get(urlstr)
@@ -194,23 +195,23 @@ def search(request):
             else:
                 c = httplib.HTTPSConnection(settings.ARCHIVES_SEARCH_SERVER, strict=True, timeout=5)
             c.request('POST', '/archives-search/', urlstr, {'Content-type': 'application/x-www-form-urlencoded; charset=utf-8'})
-            c.sock.settimeout(20) # Set a 20 second timeout
+            c.sock.settimeout(20)  # Set a 20 second timeout
             try:
                 r = c.getresponse()
             except (socket.timeout, ssl.SSLError):
                 return render(request, 'search/listsearch.html', {
-                        'search_error': 'Timeout when talking to search server. Please try your search again later, or with a more restrictive search terms.',
-                        })
+                    'search_error': 'Timeout when talking to search server. Please try your search again later, or with a more restrictive search terms.',
+                })
             if r.status != 200:
                 memc = None
                 return render(request, 'search/listsearch.html', {
-                        'search_error': 'Error talking to search server: %s' % r.reason,
-                        })
+                    'search_error': 'Error talking to search server: %s' % r.reason,
+                })
             hits = json.loads(r.read())
             if has_memcached and memc:
                 # Store them in memcached too! But only for 10 minutes...
                 # And always compress it, just because we can
-                memc.set(urlstr, hits, 60*10, 1)
+                memc.set(urlstr, hits, 60 * 10, 1)
                 memc = None
 
         if isinstance(hits, dict):
@@ -226,31 +227,31 @@ def search(request):
             listid or '',
             dateval,
             listsort
-            )
+        )
 
         return render(request, 'search/listsearch.html', {
-                'hitcount': totalhits,
-                'firsthit': firsthit,
-                'lasthit': min(totalhits, firsthit+hitsperpage-1),
-                'query': request.GET['q'],
-                'pagelinks': "&nbsp;".join(
-                    generate_pagelinks(pagenum,
-                                       totalhits / hitsperpage + 1,
-                                       querystr)),
-                'hits': [{
-                        'date': h['d'],
-                        'subject': h['s'],
-                        'author': h['f'],
-                        'messageid': h['m'],
-                        'abstract': h['a'],
-                        'rank': h['r'],
-                        } for h in hits[firsthit-1:firsthit+hitsperpage-1]],
-                'sortoptions': sortoptions,
-                'lists': MailingList.objects.all().order_by("group__sortkey"),
-                'listid': listid,
-                'dates': dateoptions,
-                'dateval': dateval,
-                })
+            'hitcount': totalhits,
+            'firsthit': firsthit,
+            'lasthit': min(totalhits, firsthit + hitsperpage - 1),
+            'query': request.GET['q'],
+            'pagelinks': "&nbsp;".join(
+                generate_pagelinks(pagenum,
+                                   totalhits / hitsperpage + 1,
+                                   querystr)),
+            'hits': [{
+                'date': h['d'],
+                'subject': h['s'],
+                'author': h['f'],
+                'messageid': h['m'],
+                'abstract': h['a'],
+                'rank': h['r'],
+            } for h in hits[firsthit - 1:firsthit + hitsperpage - 1]],
+            'sortoptions': sortoptions,
+            'lists': MailingList.objects.all().order_by("group__sortkey"),
+            'listid': listid,
+            'dates': dateoptions,
+            'dateval': dateval,
+        })
 
     else:
         # Website search is still done by making a regular pgsql connection
@@ -260,8 +261,8 @@ def search(request):
             curs = conn.cursor()
         except:
             return render(request, 'search/sitesearch.html', {
-                    'search_error': 'Could not connect to search database.'
-                    })
+                'search_error': 'Could not connect to search database.'
+            })
 
         # This is kind of a hack, but... Some URLs are flagged as internal
         # and should as such only be included in searches that explicitly
@@ -280,11 +281,11 @@ def search(request):
                 'allsites': allsites,
                 'suburl': suburl,
                 'internal': include_internal,
-                })
+            })
         except psycopg2.ProgrammingError:
             return render(request, 'search/sitesearch.html', {
-                    'search_error': 'Error executing search query.'
-                    })
+                'search_error': 'Error executing search query.'
+            })
 
         hits = curs.fetchall()
         conn.close()
@@ -300,22 +301,22 @@ def search(request):
             urllib.quote_plus(query.encode('utf-8')),
             allsites and "1" or "0",
             quoted_suburl,
-            )
+        )
 
         return render(request, 'search/sitesearch.html', {
-                'suburl': suburl,
-                'allsites': allsites,
-                'hitcount': totalhits,
-                'firsthit': firsthit,
-                'lasthit': min(totalhits, firsthit+hitsperpage-1),
-                'query': request.GET['q'],
-                'pagelinks': "&nbsp;".join(
-                    generate_pagelinks(pagenum,
-                                       totalhits / hitsperpage + 1,
-                                       querystr)),
-                'hits': [{
-                        'title': h[3],
-                        'url': "%s%s" % (h[1], h[2]),
-                        'abstract': h[4].replace("[[[[[[", "<strong>").replace("]]]]]]","</strong>"),
-                        'rank': h[5]} for h in hits[:-1]],
-                })
+            'suburl': suburl,
+            'allsites': allsites,
+            'hitcount': totalhits,
+            'firsthit': firsthit,
+            'lasthit': min(totalhits, firsthit + hitsperpage - 1),
+            'query': request.GET['q'],
+            'pagelinks': "&nbsp;".join(
+                generate_pagelinks(pagenum,
+                                   totalhits / hitsperpage + 1,
+                                   querystr)),
+            'hits': [{
+                'title': h[3],
+                'url': "%s%s" % (h[1], h[2]),
+                'abstract': h[4].replace("[[[[[[", "<strong>").replace("]]]]]]", "</strong>"),
+                'rank': h[5]} for h in hits[:-1]],
+        })
index 977a407b7506a9604e2c0d448647e1029a632f55..03228614d93a7bdc9c229c0b34662990ddc0f238 100644 (file)
@@ -6,26 +6,30 @@ from pgweb.core.models import Version
 from pgweb.news.models import NewsArticle
 from models import SecurityPatch, SecurityPatchVersion
 
+
 class VersionChoiceField(forms.ModelChoiceField):
     def label_from_instance(self, obj):
         return obj.numtree
 
+
 class SecurityPatchVersionAdminForm(forms.ModelForm):
     model = SecurityPatchVersion
     version = VersionChoiceField(queryset=Version.objects.filter(tree__gt=0), required=True)
 
+
 class SecurityPatchVersionAdmin(admin.TabularInline):
     model = SecurityPatchVersion
     extra = 2
     form = SecurityPatchVersionAdminForm
 
+
 class SecurityPatchForm(forms.ModelForm):
     model = SecurityPatch
     newspost = forms.ModelChoiceField(queryset=NewsArticle.objects.filter(org=settings.PGDG_ORG_ID), required=False)
 
     def clean(self):
         d = super(SecurityPatchForm, self).clean()
-        vecs = [v for k,v in d.items() if k.startswith('vector_')]
+        vecs = [v for k, v in d.items() if k.startswith('vector_')]
         empty = [v for v in vecs if v == '']
         if len(empty) != len(vecs) and len(empty) != 0:
             for k in d.keys():
@@ -33,6 +37,7 @@ class SecurityPatchForm(forms.ModelForm):
                     self.add_error(k, 'Either specify all vector values or none')
         return d
 
+
 class SecurityPatchAdmin(admin.ModelAdmin):
     form = SecurityPatchForm
     exclude = ['cvenumber', ]
@@ -54,12 +59,15 @@ class SecurityPatchAdmin(admin.ModelAdmin):
 
     def make_public(self, request, queryset):
         self.do_public(queryset, True)
+
     def make_unpublic(self, request, queryset):
         self.do_public(queryset, False)
+
     def do_public(self, queryset, val):
         # Intentionally loop and do manually, so we generate change notices
         for p in queryset.all():
-            p.public=val
+            p.public = val
             p.save()
 
+
 admin.site.register(SecurityPatch, SecurityPatchAdmin)
index e74c072a3871fec73718662338d0570e33483056..d07641d42f3e414cb9db0c67a3cf96191f9730af 100644 (file)
@@ -13,6 +13,7 @@ from pgweb.util.misc import varnish_purge
 
 import requests
 
+
 class Command(BaseCommand):
     help = 'Update CVE links'
 
index 34166fcaa488d1aff1b4e0b0054ad2fcdae51d03..be317931b5a57a33da4a2c977f81898c20f1b6f0 100644 (file)
@@ -8,7 +8,7 @@ from pgweb.news.models import NewsArticle
 
 import cvss
 
-vector_choices = {k:list(v.items()) for k,v in cvss.constants3.METRICS_VALUE_NAMES.items()}
+vector_choices = {k: list(v.items()) for k, v in cvss.constants3.METRICS_VALUE_NAMES.items()}
 
 component_choices = (
     ('core server', 'Core server product'),
@@ -19,25 +19,29 @@ component_choices = (
     ('other', 'Other'),
 )
 
+
 re_cve = re.compile('^(\d{4})-(\d{4,5})$')
+
+
 def cve_validator(val):
     if not re_cve.match(val):
         raise ValidationError("Enter CVE in format 0000-0000 without the CVE text")
 
+
 def other_vectors_validator(val):
     if val != val.upper():
         raise ValidationError("Vector must be uppercase")
 
     try:
         for vector in val.split('/'):
-            k,v = vector.split(':')
+            k, v = vector.split(':')
             if not cvss.constants3.METRICS_VALUES.has_key(k):
                 raise ValidationError("Metric {0} is unknown".format(k))
             if k in ('AV', 'AC', 'PR', 'UI', 'S', 'C', 'I', 'A'):
                 raise ValidationError("Metric {0} must be specified in the dropdowns".format(k))
             if not cvss.constants3.METRICS_VALUES[k].has_key(v):
                 raise ValidationError("Metric {0} has unknown value {1}. Valind ones are: {2}".format(
-                    k,v,
+                    k, v,
                     ", ".join(cvss.constants3.METRICS_VALUES[k].keys()),
                 ))
     except ValidationError:
@@ -45,10 +49,11 @@ def other_vectors_validator(val):
     except Exception, e:
         raise ValidationError("Failed to parse vectors: %s" % e)
 
+
 class SecurityPatch(models.Model):
     public = models.BooleanField(null=False, blank=False, default=False)
     newspost = models.ForeignKey(NewsArticle, null=True, blank=True)
-    cve = models.CharField(max_length=32, null=False, blank=True, validators=[cve_validator,])
+    cve = models.CharField(max_length=32, null=False, blank=True, validators=[cve_validator, ])
     cve_visible = models.BooleanField(null=False, blank=False, default=False)
     cvenumber = models.IntegerField(null=False, blank=False, db_index=True)
     detailslink = models.URLField(null=False, blank=True)
@@ -65,7 +70,7 @@ class SecurityPatch(models.Model):
     vector_c = models.CharField(max_length=1, null=False, blank=True, verbose_name="Confidentiality Impact", choices=vector_choices['C'])
     vector_i = models.CharField(max_length=1, null=False, blank=True, verbose_name="Integrity Impact", choices=vector_choices['I'])
     vector_a = models.CharField(max_length=1, null=False, blank=True, verbose_name="Availability Impact", choices=vector_choices['A'])
-    legacyscore = models.CharField(max_length=1, null=False, blank=True, verbose_name='Legacy score', choices=(('A', 'A'),('B','B'),('C','C'),('D','D')))
+    legacyscore = models.CharField(max_length=1, null=False, blank=True, verbose_name='Legacy score', choices=(('A', 'A'), ('B', 'B'), ('C', 'C'), ('D', 'D')))
 
     purge_urls = ('/support/security/', )
 
@@ -109,8 +114,8 @@ class SecurityPatch(models.Model):
         verbose_name_plural = 'Security patches'
         ordering = ('-cvenumber',)
 
+
 class SecurityPatchVersion(models.Model):
     patch = models.ForeignKey(SecurityPatch, null=False, blank=False)
     version = models.ForeignKey(Version, null=False, blank=False)
     fixed_minor = models.IntegerField(null=False, blank=False)
-
index eeba3663ee48899d3e58345a2d02fa0350d3976d..5340bafb36d99c57c4d98adfec608bf059391554 100644 (file)
@@ -5,9 +5,11 @@ from pgweb.util.contexts import render_pgweb
 from pgweb.core.models import Version
 from models import SecurityPatch
 
+
 def GetPatchesList(filt):
     return SecurityPatch.objects.raw("SELECT p.*, array_agg(CASE WHEN v.tree >= 10 THEN v.tree::int ELSE v.tree END ORDER BY v.tree) AS affected, array_agg(CASE WHEN v.tree >= 10 THEN v.tree::int ELSE v.tree END || '.' || fixed_minor ORDER BY v.tree) AS fixed FROM security_securitypatch p INNER JOIN security_securitypatchversion sv ON p.id=sv.patch_id INNER JOIN core_version v ON v.id=sv.version_id WHERE p.public AND {0} GROUP BY p.id ORDER BY cvenumber DESC".format(filt))
 
+
 def _list_patches(request, filt):
     patches = GetPatchesList(filt)
 
@@ -19,10 +21,12 @@ def _list_patches(request, filt):
         ),
     })
 
+
 def index(request):
     # Show all supported versions
     return _list_patches(request, "v.supported")
 
+
 def version(request, numtree):
     version = get_object_or_404(Version, tree=numtree)
     # It's safe to pass in the value since we get it from the module, not from
index 68a86f064758437faea002e05f68cc99f3032af3..1c72af65388d540bf6056fdbac0730e1ba073a54 100644 (file)
@@ -8,12 +8,12 @@ ADMINS = (
 
 MANAGERS = ADMINS
 
-DATABASES={
+DATABASES = {
     'default': {
         'ENGINE': 'django.db.backends.postgresql_psycopg2',
         'NAME': 'pgweb',
-        }
     }
+}
 
 # Local time zone for this installation. Choices can be found here:
 # http://en.wikipedia.org/wiki/List_of_tz_zones_by_name
@@ -58,7 +58,7 @@ MIDDLEWARE_CLASSES = [
     'pgweb.util.middleware.PgMiddleware',
 ]
 
-CSRF_FAILURE_VIEW='pgweb.core.views.csrf_failure'
+CSRF_FAILURE_VIEW = 'pgweb.core.views.csrf_failure'
 
 ROOT_URLCONF = 'pgweb.urls'
 
@@ -80,9 +80,9 @@ TEMPLATES = [{
     },
 }]
 
-LOGIN_URL='/account/login/'
-LOGIN_REDIRECT_URL='/account/'
-LOGOUT_URL='/account/logout/'
+LOGIN_URL = '/account/login/'
+LOGIN_REDIRECT_URL = '/account/'
+LOGOUT_URL = '/account/logout/'
 
 AUTHENTICATION_BACKENDS = (
     'pgweb.util.auth.AuthBackend',
@@ -118,48 +118,48 @@ INSTALLED_APPS = [
 ]
 
 # Default format for date/time (as it changes between machines)
-DATETIME_FORMAT="Y-m-d H:i:s"
+DATETIME_FORMAT = "Y-m-d H:i:s"
 
 # Configure recaptcha. Most details contain keys and are thus handled
 # in settings_local.py. Override NOCAPTCHA to actually use them.
-NOCAPTCHA=True
-RECAPTCHA_SITE_KEY=""
-RECAPTCHA_SECRET_KEY=""
+NOCAPTCHA = True
+RECAPTCHA_SITE_KEY = ""
+RECAPTCHA_SECRET_KEY = ""
 
 ###
 # Application specific settings, likely overridden in settings_local.py.
 #
 # In particular, adjust the email addresses
 ###
-SESSION_COOKIE_SECURE=True                             # Allow our session only over https
-SESSION_COOKIE_DOMAIN="www.postgresql.org"             # Don't allow access by other postgresql.org sites
-SESSION_COOKIE_HTTPONLY=True                           # Access over http only, no js
-CSRF_COOKIE_SECURE=SESSION_COOKIE_SECURE
-CSRF_COOKIE_DOMAIN=SESSION_COOKIE_DOMAIN
-CSRF_COOKIE_HTTPONLY=SESSION_COOKIE_HTTPONLY
-
-SITE_ROOT="http://www.postgresql.org"                  # Root of working URLs
-FTP_PICKLE="/usr/local/pgweb/ftpsite.pickle"           # Location of file with current contents from ftp site
-YUM_JSON="/usr/local/pgweb/external/yum.json"
-STATIC_CHECKOUT="/usr/local/pgweb-static"              # Location of a checked out pgweb-static project
-NOTIFICATION_EMAIL="someone@example.com"               # Address to send notifications *to*
-NOTIFICATION_FROM="someone@example.com"                # Address to send notifications *from*
-ACCOUNTS_NOREPLY_FROM="someone@example.com"            # Address to send account messages from
-BUGREPORT_EMAIL="someone@example.com"                  # Address to pgsql-bugs list
-BUGREPORT_NOREPLY_EMAIL="someone-noreply@example.com"  # Address to no-reply pgsql-bugs address
-DOCSREPORT_EMAIL="someone@example.com"                 # Address to pgsql-docs list
-DOCSREPORT_NOREPLY_EMAIL="someone-noreply@example.com" # Address to no-reply pgsql-docs address
-FRONTEND_SERVERS=()                                    # A tuple containing the *IP addresses* of all the
-                                                       # varnish frontend servers in use.
-FTP_MASTERS=()                                           # A tuple containing the *IP addresses* of all machines
-                                                       # trusted to upload ftp structure data
-VARNISH_PURGERS=()                                     # Extra servers that can do varnish purges through our queue
-DO_ESI=False                                           # Generate ESI tags
-ARCHIVES_SEARCH_SERVER="archives.postgresql.org"       # Where to post REST request for archives search
-ARCHIVES_SEARCH_PLAINTEXT=False                        # Contact ARCHIVES_SEARCH_SERVER with http instead of https
-FRONTEND_SMTP_RELAY="magus.postgresql.org"             # Where to relay user generated email
-OAUTH={}                                               # OAuth providers and keys
-PGDG_ORG_ID=-1                                         # id of the PGDG organisation entry
+SESSION_COOKIE_SECURE = True                                # Allow our session only over https
+SESSION_COOKIE_DOMAIN = "www.postgresql.org"                # Don't allow access by other postgresql.org sites
+SESSION_COOKIE_HTTPONLY = True                              # Access over http only, no js
+CSRF_COOKIE_SECURE = SESSION_COOKIE_SECURE
+CSRF_COOKIE_DOMAIN = SESSION_COOKIE_DOMAIN
+CSRF_COOKIE_HTTPONLY = SESSION_COOKIE_HTTPONLY
+
+SITE_ROOT = "http://www.postgresql.org"                     # Root of working URLs
+FTP_PICKLE = "/usr/local/pgweb/ftpsite.pickle"              # Location of file with current contents from ftp site
+YUM_JSON = "/usr/local/pgweb/external/yum.json"
+STATIC_CHECKOUT = "/usr/local/pgweb-static"                 # Location of a checked out pgweb-static project
+NOTIFICATION_EMAIL = "someone@example.com"                  # Address to send notifications *to*
+NOTIFICATION_FROM = "someone@example.com"                   # Address to send notifications *from*
+ACCOUNTS_NOREPLY_FROM = "someone@example.com"               # Address to send account messages from
+BUGREPORT_EMAIL = "someone@example.com"                     # Address to pgsql-bugs list
+BUGREPORT_NOREPLY_EMAIL = "someone-noreply@example.com"     # Address to no-reply pgsql-bugs address
+DOCSREPORT_EMAIL = "someone@example.com"                    # Address to pgsql-docs list
+DOCSREPORT_NOREPLY_EMAIL = "someone-noreply@example.com"    # Address to no-reply pgsql-docs address
+FRONTEND_SERVERS = ()                                       # A tuple containing the *IP addresses* of all the
+                                                            # varnish frontend servers in use.
+FTP_MASTERS = ()                                            # A tuple containing the *IP addresses* of all machines
+                                                            # trusted to upload ftp structure data
+VARNISH_PURGERS = ()                                        # Extra servers that can do varnish purges through our queue
+DO_ESI = False                                              # Generate ESI tags
+ARCHIVES_SEARCH_SERVER = "archives.postgresql.org"          # Where to post REST request for archives search
+ARCHIVES_SEARCH_PLAINTEXT = False                           # Contact ARCHIVES_SEARCH_SERVER with http instead of https
+FRONTEND_SMTP_RELAY = "magus.postgresql.org"                # Where to relay user generated email
+OAUTH = {}                                                  # OAuth providers and keys
+PGDG_ORG_ID = -1                                            # id of the PGDG organisation entry
 
 # Load local settings overrides
 from settings_local import *
index e5d48d833e6cda39b22601f6acc42d7ef46ee5a0..378d789e96516a56c54b7cd0bf18140cd9e62dfd 100644 (file)
@@ -2,6 +2,7 @@ from django.db import models
 
 from pgweb.core.models import Country
 
+
 class SponsorType(models.Model):
     typename = models.CharField(max_length=32, null=False, blank=False)
     description = models.TextField(null=False, blank=False)
@@ -16,6 +17,7 @@ class SponsorType(models.Model):
     class Meta:
         ordering = ('sortkey', )
 
+
 class Sponsor(models.Model):
     sponsortype = models.ForeignKey(SponsorType, null=False)
     name = models.CharField(max_length=128, null=False, blank=False)
@@ -31,6 +33,7 @@ class Sponsor(models.Model):
     class Meta:
         ordering = ('name', )
 
+
 class Server(models.Model):
     name = models.CharField(max_length=32, null=False, blank=False)
     sponsors = models.ManyToManyField(Sponsor)
index 15bef9d4de4a5b15be61e7653d7bf8afc60de531..d78356f13bf3fb29e75339659de4a41c3e77e050 100644 (file)
@@ -3,13 +3,15 @@ from pgweb.util.decorators import cache
 
 from models import Sponsor, Server
 
+
 @cache(minutes=30)
 def sponsors(request):
-    sponsors = Sponsor.objects.select_related().filter(sponsortype__sortkey__gt=0).order_by('sponsortype__sortkey' ,'?')
+    sponsors = Sponsor.objects.select_related().filter(sponsortype__sortkey__gt=0).order_by('sponsortype__sortkey', '?')
     return render_pgweb(request, 'about', 'sponsors/sponsors.html', {
         'sponsors': sponsors,
     })
 
+
 def servers(request):
     servers = Server.objects.select_related().all()
     return render_pgweb(request, 'about', 'sponsors/servers.html', {
index cdd9c4cdac2b1c5fe975cf0455ed5204f250e96a..7d59e3138ea48ccf913c11d7b9cec14d89f141c5 100644 (file)
@@ -1,13 +1,16 @@
 from django.contrib import admin
 from models import Survey, SurveyLock, SurveyAnswer
 
+
 class SurveyAdmin(admin.ModelAdmin):
-    list_display = ('question','posted','current',)
-    ordering = ('-posted',)
+    list_display = ('question', 'posted', 'current', )
+    ordering = ('-posted', )
+
 
 class SurveyAnswerAdmin(admin.ModelAdmin):
-    list_display = ('survey','tot1','tot2','tot3','tot4','tot5','tot6','tot7','tot8')
-    ordering = ('-survey__posted',)
+    list_display = ('survey', 'tot1', 'tot2', 'tot3', 'tot4', 'tot5', 'tot6', 'tot7', 'tot8')
+    ordering = ('-survey__posted', )
+
 
 admin.site.register(Survey, SurveyAdmin)
 admin.site.register(SurveyLock)
index 72cfe7c716094f9d66e63f3f29a58af591c082ce..237d31771027f26177f33c9f499e24a75eb8d746 100644 (file)
@@ -1,16 +1,20 @@
 from django.db import models
 
+
 # internal text/value object
 class SurveyQuestion(object):
     def __init__(self, value, text):
         self.value = value
         self.text = text
+
+
 class SurveyAnswerValues(object):
     def __init__(self, option, votes, votespercent):
         self.option = option
         self.votes = votes
         self.votespercent = votespercent
 
+
 class Survey(models.Model):
     question = models.CharField(max_length=500, null=False, blank=False)
     opt1 = models.CharField(max_length=500, null=False, blank=False)
@@ -31,7 +35,7 @@ class Survey(models.Model):
 
     @property
     def questions(self):
-        for i in range (1,9):
+        for i in range(1, 9):
             v = getattr(self, "opt%s" % i)
             if not v: break
             yield SurveyQuestion(i, v)
@@ -45,22 +49,22 @@ class Survey(models.Model):
     @property
     def completeanswers(self):
         for a in self._get_complete_answers():
-            yield SurveyAnswerValues(a[0], a[1], self.totalvotes>0 and (100*a[1]/self.totalvotes) or 0)
+            yield SurveyAnswerValues(a[0], a[1], self.totalvotes > 0 and (100 * a[1] / self.totalvotes) or 0)
 
     @property
     def totalvotes(self):
-        if not hasattr(self,"_totalvotes"):
+        if not hasattr(self, "_totalvotes"):
             self._totalvotes = 0
             for a in self._get_complete_answers():
                 self._totalvotes = self._totalvotes + a[1]
         return self._totalvotes
 
     def _get_complete_answers(self):
-        for i in range(1,9):
+        for i in range(1, 9):
             q = getattr(self, "opt%s" % i)
             if not q: break
             n = getattr(self.answers, "tot%s" % i)
-            yield (q,n)
+            yield (q, n)
 
     def save(self):
         # Make sure only one survey at a time can be the current one
@@ -71,12 +75,13 @@ class Survey(models.Model):
             for p in previous:
                 if not p == self:
                     p.current = False
-                    p.save() # primary key check avoids recursion
+                    p.save()  # primary key check avoids recursion
 
         # Now that we've made any previously current ones non-current, we are
         # free to save this one.
         super(Survey, self).save()
 
+
 class SurveyAnswer(models.Model):
     survey = models.OneToOneField(Survey, null=False, blank=False, primary_key=True)
     tot1 = models.IntegerField(null=False, default=0)
@@ -90,6 +95,7 @@ class SurveyAnswer(models.Model):
 
     purge_urls = ('/community/survey', )
 
+
 class SurveyLock(models.Model):
     ipaddr = models.GenericIPAddressField(null=False, blank=False)
     time = models.DateTimeField(null=False, auto_now_add=True)
index ba3c09a3f7249f6867eadb74681550e7b42c91e9..a30f96c6e1faa9962801f74d0eefcebe7f61b850 100644 (file)
@@ -10,6 +10,7 @@ from pgweb.util.helpers import HttpServerError
 
 from models import Survey, SurveyAnswer, SurveyLock
 
+
 def results(request, surveyid, junk=None):
     survey = get_object_or_404(Survey, pk=surveyid)
     surveylist = Survey.objects.all().order_by('-posted')
@@ -19,6 +20,7 @@ def results(request, surveyid, junk=None):
         'surveylist': surveylist,
     })
 
+
 # Served over insecure HTTP, the Varnish proxy strips cookies
 @csrf_exempt
 def vote(request, surveyid):
@@ -51,7 +53,7 @@ def vote(request, surveyid):
     lock.save()
 
     answers = SurveyAnswer.objects.get_or_create(survey=surv)[0]
-    setattr(answers, attrname, getattr(answers, attrname)+1)
+    setattr(answers, attrname, getattr(answers, attrname) + 1)
     answers.save()
 
     # Do explicit varnish purge, since it seems that the model doesn't
index 6b59a1dcb066db3866c4bbed2922b16dae3c2381..b3b10f5835ac32649c3e25d54f186fa54e354230 100644 (file)
@@ -51,7 +51,7 @@ class PgwebAdmin(admin.ModelAdmin):
         for x in queryset:
             x.delete()
     custom_delete_selected.short_description = "Delete selected items"
-    actions=['custom_delete_selected']
+    actions = ['custom_delete_selected']
 
     def save_model(self, request, obj, form, change):
         if change and hasattr(self.model, 'send_notification') and self.model.send_notification:
@@ -81,14 +81,16 @@ class PgwebAdmin(admin.ModelAdmin):
                                  msgstr)
 
                 # Also generate a mail to the moderators
-                send_simple_mail(settings.NOTIFICATION_FROM,
-                                 settings.NOTIFICATION_EMAIL,
-                                 "Moderation comment on %s %s" % (obj.__class__._meta.verbose_name, obj.id),
-                                 _get_moderator_notification_text(obj,
-                                                                  request.POST['new_notification'],
-                                                                  request.user.username
-                                                              ))
-
+                send_simple_mail(
+                    settings.NOTIFICATION_FROM,
+                    settings.NOTIFICATION_EMAIL,
+                    "Moderation comment on %s %s" % (obj.__class__._meta.verbose_name, obj.id),
+                    _get_moderator_notification_text(
+                        obj,
+                        request.POST['new_notification'],
+                        request.user.username
+                    )
+                )
 
         # Either no notifications, or done with notifications
         super(PgwebAdmin, self).save_model(request, obj, form, change)
@@ -112,7 +114,6 @@ request, and your submission will be re-moderated.
 """ % (objtype, txt)
 
 
-
 def _get_moderator_notification_text(obj, txt, moderator):
     return """Moderator %s made a comment to a pending object:
 Object type: %s
index de016ab7cac807c264a2f16e60f13c427c1609a9..218dfc3ed611329c3d8a488ea374d43d200bc1a4 100644 (file)
@@ -1,6 +1,7 @@
 from django.contrib.auth.models import User
 from django.contrib.auth.backends import ModelBackend
 
+
 # Special version of the authentication backend, so we can handle things like
 # forced lowercasing of usernames.
 class AuthBackend(ModelBackend):
@@ -19,4 +20,4 @@ class AuthBackend(ModelBackend):
             # User not found, so clearly they can't log in!
             return None
 
-        return None # Should never get here, but just in case...
+        return None  # Should never get here, but just in case...
index a29a64f4fe1d7885f689dbb61ec3fff51deae574..853ac1d915fdc5e4a2c2898abc2302f34e1b0ee0 100644 (file)
@@ -5,83 +5,83 @@ from django.conf import settings
 # This is the whole site navigation structure. Stick in a smarter file?
 sitenav = {
     'about': [
-        {'title': 'About',              'link':'/about/'},
-        {'title': 'Code of Conduct',    'link':'/about/policies/coc/', 'submenu': [
-            {'title': 'Committee',      'link':'/about/policies/coc_committee/'}
+        {'title': 'About', 'link': '/about/'},
+        {'title': 'Code of Conduct', 'link': '/about/policies/coc/', 'submenu': [
+            {'title': 'Committee', 'link': '/about/policies/coc_committee/'}
         ]},
-        {'title': 'Feature Matrix',     'link':'/about/featurematrix/'},
-        {'title': 'Donate',             'link':'/about/donate/'},
-        {'title': 'History',            'link':'/docs/current/history.html'},
-        {'title': 'Sponsors',           'link':'/about/sponsors/', 'submenu': [
-            {'title': 'Servers',    'link': '/about/servers/'},
+        {'title': 'Feature Matrix', 'link': '/about/featurematrix/'},
+        {'title': 'Donate', 'link': '/about/donate/'},
+        {'title': 'History', 'link': '/docs/current/history.html'},
+        {'title': 'Sponsors', 'link': '/about/sponsors/', 'submenu': [
+            {'title': 'Servers', 'link': '/about/servers/'},
         ]},
-        {'title': 'Latest News',        'link':'/about/newsarchive/'},
-        {'title': 'Upcoming Events',    'link':'/about/events/'},
-        {'title': 'Press',              'link':'/about/press/'},
-        {'title': 'Licence',            'link':'/about/licence/'},
+        {'title': 'Latest News', 'link': '/about/newsarchive/'},
+        {'title': 'Upcoming Events', 'link': '/about/events/'},
+        {'title': 'Press', 'link': '/about/press/'},
+        {'title': 'Licence', 'link': '/about/licence/'},
     ],
     'download': [
-        {'title': 'Downloads',          'link':'/download/', 'submenu': [
-                {'title': 'Binary',        'link':'/download/'},
-                {'title': 'Source',        'link':'/ftp/source/'}
+        {'title': 'Downloads', 'link': '/download/', 'submenu': [
+            {'title': 'Binary', 'link': '/download/'},
+            {'title': 'Source', 'link': '/ftp/source/'}
         ]},
-        {'title': 'Software Catalogue', 'link':'/download/product-categories/'},
-        {'title': 'File Browser',       'link':'/ftp/'},
+        {'title': 'Software Catalogue', 'link': '/download/product-categories/'},
+        {'title': 'File Browser', 'link': '/ftp/'},
     ],
     'docs': [
-        {'title': 'Documentation',      'link':'/docs/'},
-        {'title': 'Manuals',            'link':'/docs/manuals/', 'submenu': [
-            {'title': 'Archive',    'link':'/docs/manuals/archive/'},
-            {'title': 'French',     'link':'https://docs.postgresql.fr/'},
-            {'title': 'Japanese',   'link':'http://www.postgresql.jp/document/'},
-            {'title': 'Russian',    'link':'https://postgrespro.ru/docs/postgresql'},
+        {'title': 'Documentation', 'link': '/docs/'},
+        {'title': 'Manuals', 'link': '/docs/manuals/', 'submenu': [
+            {'title': 'Archive', 'link': '/docs/manuals/archive/'},
+            {'title': 'French', 'link': 'https://docs.postgresql.fr/'},
+            {'title': 'Japanese', 'link': 'http://www.postgresql.jp/document/'},
+            {'title': 'Russian', 'link': 'https://postgrespro.ru/docs/postgresql'},
         ]},
-        {'title': 'Books',              'link':'/docs/books/'},
-        {'title': 'Online Resources',   'link':'/docs/online-resources/'},
-        {'title': 'Wiki',               'link':'https://wiki.postgresql.org'},
+        {'title': 'Books', 'link': '/docs/books/'},
+        {'title': 'Online Resources', 'link': '/docs/online-resources/'},
+        {'title': 'Wiki', 'link': 'https://wiki.postgresql.org'},
     ],
     'community': [
-        {'title': 'Community',          'link':'/community/'},
-        {'title': 'Contributors',       'link':'/community/contributors/'},
-        {'title': 'Mailing Lists',      'link':'/list/'},
-        {'title': 'IRC',                'link':'/community/irc/'},
-        {'title': 'Slack',              'link':'https://postgres-slack.herokuapp.com/'},
-        {'title': 'Local User Groups',  'link':'/community/user-groups/'},
-        {'title': 'Events',             'link':'/about/events/'},
-        {'title': 'International Sites','link':'/community/international/'},
-        {'title': 'Recognition Guidelines','link':'/community/recognition/'},
+        {'title': 'Community', 'link': '/community/'},
+        {'title': 'Contributors', 'link': '/community/contributors/'},
+        {'title': 'Mailing Lists', 'link': '/list/'},
+        {'title': 'IRC', 'link': '/community/irc/'},
+        {'title': 'Slack', 'link': 'https://postgres-slack.herokuapp.com/'},
+        {'title': 'Local User Groups', 'link': '/community/user-groups/'},
+        {'title': 'Events', 'link': '/about/events/'},
+        {'title': 'International Sites', 'link': '/community/international/'},
+        {'title': 'Recognition Guidelines', 'link': '/community/recognition/'},
     ],
     'developer': [
-        {'title': 'Developers',         'link':'/developer/'},
-        {'title': 'Core Team',          'link':'/developer/core/'},
-        {'title': 'Roadmap',            'link':'/developer/roadmap/'},
-        {'title': 'Coding',             'link':'/developer/coding/'},
-        {'title': 'Testing',            'link':'/developer/testing/', 'submenu': [
-            {'title': 'Beta Information',  'link':'/developer/beta/'},
+        {'title': 'Developers', 'link': '/developer/'},
+        {'title': 'Core Team', 'link': '/developer/core/'},
+        {'title': 'Roadmap', 'link': '/developer/roadmap/'},
+        {'title': 'Coding', 'link': '/developer/coding/'},
+        {'title': 'Testing', 'link': '/developer/testing/', 'submenu': [
+            {'title': 'Beta Information', 'link': '/developer/beta/'},
         ]},
-        {'title': 'Mailing Lists',      'link':'/list/'},
-        {'title': 'Developer FAQ',      'link':'https://wiki.postgresql.org/wiki/Developer_FAQ'},
+        {'title': 'Mailing Lists', 'link': '/list/'},
+        {'title': 'Developer FAQ', 'link': 'https://wiki.postgresql.org/wiki/Developer_FAQ'},
     ],
     'support': [
-        {'title': 'Support',            'link':'/support/'},
-        {'title': 'Versioning Policy',  'link':'/support/versioning/'},
-        {'title': 'Security',           'link':'/support/security/'},
-        {'title': 'Professional Services','link':'/support/professional_support/'},
-        {'title': 'Hosting Solutions',  'link':'/support/professional_hosting/'},
-        {'title': 'Report a Bug',       'link':'/account/submitbug/'},
+        {'title': 'Support', 'link': '/support/'},
+        {'title': 'Versioning Policy', 'link': '/support/versioning/'},
+        {'title': 'Security', 'link': '/support/security/'},
+        {'title': 'Professional Services', 'link': '/support/professional_support/'},
+        {'title': 'Hosting Solutions', 'link': '/support/professional_hosting/'},
+        {'title': 'Report a Bug', 'link': '/account/submitbug/'},
     ],
     'account': [
-        {'title': 'Your account',         'link':'/account'},
-        {'title': 'Profile',            'link':'/account/profile'},
-        {'title': 'Submitted data',          'link':'/account', 'submenu': [
-            {'title': 'News Articles',  'link':'/account/edit/news/'},
-            {'title': 'Events',         'link':'/account/edit/events/'},
-            {'title': 'Products',       'link':'/account/edit/products/'},
-            {'title': 'Professional Services', 'link':'/account/edit/services/'},
-            {'title': 'Organisations',  'link':'/account/edit/organisations/'},
+        {'title': 'Your account', 'link': '/account'},
+        {'title': 'Profile', 'link': '/account/profile'},
+        {'title': 'Submitted data', 'link': '/account', 'submenu': [
+            {'title': 'News Articles', 'link': '/account/edit/news/'},
+            {'title': 'Events', 'link': '/account/edit/events/'},
+            {'title': 'Products', 'link': '/account/edit/products/'},
+            {'title': 'Professional Services', 'link': '/account/edit/services/'},
+            {'title': 'Organisations', 'link': '/account/edit/organisations/'},
         ]},
-        {'title': 'Change password',    'link':'/account/changepwd/'},
-        {'title': 'Logout',             'link':'/account/logout'},
+        {'title': 'Change password', 'link': '/account/changepwd/'},
+        {'title': 'Logout', 'link': '/account/logout'},
     ],
 }
 
@@ -92,10 +92,12 @@ def get_nav_menu(section):
     else:
         return {}
 
+
 def render_pgweb(request, section, template, context):
     context['navmenu'] = get_nav_menu(section)
     return render(request, template, context)
 
+
 def _get_gitrev():
     # Return the current git revision, that is used for
     # cache-busting URLs.
@@ -115,6 +117,7 @@ def _get_gitrev():
             # If packed-refs also can't be read, just give up
             return 'eeeeeeee'
 
+
 # Template context processor to add information about the root link and
 # the current git revision. git revision is returned as a lazy object so
 # we don't spend effort trying to load it if we don't need it (though
index f29fd1604de057930629f3d2641542f1318b2f04..9333d456f55f1a4026c65cfafdc252f092c6a714 100644 (file)
@@ -3,6 +3,7 @@ from functools import wraps
 from collections import defaultdict
 from django.contrib.auth.decorators import login_required as django_login_required
 
+
 def nocache(fn):
     def _nocache(request, *_args, **_kwargs):
         resp = fn(request, *_args, **_kwargs)
@@ -10,17 +11,19 @@ def nocache(fn):
         return resp
     return _nocache
 
+
 def cache(days=0, hours=0, minutes=0, seconds=0):
     "Set the server to cache object a specified time. td must be a timedelta object"
     def _cache(fn):
         def __cache(request, *_args, **_kwargs):
             resp = fn(request, *_args, **_kwargs)
             td = datetime.timedelta(hours=hours, minutes=minutes, seconds=seconds)
-            resp['Cache-Control'] = 's-maxage=%s' % (td.days*3600*24 + td.seconds)
+            resp['Cache-Control'] = 's-maxage=%s' % (td.days * 3600 * 24 + td.seconds)
             return resp
         return __cache
     return _cache
 
+
 def allow_frames(fn):
     def _allow_frames(request, *_args, **_kwargs):
         resp = fn(request, *_args, **_kwargs)
@@ -28,6 +31,7 @@ def allow_frames(fn):
         return resp
     return _allow_frames
 
+
 def content_sources(what, source):
     def _script_sources(fn):
         def __script_sources(request, *_args, **_kwargs):
@@ -39,12 +43,15 @@ def content_sources(what, source):
         return __script_sources
     return _script_sources
 
+
 def script_sources(source):
     return content_sources('script', source)
 
+
 def frame_sources(source):
     return content_sources('frame', source)
 
+
 # A wrapped version of login_required that throws an exception if it's
 # used on a path that's not under /account/.
 def login_required(f):
index 2159381bdda9b3f3d489743d545bc316748c9958..60686ddb1f0e5443424a2f656cd26c23caa593cb 100644 (file)
@@ -4,6 +4,7 @@ from django.http import HttpResponseRedirect, Http404
 from django.template.loader import get_template
 import django.utils.xmlutils
 
+
 def simple_form(instancetype, itemid, request, formclass, formtemplate='base/form.html', redirect='/account/', navsection='account', fixedfields=None, createifempty=False):
     if itemid == 'new':
         instance = instancetype()
@@ -33,7 +34,7 @@ def simple_form(instancetype, itemid, request, formclass, formtemplate='base/for
             # Set fixed fields. Note that this will not work if the fixed fields are ManyToMany,
             # but we'll fix that sometime in the future
             if fixedfields:
-                for k,v in fixedfields.items():
+                for k, v in fixedfields.items():
                     setattr(r, k, v)
             r.save()
 
@@ -72,13 +73,15 @@ def simple_form(instancetype, itemid, request, formclass, formtemplate='base/for
         'operation': (itemid == "new") and "New" or "Edit",
     })
 
-def template_to_string(templatename, attrs = {}):
+
+def template_to_string(templatename, attrs={}):
     return get_template(templatename).render(attrs)
 
+
 def HttpServerError(request, msg):
     r = render(request, 'errors/500.html', {
-            'message': msg,
-            })
+        'message': msg,
+    })
     r.status_code = 500
     return r
 
@@ -89,7 +92,7 @@ class PgXmlHelper(django.utils.xmlutils.SimplerXMLGenerator):
         self.skipempty = skipempty
 
     def add_xml_element(self, name, value):
-        if self.skipempty and value=='': return
+        if self.skipempty and value == '': return
         self.startElement(name, {})
         self.characters(value)
         self.endElement(name)
index 9abcae2e88d4270263bfae4896f7ef83f5f95376..941de86ea0b7da17882f8d9b6504e168d6c27132 100644 (file)
@@ -13,6 +13,8 @@ except ImportError:
     from django.utils._threading_local import local
 
 _thread_locals = local()
+
+
 def get_current_user():
     return getattr(_thread_locals, 'user', None)
 
@@ -24,7 +26,7 @@ class PgMiddleware(object):
         return None
 
     def process_request(self, request):
-# Thread local store for username, see comment at the top of this file
+        # Thread local store for username, see comment at the top of this file
         _thread_locals.user = getattr(request, 'user', None)
         initialize_template_collection()
 
@@ -46,16 +48,16 @@ class PgMiddleware(object):
             ('connect', ["'self'", "www.google-analytics.com", "ssl.google-analytics.com"]),
             ('media', ["'self'", ]),
             ('style', ["'self'", "fonts.googleapis.com"]),
-            ('font', ["'self'", "fonts.gstatic.com", "data:" ]),
+            ('font', ["'self'", "fonts.gstatic.com", "data:", ]),
         ])
         if hasattr(response, 'x_allow_extra_sources'):
-            for k,v in response.x_allow_extra_sources.items():
+            for k, v in response.x_allow_extra_sources.items():
                 if k in sources:
                     sources[k].extend(v)
                 else:
                     sources[k] = v
 
-        security_policies = ["{0}-src {1}".format(k," ".join(v)) for k,v in sources.items()]
+        security_policies = ["{0}-src {1}".format(k, " ".join(v)) for k, v in sources.items()]
 
         if not getattr(response, 'x_allow_frames', False):
             response['X-Frame-Options'] = 'DENY'
index 760ba872b6a3f3aca2a5296718862c801d3f28b3..65ffb330f43f1aa4821cde4bc1151c77a9894992 100644 (file)
@@ -8,6 +8,7 @@ from pgweb.mailqueue.util import send_simple_mail
 from pgweb.util.helpers import template_to_string
 import re
 
+
 def send_template_mail(sender, receiver, subject, templatename, templateattr={}, usergenerated=False, cc=None, replyto=None, receivername=None, sendername=None, messageid=None):
     d = {
         'link_root': settings.SITE_ROOT,
@@ -19,6 +20,7 @@ def send_template_mail(sender, receiver, subject, templatename, templateattr={},
                      receivername=receivername, sendername=sendername,
                      messageid=messageid)
 
+
 def get_client_ip(request):
     """
     Get the IP of the client. If the client is served through our Varnish caches,
@@ -41,6 +43,7 @@ def varnish_purge_xkey(xkey):
     """
     connection.cursor().execute("SELECT varnish_purge_xkey(%s)", (xkey, ))
 
+
 def varnish_purge(url):
     """
     Purge the specified URL from Varnish. Will add initial anchor to the URL,
@@ -49,6 +52,7 @@ def varnish_purge(url):
     url = '^%s' % url
     connection.cursor().execute("SELECT varnish_purge(%s)", (url, ))
 
+
 def varnish_purge_expr(expr):
     """
     Purge the specified expression from Varnish. Does not modify the expression
@@ -56,6 +60,7 @@ def varnish_purge_expr(expr):
     """
     connection.cursor().execute("SELECT varnish_purge_expr(%s)", (expr, ))
 
+
 def version_sort(l):
     """
     map a directory name to a format that will show up sensibly in an ascii sort
@@ -64,12 +69,12 @@ def version_sort(l):
     generally don't have that.
     """
     mkey = l['link']
-    m = re.match('v?([0-9]+)\.([0-9]+)\.([0-9]+)$',l['url'])
+    m = re.match('v?([0-9]+)\.([0-9]+)\.([0-9]+)$', l['url'])
     if m:
-        mkey = m.group(1) + '%02d' % int(m.group(2)) + '%02d' % int(m.group(3));
-    m = re.match('v?([0-9]+)\.([0-9]+)$',l['url'])
+        mkey = m.group(1) + '%02d' % int(m.group(2)) + '%02d' % int(m.group(3))
+    m = re.match('v?([0-9]+)\.([0-9]+)$', l['url'])
     if m:
-        mkey = m.group(1) + '%02d' % int(m.group(2));
+        mkey = m.group(1) + '%02d' % int(m.group(2))
         # SOOO ugly. But if it's v10 and up, just prefix it to get it higher
         if int(m.group(1)) >= 10:
             mkey = 'a' + mkey
@@ -80,6 +85,7 @@ def version_sort(l):
 
     return mkey
 
+
 def generate_random_token():
     """
     Generate a random token of 64 characters. This token will be
index d67c97487b5b5f6c7ce63bea254a583c1fb7d305..83d7f316648890f15e19c51cfaf2531940540a25 100644 (file)
@@ -6,13 +6,16 @@ from pgweb.downloads.models import Product
 from pgweb.profserv.models import ProfessionalService
 from pgweb.quotes.models import Quote
 
+
 # Pending moderation requests (including URLs for the admin interface))
 def _get_unapproved_list(objecttype):
     objects = objecttype.objects.filter(approved=False)
     if not len(objects): return None
-    return { 'name': objects[0]._meta.verbose_name_plural, 'entries':
-             [{'url': '/admin/%s/%s/%s/' % (x._meta.app_label, x._meta.model_name, x.pk), 'title': unicode(x)} for x in objects]
-             }
+    return {
+        'name': objects[0]._meta.verbose_name_plural,
+        'entries': [{'url': '/admin/%s/%s/%s/' % (x._meta.app_label, x._meta.model_name, x.pk), 'title': unicode(x)} for x in objects]
+    }
+
 
 def get_all_pending_moderations():
     applist = [
@@ -22,5 +25,5 @@ def get_all_pending_moderations():
         _get_unapproved_list(Product),
         _get_unapproved_list(ProfessionalService),
         _get_unapproved_list(Quote),
-        ]
+    ]
     return [x for x in applist if x]
index 178ec04d607c347da5dad0ec665d682b1212cfeb..85d6302385e36f4d1353b0ab88b68189482acbcd 100644 (file)
@@ -8,6 +8,7 @@ from pgweb.util.middleware import get_current_user
 from pgweb.util.misc import varnish_purge
 from pgweb.mailqueue.util import send_simple_mail
 
+
 def _build_url(obj):
     if obj.id:
         return "%s/admin/%s/%s/%s/" % (
@@ -23,24 +24,29 @@ def _build_url(obj):
             obj._meta.model_name,
         )
 
+
 def _get_full_text_diff(obj, oldobj):
     fieldlist = _get_all_notification_fields(obj)
     if not fieldlist:
         return "This object does not know how to express ifself."
 
-    s = "\n\n".join(["\n".join(filter(lambda x: not x.startswith('@@'),
-        difflib.unified_diff(
-            _get_attr_value(oldobj, n).splitlines(),
-            _get_attr_value(obj, n).splitlines(),
-            n=1,
-            lineterm='',
-            fromfile=n,
-            tofile=n,
-            ))
+    s = "\n\n".join(["\n".join(
+        filter(
+            lambda x: not x.startswith('@@'),
+            difflib.unified_diff(
+                _get_attr_value(oldobj, n).splitlines(),
+                _get_attr_value(obj, n).splitlines(),
+                n=1,
+                lineterm='',
+                fromfile=n,
+                tofile=n,
+            )
+        )
     ) for n in fieldlist if _get_attr_value(oldobj, n) != _get_attr_value(obj, n)])
     if not s: return None
     return s
 
+
 def _get_all_notification_fields(obj):
     if hasattr(obj, 'notify_fields'):
         return obj.notify_fields
@@ -49,6 +55,7 @@ def _get_all_notification_fields(obj):
         # that are local to this model (not auto created)
         return [f.name for f in obj._meta.get_fields() if not f.name in ('approved', 'submitter', 'id', ) and not f.auto_created]
 
+
 def _get_attr_value(obj, fieldname):
     # see if this is a Many-to-many field. If yes, we want to print
     # it out as a pretty list
@@ -61,6 +68,7 @@ def _get_attr_value(obj, fieldname):
     # Return the value, or an empty tring if it's NULL (migrated records)
     return unicode(getattr(obj, fieldname)) or ''
 
+
 def _get_full_text_representation(obj):
     fieldlist = _get_all_notification_fields(obj)
     if not fieldlist:
@@ -68,6 +76,7 @@ def _get_full_text_representation(obj):
 
     return "\n".join([u'%s: %s' % (n, _get_attr_value(obj, n)) for n in fieldlist])
 
+
 def _get_notification_text(obj):
     try:
         oldobj = obj.__class__.objects.get(pk=obj.pk)
@@ -108,6 +117,7 @@ def _get_notification_text(obj):
         return ('{0} id {1} has been modified'.format(obj._meta.verbose_name, obj.id),
                 'The following fields have been modified:\n\n%s' % diff)
 
+
 def my_pre_save_handler(sender, **kwargs):
     instance = kwargs['instance']
     if getattr(instance, 'send_notification', False) and get_current_user():
@@ -119,28 +129,30 @@ def my_pre_save_handler(sender, **kwargs):
                              "%s by %s" % (subj, get_current_user()),
                              cont)
 
+
 def my_m2m_changed_handler(sender, **kwargs):
     instance = kwargs['instance']
     if getattr(instance, 'send_m2m_notification', False) and get_current_user():
         (cl, f) = sender.__name__.split('_')
         if not hasattr(instance, '_stored_m2m'):
-            instance._stored_m2m={}
+            instance._stored_m2m = {}
         if kwargs['action'] == 'pre_clear':
-            instance._stored_m2m[f] = set([unicode(t) for t in getattr(instance,f).all()])
+            instance._stored_m2m[f] = set([unicode(t) for t in getattr(instance, f).all()])
         elif kwargs['action'] == 'post_add':
-            newset = set([unicode(t) for t in getattr(instance,f).all()])
+            newset = set([unicode(t) for t in getattr(instance, f).all()])
             added = newset.difference(instance._stored_m2m.get(f, set()))
             removed = instance._stored_m2m.get(f, set()).difference(newset)
             subj = '{0} id {1} has been modified'.format(instance._meta.verbose_name, instance.id)
             if added or removed:
                 send_simple_mail(settings.NOTIFICATION_FROM,
-                        settings.NOTIFICATION_EMAIL,
-                        "%s by %s" % (subj, get_current_user()),
-                        "The following values for {0} were changed:\n\n{1}\n{2}\n\n".format(
-                instance._meta.get_field(f).verbose_name,
-                "\n".join([u"Added: %s" % a for a in added]),
-                "\n".join([u"Removed: %s" % r for r in removed]),
-                ))
+                                 settings.NOTIFICATION_EMAIL,
+                                 "%s by %s" % (subj, get_current_user()),
+                                 "The following values for {0} were changed:\n\n{1}\n{2}\n\n".format(
+                                     instance._meta.get_field(f).verbose_name,
+                                     "\n".join([u"Added: %s" % a for a in added]),
+                                     "\n".join([u"Removed: %s" % r for r in removed]),
+                                 ))
+
 
 def my_pre_delete_handler(sender, **kwargs):
     instance = kwargs['instance']
@@ -151,7 +163,8 @@ def my_pre_delete_handler(sender, **kwargs):
                              instance._meta.verbose_name,
                              instance.id,
                              get_current_user()),
-                        _get_full_text_representation(instance))
+                         _get_full_text_representation(instance))
+
 
 def my_post_save_handler(sender, **kwargs):
     instance = kwargs['instance']
@@ -162,6 +175,7 @@ def my_post_save_handler(sender, **kwargs):
             purgelist = instance.purge_urls
         map(varnish_purge, purgelist)
 
+
 def register_basic_signal_handlers():
     pre_save.connect(my_pre_save_handler)
     pre_delete.connect(my_pre_delete_handler)
index 64c04f15205f17cd5dde0a29634f898e0dc64d4c..40d46bc95261db39320ebf7ecad8bf05306a5cd0 100644 (file)
@@ -1,5 +1,6 @@
 from django.conf import settings
 
+
 def get_all_pages_struct(method='get_struct'):
     """
     Return an iterator over all distinct pages on the site.
@@ -13,7 +14,7 @@ def get_all_pages_struct(method='get_struct'):
     for app in settings.INSTALLED_APPS:
         if app.startswith('pgweb.'):
             try:
-                m = __import__(app+".struct", {}, {}, method)
+                m = __import__(app + ".struct", {}, {}, method)
             except:
                 # Failed to import - probably module didnd't exist
                 continue
index 20b45cae98d7efe466e84c9b0d386812dc1cf913..4fa2b9ebf49cf7a23b15867eb22fa7d566135381 100644 (file)
@@ -9,12 +9,15 @@ except ImportError:
 
 _thread_locals = local()
 
+
 def initialize_template_collection():
     _thread_locals.templates = []
 
+
 def get_all_templates():
     return getattr(_thread_locals, 'templates', [])
 
+
 class TrackingTemplateLoader(django.template.loaders.base.Loader):
     def get_template_sources(self, template_name):
         _thread_locals.templates = getattr(_thread_locals, 'templates', []) + [template_name, ]
index db7c7451420fbb1a42812cd8719bbefca279dd8c..6e84e3b46dc7602ac4d8ead8c733868d49e4f878 100755 (executable)
@@ -17,4 +17,3 @@ if __name__ == "__main__":
     r = Random.new()
     key = r.read(32)
     print base64.b64encode(key)
-    
index 2ae543a6bfb169ffbcdbbcd0620b8c6076e6664c..452612c3cf05d8641e00506d8b444a4fc00d49d9 100644 (file)
@@ -35,6 +35,7 @@ from Crypto.Hash import SHA
 from Crypto import Random
 import time
 
+
 class AuthBackend(ModelBackend):
     # We declare a fake backend that always fails direct authentication -
     # since we should never be using direct authentication in the first place!
@@ -62,16 +63,17 @@ def login(request):
         r = Random.new()
         iv = r.read(16)
         encryptor = AES.new(SHA.new(settings.SECRET_KEY).digest()[:16], AES.MODE_CBC, iv)
-        cipher = encryptor.encrypt(s + ' ' * (16-(len(s) % 16))) # pad to 16 bytes
+        cipher = encryptor.encrypt(s + ' ' * (16 - (len(s) % 16)))  # pad to 16 bytes
 
         return HttpResponseRedirect("%s?d=%s$%s" % (
-                settings.PGAUTH_REDIRECT,
-                base64.b64encode(iv, "-_"),
-                base64.b64encode(cipher, "-_"),
-                ))
+            settings.PGAUTH_REDIRECT,
+            base64.b64encode(iv, "-_"),
+            base64.b64encode(cipher, "-_"),
+        ))
     else:
         return HttpResponseRedirect(settings.PGAUTH_REDIRECT)
 
+
 # Handle logout requests by logging out of this site and then
 # redirecting to log out from the main site as well.
 def logout(request):
@@ -79,6 +81,7 @@ def logout(request):
         django_logout(request)
     return HttpResponseRedirect("%slogout/" % settings.PGAUTH_REDIRECT)
 
+
 # Receive an authentication response from the main website and try
 # to log the user in.
 def auth_receive(request):
@@ -120,7 +123,7 @@ def auth_receive(request):
             changed = True
         if user.email != data['e'][0]:
             user.email = data['e'][0]
-            changed= True
+            changed = True
         if changed:
             user.save()
     except User.DoesNotExist:
@@ -208,7 +211,7 @@ def user_search(searchterm=None, userid=None):
     u = urllib.urlopen('%ssearch/?%s' % (
         settings.PGAUTH_REDIRECT,
         urllib.urlencode(q),
-        ))
+    ))
     (ivs, datas) = u.read().split('&')
     u.close()
 
@@ -221,6 +224,7 @@ def user_search(searchterm=None, userid=None):
 
     return j
 
+
 # Import a user into the local authentication system. Will initially
 # make a search for it, and if anything other than one entry is returned
 # the import will fail.
index d5c60c71f6d068c429b702f651449ddcd81c8db6..4417c1defe6744ef449dd5d36d64b44b122e5128 100755 (executable)
@@ -46,7 +46,7 @@ if __name__ == "__main__":
         'f': options.first,
         'l': options.last,
         'e': options.email,
-        }
+    }
     if options.suburl:
         info['su'] = options.suburl
 
@@ -54,15 +54,15 @@ if __name__ == "__main__":
     # the first block more random..
     # Since this is a fake authentication, put it 5 minutes into the future to
     # give more time to copy/paste it.
-    s = "t=%s&%s" % (int(time.time()+300), urllib.urlencode(info))
+    s = "t=%s&%s" % (int(time.time() + 300), urllib.urlencode(info))
 
     r = Random.new()
     iv = r.read(16)
     encryptor = AES.new(base64.b64decode(options.key), AES.MODE_CBC, iv)
-    cipher = encryptor.encrypt(s + ' ' * (16-(len(s) % 16)))
+    cipher = encryptor.encrypt(s + ' ' * (16 - (len(s) % 16)))
 
     print "Paste the following after the receiving url:"
     print "?i=%s&d=%s" % (
         base64.b64encode(iv, "-_"),
         base64.b64encode(cipher, "-_"),
-        )
+    )
index 1d467d37724e4ca5fd30492fe7e9377ce08d61e0..7a2e03021819b29dc54ff539c9e3b0e1da3f7a61 100755 (executable)
@@ -17,17 +17,19 @@ quiet = False
 
 re_titlematch = re.compile('<title\s*>([^<]+)</title\s*>', re.IGNORECASE)
 
-## Load a single page
+
+# Load a single page
 def load_doc_file(filename, f):
-    tidyopts = dict(drop_proprietary_attributes=1,
-                alt_text='',
-                hide_comments=1,
-                output_xhtml=1,
-                show_body_only=1,
-                clean=1,
-                char_encoding='utf8',
-                indent='auto',
-            )
+    tidyopts = dict(
+        drop_proprietary_attributes=1,
+        alt_text='',
+        hide_comments=1,
+        output_xhtml=1,
+        show_body_only=1,
+        clean=1,
+        char_encoding='utf8',
+        indent='auto',
+    )
 
     # Postgres 10 started using xml toolchain and now produces docmentation in utf8. So we need
     # to figure out which version it is.
@@ -56,7 +58,7 @@ def load_doc_file(filename, f):
     if not quiet: print "--- file: %s (%s) ---" % (filename, title)
 
     s = tidy.parseString(contents.encode('utf-8'), **tidyopts)
-    curs.execute("INSERT INTO docs (file, version, title, content) VALUES (%(f)s, %(v)s, %(t)s, %(c)s)",{
+    curs.execute("INSERT INTO docs (file, version, title, content) VALUES (%(f)s, %(v)s, %(t)s, %(c)s)", {
         'f': filename,
         'v': ver,
         't': title,
@@ -65,8 +67,8 @@ def load_doc_file(filename, f):
     global pagecount
     pagecount += 1
 
-## Main execution
 
+# Main execution
 parser = OptionParser(usage="usage: %prog [options] <version> <tarfile>")
 parser.add_option("-q", "--quiet", action="store_true", dest="quiet",
                   help="Run quietly")
@@ -139,4 +141,3 @@ connection.commit()
 connection.close()
 
 if not quiet: print "Done (%i pages)." % pagecount
-
index abaef4fcfb447a5a52e18dbe78a1666c376c60eb..d3d12dd655988de5e23e50a45ae768952e07a5ef 100755 (executable)
@@ -19,12 +19,14 @@ exclude_roots = ['/repos', ]
 
 allnodes = {}
 
+
 def read_file(fn):
     f = codecs.open(fn, 'r', encoding='utf-8', errors='replace')
     t = f.read()
     f.close()
     return t
 
+
 def parse_directory(dirname, rootlen):
     mynode = {}
     for f in os.listdir(dirname):
@@ -39,7 +41,7 @@ def parse_directory(dirname, rootlen):
                 mynode[f] = {
                     't': 'l',
                     'd': os.readlink(fn).strip("/"),
-                    }
+                }
             else:
                 # This is a subdirectory, recurse into it, unless it happens
                 # to be on our exclude list.
@@ -61,6 +63,7 @@ def parse_directory(dirname, rootlen):
 
     allnodes[dirname[rootlen:].strip("/")] = mynode
 
+
 def Usage():
     print "Usage: spider_ftp.py <ftp_root> <pickle_file>"
     print ""
@@ -68,6 +71,7 @@ def Usage():
     print "to that URL instead of written to the filesystem."
     sys.exit(1)
 
+
 if len(sys.argv) != 3: Usage()
 
 parse_directory(sys.argv[1], len(sys.argv[1]))
index bc688d9373fe7b51261962633a982d12af76bff4..7b3a5869978032948264426b7064acd151ae93c1 100755 (executable)
@@ -26,6 +26,7 @@ platform_sort = {
 }
 archs = ['x86_64', 'i386', 'i686', 'ppc64le']
 
+
 def generate_platform(dirname, familyprefix, ver, installer, systemd):
     for f in platform_names.keys():
         yield ('%s-%s' % (f, ver), {
@@ -34,17 +35,19 @@ def generate_platform(dirname, familyprefix, ver, installer, systemd):
             'f': f,
             'i': installer,
             'd': systemd,
-            's': platform_sort[f]*1000-ver,
+            's': platform_sort[f] * 1000 - ver,
             'found': False,
-            })
+        })
+
 
 def get_redhat_systemd(ver):
     return (ver >= 7)
 
+
 platforms = {}
-for v in range(5, 7+1):
+for v in range(5, 7 + 1):
     platforms.update(dict(generate_platform('redhat', 'rhel', v, 'yum', get_redhat_systemd(v))))
-for v in range(24, 30+1):
+for v in range(24, 30 + 1):
     platforms.update(dict(generate_platform('fedora', 'fedora', v, 'dnf', True)))
 
 re_reporpm = re.compile('^pgdg-([a-z0-9-]+)([0-9]{2})-[^-]+-(\d+)\.noarch\.rpm$')
@@ -82,12 +85,12 @@ if __name__ == "__main__":
                             break
                     else:
                         # DEBUG
-#                        print "%s (%s) not found in platform list" % (familypath, shortdist)
+                        # print "%s (%s) not found in platform list" % (familypath, shortdist)
                         pass
 
     # Filter all platforms that are not used
-    platforms = {k:v for k,v in platforms.iteritems() if v['found']}
-    for k,v in platforms.iteritems():
+    platforms = {k: v for k, v in platforms.iteritems() if v['found']}
+    for k, v in platforms.iteritems():
         del v['found']
 
     j = json.dumps({'platforms': platforms, 'reporpms': reporpms})
index ff08ae486b06eb179fa10f545e3a59b26f6438c9..56fe892094c8e264d6d64010c2a6b9c852b26419 100755 (executable)
@@ -19,7 +19,8 @@ import httplib
 import re
 import HTMLParser
 
-BOUNDARY="-=--=foobar-=--="
+BOUNDARY = "-=--=foobar-=--="
+
 
 def encode_multipart_formdata(fields, files):
     L = []
@@ -39,7 +40,8 @@ def encode_multipart_formdata(fields, files):
     body = "\r\n".join(L)
     return body
 
-if __name__=="__main__":
+
+if __name__ == "__main__":
     if len(sys.argv) != 2:
         print "Usage: localhtmlvalidate.py <local url>"
         sys.exit(1)
@@ -53,13 +55,15 @@ if __name__=="__main__":
         firstline = 0
 
     # Generate a form body
-    body = encode_multipart_formdata([
+    body = encode_multipart_formdata(
+        [
             ('charset', 'utf-8'),
             ('doctype', 'inline'),
             ('group', '0'),
             ('verbose', '1'),
-            ],
-                                     [('uploaded_file', 'test.html', contents)])
+        ],
+        [('uploaded_file', 'test.html', contents)]
+    )
 
     # Now submit it to the w3c validator
     h = httplib.HTTP("validator.w3.org")
@@ -92,5 +96,3 @@ if __name__=="__main__":
         print "Unknown status: %s" % headers['x-w3c-validator-status']
         print headers
         sys.exit(1)
-    
-    
index bf2e9f9ee89ad6a532b3c5a151e3d5d3fcd45c2b..4584635dd00cf32940c27778c59aa24933677204 100755 (executable)
@@ -13,7 +13,7 @@ from ConfigParser import ConfigParser
 import psycopg2
 
 # Templates that we don't want to ban automatically
-BANNED_TEMPLATES=(
+BANNED_TEMPLATES = (
     'base/base.html',
 )
 
index 7a2014ab3f7a675ce67fe100369e3439e81b7df0..7dbed9a583199ad38169c8931510f27b766ddf4d 100644 (file)
@@ -8,6 +8,7 @@ import time
 from lib.log import log
 from lib.parsers import ArchivesParser
 
+
 class MultiListCrawler(object):
     def __init__(self, lists, conn, status_interval=30, commit_interval=500):
         self.lists = lists
@@ -27,8 +28,8 @@ class MultiListCrawler(object):
         for listid, listname in self.lists:
             if full:
                 # Generate a sequence of everything to index
-                for year in range(1997, datetime.datetime.now().year+1):
-                    for month in range(1,13):
+                for year in range(1997, datetime.datetime.now().year + 1):
+                    for month in range(1, 13):
                         self.queue.put((listid, listname, year, month, -1))
             elif month:
                 # Do one specific month
@@ -48,18 +49,18 @@ class MultiListCrawler(object):
                 curs = self.conn.cursor()
                 curr = datetime.date.today()
                 if curr.month == 1:
-                    prev = datetime.date(curr.year-1, 12, 1)
+                    prev = datetime.date(curr.year - 1, 12, 1)
                 else:
-                    prev = datetime.date(curr.year, curr.month-1, 1)
+                    prev = datetime.date(curr.year, curr.month - 1, 1)
 
                 for d in curr, prev:
                     # Figure out what the highest indexed page in this
                     # month is.
                     curs.execute("SELECT max(msgnum) FROM messages WHERE list=%(list)s AND year=%(year)s AND month=%(month)s", {
-                            'list': listid,
-                            'year': d.year,
-                            'month': d.month,
-                            })
+                        'list': listid,
+                        'year': d.year,
+                        'month': d.month,
+                    })
                     x = curs.fetchall()
                     if x[0][0] != None:
                         maxmsg = x[0][0]
@@ -69,11 +70,11 @@ class MultiListCrawler(object):
 
         for x in range(5):
             t = threading.Thread(name="Indexer %s" % x,
-                                 target = lambda: self.crawl_from_queue())
-            t.daemon= True
+                                 target=lambda: self.crawl_from_queue())
+            t.daemon = True
             t.start()
 
-        t = threading.Thread(name="statusthread", target = lambda: self.status_thread())
+        t = threading.Thread(name="statusthread", target=lambda: self.status_thread())
         t.daemon = True
         t.start()
 
@@ -93,10 +94,10 @@ class MultiListCrawler(object):
             with self.counterlock:
                 log("Indexed %s messages so far (%s active threads, %s months still queued, %.1f msg/sec)" % (
                     self.counter,
-                    threading.active_count() - 2 , # main thread + status thread
+                    threading.active_count() - 2,  # main thread + status thread
                     self.queue.qsize(),
                     self.counter / (nowtime - starttime),
-                    ))
+                ))
                 # Commit every 500 messages
                 if self.counter - lastcommit > self.commit_interval:
                     lastcommit = self.counter
@@ -152,15 +153,15 @@ class MultiListCrawler(object):
             # We return true to move on to the next message anyway!
             return True
         curs.execute("INSERT INTO messages (list, year, month, msgnum, date, subject, author, txt, fti) VALUES (%(listid)s, %(year)s, %(month)s, %(msgnum)s, %(date)s, %(subject)s, %(author)s, %(txt)s, setweight(to_tsvector('pg', %(subject)s), 'A') || to_tsvector('pg', %(txt)s))", {
-                'listid': listid,
-                'year': year,
-                'month': month,
-                'msgnum': msgnum,
-                'date': p.date,
-                'subject': p.subject[:127],
-                'author': p.author[:127],
-                'txt': p.body,
-                })
+            'listid': listid,
+            'year': year,
+            'month': month,
+            'msgnum': msgnum,
+            'date': p.date,
+            'subject': p.subject[:127],
+            'author': p.author[:127],
+            'txt': p.body,
+        })
         with self.counterlock:
             self.counter += 1
 
index 2154e0b14ab3b7ed3e649eaf32e83146bd094757..173cf0c89aa685de8706c19819bebac80852866d 100644 (file)
@@ -11,6 +11,7 @@ import threading
 from lib.log import log
 from lib.parsers import GenericHtmlParser, lossy_unicode
 
+
 class BaseSiteCrawler(object):
     def __init__(self, hostname, dbconn, siteid, serverip=None, https=False):
         self.hostname = hostname
@@ -37,11 +38,11 @@ class BaseSiteCrawler(object):
         # Fire off worker threads
         for x in range(5):
             t = threading.Thread(name="Indexer %s" % x,
-                       target = lambda: self.crawl_from_queue())
+                                 target=lambda: self.crawl_from_queue())
             t.daemon = True
             t.start()
 
-        t = threading.Thread(name="statusthread", target = lambda: self.status_thread())
+        t = threading.Thread(name="statusthread", target=lambda: self.status_thread())
         t.daemon = True
         t.start()
 
@@ -53,9 +54,9 @@ class BaseSiteCrawler(object):
         # Remove all pages that we didn't crawl
         curs = self.dbconn.cursor()
         curs.execute("DELETE FROM webpages WHERE site=%(site)s AND NOT suburl=ANY(%(urls)s)", {
-                'site': self.siteid,
-                'urls': self.pages_crawled.keys(),
-                })
+            'site': self.siteid,
+            'urls': self.pages_crawled.keys(),
+        })
         if curs.rowcount:
             log("Deleted %s pages no longer accessible" % curs.rowcount)
         self.pages_deleted += curs.rowcount
@@ -77,7 +78,7 @@ class BaseSiteCrawler(object):
                     threading.active_count() - 2,
                     self.queue.qsize(),
                     len(self.pages_crawled) / (nowtime - starttime),
-                    ))
+                ))
 
     def crawl_from_queue(self):
         while not self.stopevent.is_set():
@@ -92,7 +93,7 @@ class BaseSiteCrawler(object):
         return False
 
     def crawl_page(self, url, relprio, internal):
-        if self.pages_crawled.has_key(url) or self.pages_crawled.has_key(url+"/"):
+        if self.pages_crawled.has_key(url) or self.pages_crawled.has_key(url + "/"):
             return
 
         if self.exclude_url(url):
@@ -110,9 +111,9 @@ class BaseSiteCrawler(object):
             # Page failed to load or was a redirect, so remove from database
             curs = self.dbconn.cursor()
             curs.execute("DELETE FROM webpages WHERE site=%(id)s AND suburl=%(url)s", {
-                    'id': self.siteid,
-                    'url': url,
-                    })
+                'id': self.siteid,
+                'url': url,
+            })
             with self.counterlock:
                 self.pages_deleted += curs.rowcount
 
@@ -145,7 +146,7 @@ class BaseSiteCrawler(object):
             'url': url,
             'relprio': relprio,
             'internal': internal,
-            }
+        }
         curs = self.dbconn.cursor()
         curs.execute("UPDATE webpages SET title=%(title)s, txt=%(txt)s, fti=setweight(to_tsvector('public.pg', %(title)s), 'A') || to_tsvector('public.pg', %(txt)s), lastscanned=%(lastmod)s, relprio=%(relprio)s, isinternal=%(internal)s WHERE site=%(site)s AND suburl=%(url)s", params)
         if curs.rowcount != 1:
@@ -157,10 +158,11 @@ class BaseSiteCrawler(object):
                 self.pages_updated += 1
 
     ACCEPTED_CONTENTTYPES = ("text/html", "text/plain", )
+
     def accept_contenttype(self, contenttype):
         # Split apart if there is a "; charset=" in it
         if contenttype.find(";"):
-            contenttype = contenttype.split(';',2)[0]
+            contenttype = contenttype.split(';', 2)[0]
         return contenttype in self.ACCEPTED_CONTENTTYPES
 
     def fetch_page(self, url):
@@ -180,8 +182,8 @@ class BaseSiteCrawler(object):
                 else:
                     h = httplib.HTTPSConnection(host=self.hostname, port=443, strict=True, timeout=10, context=ssl._create_unverified_context())
                 h.putrequest("GET", url)
-            h.putheader("User-agent","pgsearch/0.2")
-            h.putheader("Connection","close")
+            h.putheader("User-agent", "pgsearch/0.2")
+            h.putheader("Connection", "close")
             if self.scantimes.has_key(url):
                 h.putheader("If-Modified-Since", formatdate(time.mktime(self.scantimes[url].timetuple())))
             h.endheaders()
@@ -209,7 +211,7 @@ class BaseSiteCrawler(object):
                 # No redirect at all found, becaue it was invalid?
                 return (2, None, None)
             else:
-                #print "Url %s returned status %s" % (url, resp.status)
+                # print "Url %s returned status %s" % (url, resp.status)
                 pass
         except Exception, e:
             log("Exception when loading url %s: %s" % (url, e))
index 718b1f0dd92072530811a5e0e1fe29cbd0cd0429..aa8b9e09f7cd4c935f2a7bf77edcedbe8493443d 100644 (file)
@@ -3,6 +3,7 @@ import re
 from basecrawler import BaseSiteCrawler
 from parsers import RobotsParser
 
+
 class GenericSiteCrawler(BaseSiteCrawler):
     def __init__(self, hostname, dbconn, siteid, https=False):
         super(GenericSiteCrawler, self).__init__(hostname, dbconn, siteid, https=https)
@@ -19,8 +20,8 @@ class GenericSiteCrawler(BaseSiteCrawler):
         # robots.txt ones)
         curs = self.dbconn.cursor()
         curs.execute("SELECT suburlre FROM site_excludes WHERE site=%(site)s", {
-                'site': self.siteid,
-                })
+            'site': self.siteid,
+        })
         self.extra_excludes = [re.compile(x) for x, in curs.fetchall()]
 
         # We *always* crawl the root page, of course
@@ -45,7 +46,7 @@ class GenericSiteCrawler(BaseSiteCrawler):
 
     def post_process_page(self, url):
         for l in self.resolve_links(self.page.links, url):
-            if self.pages_crawled.has_key(l) or self.pages_crawled.has_key(l+"/"):
+            if self.pages_crawled.has_key(l) or self.pages_crawled.has_key(l + "/"):
                 continue
             if self.exclude_url(l):
                 continue
index ce566034713a555f37d8b5f62aff37902fa73a1b..08e8de5c03d0e03beeafb7f4ab9994befd7c48bd 100644 (file)
@@ -1,6 +1,7 @@
 # Yes, this is trivial, but we might want to put something
 # more here in the future :)
 import datetime
+
+
 def log(msg):
     print "%s: %s" % (datetime.datetime.now(), msg)
-
index 89f0ff128a6187df740df68f735898d5f4e0d9b4..b176edd3b9f44c08996f0f35a31dc54972fce62c 100644 (file)
@@ -8,6 +8,7 @@ from HTMLParser import HTMLParser
 
 from lib.log import log
 
+
 class GenericHtmlParser(HTMLParser):
     def __init__(self):
         HTMLParser.__init__(self)
@@ -22,7 +23,7 @@ class GenericHtmlParser(HTMLParser):
         if tag == "body":
             self.inbody = True
         if tag == "a":
-            for a,v in attrs:
+            for a, v in attrs:
                 if a == "href":
                     self.links.append(v)
 
@@ -31,6 +32,7 @@ class GenericHtmlParser(HTMLParser):
             self.inbody = False
 
     DATA_IGNORE_TAGS = ("script",)
+
     def handle_data(self, data):
         d = data.strip()
         if len(d) < 2:
@@ -59,6 +61,7 @@ class GenericHtmlParser(HTMLParser):
 class ArchivesParser(object):
     rematcher = re.compile("<!--X-Subject: ([^\n]*) -->.*<!--X-From-R13: ([^\n]*) -->.*<!--X-Date: ([^\n]*) -->.*<!--X-Body-of-Message-->(.*)<!--X-Body-of-Message-End-->", re.DOTALL)
     hp = HTMLParser()
+
     def __init__(self):
         self.subject = None
         self.author = None
@@ -79,6 +82,7 @@ class ArchivesParser(object):
 
     _date_multi_re = re.compile(' \((\w+\s\w+|)\)$')
     _date_trailing_envelope = re.compile('\s+\(envelope.*\)$')
+
     def parse_date(self, d):
         # For some reason, we have dates that look like this:
         # http://archives.postgresql.org/pgsql-bugs/1999-05/msg00018.php
@@ -126,11 +130,13 @@ class ArchivesParser(object):
     # So we copy the brokenness here.
     # This code is from MHonArc/ewhutil.pl, mrot13()
     _arot13_trans = dict(zip(map(ord,
-        u'@ABCDEFGHIJKLMNOPQRSTUVWXYZ[abcdefghijklmnopqrstuvwxyz'),
-        u'NOPQRSTUVWXYZ[@ABCDEFGHIJKLMnopqrstuvwxyzabcdefghijklm'))
+                                 u'@ABCDEFGHIJKLMNOPQRSTUVWXYZ[abcdefghijklmnopqrstuvwxyz'),
+                             u'NOPQRSTUVWXYZ[@ABCDEFGHIJKLMnopqrstuvwxyzabcdefghijklm'))
+
     def almost_rot13(self, s):
         return unicode(s).translate(self._arot13_trans)
 
+
 class RobotsParser(object):
     def __init__(self, url):
         try:
index 25d6fb72ba6fc02bbcfa5cb3eaf8ae048407475a..fddd3ffecf4c9ef34b46b6147d92b5231b84482b 100644 (file)
@@ -5,6 +5,7 @@ import dateutil.parser
 from lib.log import log
 from lib.basecrawler import BaseSiteCrawler
 
+
 class SitemapParser(object):
     def __init__(self):
         self.urls = []
@@ -19,7 +20,7 @@ class SitemapParser(object):
         self.getlastmod = False
         self.currstr = ""
         self.internal = False
-        self.parser.StartElementHandler = lambda name,attrs: self.processelement(name,attrs)
+        self.parser.StartElementHandler = lambda name, attrs: self.processelement(name, attrs)
         self.parser.EndElementHandler = lambda name: self.processendelement(name)
         self.parser.CharacterDataHandler = lambda data: self.processcharacterdata(data)
         self.internal = internal
@@ -58,6 +59,7 @@ class SitemapParser(object):
         if self.geturl or self.getprio or self.getlastmod:
             self.currstr += data
 
+
 class SitemapSiteCrawler(BaseSiteCrawler):
     def __init__(self, hostname, dbconn, siteid, serverip, https=False):
         super(SitemapSiteCrawler, self).__init__(hostname, dbconn, siteid, serverip, https)
@@ -81,7 +83,7 @@ class SitemapSiteCrawler(BaseSiteCrawler):
 
         for url, prio, lastmod, internal in p.urls:
             # Advance 8 characters - length of https://.
-            url = url[len(self.hostname)+8:]
+            url = url[len(self.hostname) + 8:]
             if lastmod:
                 if self.scantimes.has_key(url):
                     if lastmod < self.scantimes[url]:
index b70571dd3c2b8f5e34fc45e2edac42c68d925983..87c702652fb2cabbb355ec6c39f0fc78977f527c 100644 (file)
@@ -1,5 +1,6 @@
 from multiprocessing import Process
 
+
 # Wrap a method call in a different process, so that we can process
 # keyboard interrupts and actually terminate it if we have to.
 # python threading makes it often impossible to Ctlr-C it otherwise..
index ea11bec2e4682a31f063070b76fe913e7f4ececc..445086107e2ceae9de46a6652f085fc4a9ac5aca 100755 (executable)
@@ -10,23 +10,24 @@ import psycopg2
 import sys
 import time
 
+
 def doit(opt):
     cp = ConfigParser()
     cp.read("search.ini")
     psycopg2.extensions.register_type(psycopg2.extensions.UNICODE)
-    conn = psycopg2.connect(cp.get("search","db"))
+    conn = psycopg2.connect(cp.get("search", "db"))
 
     curs = conn.cursor()
 
     if opt.list:
         # Multiple lists can be specified with a comma separator (no spaces)
         curs.execute("SELECT id,name FROM lists WHERE name=ANY(%(names)s)", {
-                'names': opt.list.split(','),
-                })
+            'names': opt.list.split(','),
+        })
     else:
         curs.execute("SELECT id,name FROM lists WHERE active ORDER BY id")
 
-    listinfo = [(id,name) for id,name in curs.fetchall()]
+    listinfo = [(id, name) for id, name in curs.fetchall()]
     c = MultiListCrawler(listinfo, conn, opt.status_interval, opt.commit_interval)
     n = c.crawl(opt.full, opt.month)
 
@@ -39,7 +40,8 @@ def doit(opt):
     log("Indexed %s messages" % n)
     time.sleep(1)
 
-if __name__=="__main__":
+
+if __name__ == "__main__":
     parser = OptionParser()
     parser.add_option("-l", "--list", dest='list', help="Crawl only this list")
     parser.add_option("-m", "--month", dest='month', help="Crawl only this month")
index c5bc72c6636f949bfa35cd5067850f07c330535f..815d24ae8773a6a3a20543edec29104027b44464 100755 (executable)
@@ -7,11 +7,11 @@ import psycopg2
 import urllib
 import simplejson as json
 
-if __name__=="__main__":
+if __name__ == "__main__":
     cp = ConfigParser()
     cp.read("search.ini")
     psycopg2.extensions.register_type(psycopg2.extensions.UNICODE)
-    conn = psycopg2.connect(cp.get("search","db"))
+    conn = psycopg2.connect(cp.get("search", "db"))
     curs = conn.cursor()
 
     u = urllib.urlopen("http://%s/community/lists/listinfo/" % cp.get("search", "web"))
index 2f895f17f3375f30c06d2a50ded94fb0f2b7e51c..f3f139a70933488a444cf45a61c684f9509b0207 100755 (executable)
@@ -10,9 +10,10 @@ from ConfigParser import ConfigParser
 import psycopg2
 import time
 
+
 def doit():
     psycopg2.extensions.register_type(psycopg2.extensions.UNICODE)
-    conn = psycopg2.connect(cp.get("search","db"))
+    conn = psycopg2.connect(cp.get("search", "db"))
 
     curs = conn.cursor()
 
@@ -34,7 +35,7 @@ def doit():
     time.sleep(1)
 
 
-if __name__=="__main__":
+if __name__ == "__main__":
     cp = ConfigParser()
     cp.read("search.ini")
 
index b9fa622ef9b4ad502e18350c50935060bd7ffa3c..a7cbd011dfc8f9876023fe09e0e07ad3c6093e0b 100755 (executable)
@@ -5,9 +5,9 @@ import psycopg2
 from datetime import timedelta
 
 # Up to 5 minutes delay is ok
-WARNING_THRESHOLD=timedelta(minutes=5)
+WARNING_THRESHOLD = timedelta(minutes=5)
 # More than 15 minutes something is definitely wrong
-CRITICAL_THRESHOLD=timedelta(minutes=15)
+CRITICAL_THRESHOLD = timedelta(minutes=15)
 
 if __name__ == "__main__":
     if len(sys.argv) != 2:
index c66d87dca4e930e51d19ed8ac089e1a13fc2deb2..310967592b376f2dbda08d7b5f73e4a784abcd38 100755 (executable)
@@ -15,6 +15,7 @@ import logging
 import psycopg2
 from setproctitle import setproctitle
 
+
 def do_purge(consumername, headers):
     try:
         conn = httplib.HTTPSConnection('%s.postgresql.org' % consumername)
@@ -30,6 +31,7 @@ def do_purge(consumername, headers):
         return False
     return True
 
+
 def worker(consumerid, consumername, dsn):
     logging.info("Starting worker for %s" % consumername)
     setproctitle("varnish_queue - worker for %s" % consumername)
@@ -85,7 +87,7 @@ def worker(consumerid, consumername, dsn):
             # Nothing, so roll back the transaction and wait
             conn.rollback()
 
-            select.select([conn],[],[],5*60)
+            select.select([conn], [], [], 5 * 60)
             conn.poll()
             while conn.notifies:
                 conn.notifies.pop()
@@ -104,7 +106,7 @@ def housekeeper(dsn):
             conn.commit()
         else:
             conn.rollback()
-        time.sleep(5*60)
+        time.sleep(5 * 60)
 
 
 if __name__ == "__main__":