from models import CommunityAuthSite, CommunityAuthOrg
+
class CommunityAuthSiteAdminForm(forms.ModelForm):
class Meta:
model = CommunityAuthSite
raise forms.ValidationError("Crypto key must be 16, 24 or 32 bytes before being base64-encoded")
return self.cleaned_data['cryptkey']
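# A hedged sketch (not part of the original diff) of the check that
# clean_cryptkey above appears to perform; its full body is elided in this
# hunk. The error message implies: base64-decode the configured key and
# require a valid AES key length. Helper name and details are assumptions.
def _cryptkey_is_valid(value):  # hypothetical helper, for illustration only
    import base64
    try:
        key = base64.b64decode(value)
    except TypeError:  # Python 2 base64 raises TypeError on bad input
        return False
    return len(key) in (16, 24, 32)  # AES-128/192/256 key sizes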
+
class CommunityAuthSiteAdmin(admin.ModelAdmin):
form = CommunityAuthSiteAdminForm
+
class PGUserChangeForm(UserChangeForm):
"""just like UserChangeForm, butremoves "username" requirement"""
def __init__(self, *args, **kwargs):
if self.fields.get('username'):
del self.fields['username']
+
class PGUserAdmin(UserAdmin):
"""overrides default Django user admin"""
form = PGUserChangeForm
return self.readonly_fields + ('username',)
return self.readonly_fields
+
admin.site.register(CommunityAuthSite, CommunityAuthSiteAdmin)
admin.site.register(CommunityAuthOrg)
-admin.site.unregister(User) # have to unregister default User Admin...
-admin.site.register(User, PGUserAdmin) # ...in order to add overrides
+admin.site.unregister(User) # have to unregister default User Admin...
+admin.site.register(User, PGUserAdmin) # ...in order to add overrides
import logging
log = logging.getLogger(__name__)
+
def _clean_username(username):
username = username.lower()
return username
raise forms.ValidationError("This username is already in use")
+
# Override some error handling only in the default authentication form
class PgwebAuthenticationForm(AuthenticationForm):
def clean(self):
return self.cleaned_data
raise e
+
class CommunityAuthConsentForm(forms.Form):
consent = forms.BooleanField(help_text='Consent to sharing this data')
next = forms.CharField(widget=forms.widgets.HiddenInput())
self.fields['consent'].label = 'Consent to sharing data with {0}'.format(self.orgname)
+
class SignupForm(forms.Form):
username = forms.CharField(max_length=30)
first_name = forms.CharField(max_length=30)
return email
raise forms.ValidationError("A user with this email address is already registered")
+
class SignupOauthForm(forms.Form):
username = forms.CharField(max_length=30)
first_name = forms.CharField(max_length=30, required=False)
def clean_email(self):
return self.cleaned_data['email'].lower()
+
class UserProfileForm(forms.ModelForm):
class Meta:
model = UserProfile
exclude = ('user',)
+
class UserForm(forms.ModelForm):
def __init__(self, *args, **kwargs):
super(UserForm, self).__init__(*args, **kwargs)
self.fields['first_name'].required = True
self.fields['last_name'].required = True
+
class Meta:
model = User
fields = ('first_name', 'last_name', )
+
class ContributorForm(forms.ModelForm):
class Meta:
model = Contributor
exclude = ('ctype', 'lastname', 'firstname', 'user', )
+
class ChangeEmailForm(forms.Form):
email = forms.EmailField()
email2 = forms.EmailField(label="Repeat email")
raise forms.ValidationError("Email addresses don't match")
return email2
+
class PgwebPasswordResetForm(forms.Form):
email = forms.EmailField()
from django.db import models
from django.contrib.auth.models import User
+
class CommunityAuthOrg(models.Model):
orgname = models.CharField(max_length=100, null=False, blank=False,
help_text="Name of the organisation")
def __unicode__(self):
return self.orgname
+
class CommunityAuthSite(models.Model):
name = models.CharField(max_length=100, null=False, blank=False,
help_text="Note that the value in this field is shown on the login page, so make sure it's user-friendly!")
def __unicode__(self):
return self.name
+
class CommunityAuthConsent(models.Model):
user = models.ForeignKey(User, null=False, blank=False)
org = models.ForeignKey(CommunityAuthOrg, null=False, blank=False)
class Meta:
unique_together = (('user', 'org'), )
+
class EmailChangeToken(models.Model):
user = models.OneToOneField(User, null=False, blank=False)
email = models.EmailField(max_length=75, null=False, blank=False)
class OAuthException(Exception):
pass
+
#
# Generic OAuth login for multiple providers
#
'google',
'https://accounts.google.com/o/oauth2/v2/auth',
'https://accounts.google.com/o/oauth2/token',
- ['https://www.googleapis.com/auth/userinfo.email',
- 'https://www.googleapis.com/auth/userinfo.profile'],
+ [
+ 'https://www.googleapis.com/auth/userinfo.email',
+ 'https://www.googleapis.com/auth/userinfo.profile'
+ ],
_google_auth_data)
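# A hedged sketch of the shape of a provider's auth-data callback such as
# _google_auth_data above, whose body is elided here. Judging from the
# callers it receives an authorized OAuth2 session and returns the user's
# identity fields; the endpoint URL and return shape are assumptions.
def _example_auth_data(oa):  # hypothetical callback, for illustration only
    r = oa.get('https://example.invalid/oauth/userinfo').json()
    return (r['email'].lower(), r['given_name'], r['family_name'])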
+
#
# Github login
# Registration: https://github.com/settings/developers
else:
# Some github accounts have no name on them, so we can just
# let the user fill it out manually in that case.
- n = ['','']
+ n = ['', '']
# Email is at a separate endpoint
r = oa.get('https://api.github.com/user/emails').json()
for e in r:
['user:email', ],
_github_auth_data)
+
#
# Facebook login
# Registration: https://developers.facebook.com/apps
'microsoft',
'https://login.live.com/oauth20_authorize.srf',
'https://login.live.com/oauth20_token.srf',
- ['wl.basic', 'wl.emails' ],
+ ['wl.basic', 'wl.emails', ],
_microsoft_auth_data)
import logging
log = logging.getLogger(__name__)
+
class ReCaptchaWidget(forms.widgets.Widget):
def render(self, name, value, attrs=None):
if settings.NOCAPTCHA:
# The value we store in user.password for oauth logins. This is
# a value that must not match any hashers.
-OAUTH_PASSWORD_STORE='oauth_signin_account_no_password'
+OAUTH_PASSWORD_STORE = 'oauth_signin_account_no_password'
+
@login_required
def home(request):
'profservs': myprofservs,
})
+
objtypes = {
'news': {
'title': 'News Article',
},
}
+
@login_required
@transaction.atomic
def profile(request):
contribform = ContributorForm(instance=contrib)
return render_pgweb(request, 'account', 'account/userprofileform.html', {
- 'userform': userform,
- 'profileform': profileform,
- 'contribform': contribform,
- 'can_change_email': can_change_email,
- })
+ 'userform': userform,
+ 'profileform': profileform,
+ 'contribform': contribform,
+ 'can_change_email': can_change_email,
+ })
+
@login_required
@transaction.atomic
token=generate_random_token())
token.save()
- send_template_mail(settings.ACCOUNTS_NOREPLY_FROM,
- form.cleaned_data['email'],
- 'Your postgresql.org community account',
- 'account/email_change_email.txt',
- { 'token': token , 'user': request.user, }
- )
+ send_template_mail(
+ settings.ACCOUNTS_NOREPLY_FROM,
+ form.cleaned_data['email'],
+ 'Your postgresql.org community account',
+ 'account/email_change_email.txt',
+ {'token': token, 'user': request.user, }
+ )
return HttpResponseRedirect('done/')
else:
form = ChangeEmailForm(request.user)
return render_pgweb(request, 'account', 'account/emailchangeform.html', {
'form': form,
'token': token,
- })
+ })
+
@login_required
@transaction.atomic
return render_pgweb(request, 'account', 'account/emailchangecompleted.html', {
'token': tokenhash,
'success': token and True or False,
- })
+ })
+
@login_required
def listobjects(request, objtype):
'suburl': objtype,
})
+
@login_required
def orglist(request):
orgs = Organisation.objects.filter(approved=True)
return render_pgweb(request, 'account', 'account/orglist.html', {
- 'orgs': orgs,
+ 'orgs': orgs,
})
+
def login(request):
return authviews.login(request, template_name='account/login.html',
authentication_form=PgwebAuthenticationForm,
extra_context={
- 'oauth_providers': [(k,v) for k,v in sorted(settings.OAUTH.items())],
+ 'oauth_providers': [(k, v) for k, v in sorted(settings.OAUTH.items())],
})
+
def logout(request):
return authviews.logout_then_login(request, login_url='/')
+
def changepwd(request):
if hasattr(request.user, 'password') and request.user.password == OAUTH_PASSWORD_STORE:
return HttpServerError(request, "This account cannot change password as it's connected to a third party login site.")
template_name='account/password_change.html',
post_change_redirect='/account/changepwd/done/')
+
def resetpwd(request):
# The basic Django password reset feature is completely broken. For example, it does not support
# resetting passwords for users with "old hashes", which means they have no way to ever
if form.is_valid():
log.info("Initiating password set from {0} for {1}".format(get_client_ip(request), form.cleaned_data['email']))
token = default_token_generator.make_token(u)
- send_template_mail(settings.ACCOUNTS_NOREPLY_FROM,
- form.cleaned_data['email'],
- 'Password reset for your postgresql.org account',
- 'account/password_reset_email.txt',
- {
- 'user': u,
- 'uid': urlsafe_base64_encode(force_bytes(u.pk)),
- 'token': token,
- },
+ send_template_mail(
+ settings.ACCOUNTS_NOREPLY_FROM,
+ form.cleaned_data['email'],
+ 'Password reset for your postgresql.org account',
+ 'account/password_reset_email.txt',
+ {
+ 'user': u,
+ 'uid': urlsafe_base64_encode(force_bytes(u.pk)),
+ 'token': token,
+ },
)
return HttpResponseRedirect('/account/reset/done/')
else:
form = PgwebPasswordResetForm()
return render_pgweb(request, 'account', 'account/password_reset.html', {
- 'form': form,
+ 'form': form,
})
+
def change_done(request):
log.info("Password change done from {0}".format(get_client_ip(request)))
return authviews.password_change_done(request, template_name='account/password_change_done.html')
+
def reset_done(request):
log.info("Password reset done from {0}".format(get_client_ip(request)))
return authviews.password_reset_done(request, template_name='account/password_reset_done.html')
+
def reset_confirm(request, uidb64, token):
log.info("Confirming password reset for uidb {0}, token {1} from {2}".format(uidb64, token, get_client_ip(request)))
return authviews.password_reset_confirm(request,
template_name='account/password_reset_confirm.html',
post_reset_redirect='/account/reset/complete/')
+
def reset_complete(request):
log.info("Password reset completed for user from {0}".format(get_client_ip(request)))
return authviews.password_reset_complete(request, template_name='account/password_reset_complete.html')
+
@script_sources('https://www.google.com/recaptcha/')
@script_sources('https://www.gstatic.com/recaptcha/')
@frame_sources('https://www.google.com/')
form.cleaned_data['email'],
'Your new postgresql.org community account',
'account/new_account_email.txt',
- { 'uid': urlsafe_base64_encode(force_bytes(user.id)), 'token': token, 'user': user}
+ {'uid': urlsafe_base64_encode(force_bytes(user.id)), 'token': token, 'user': user}
)
return HttpResponseRedirect('/account/signup/complete/')
form = SignupForm(get_client_ip(request))
return render_pgweb(request, 'account', 'base/form.html', {
- 'form': form,
- 'formitemtype': 'Account',
- 'form_intro': """
+ 'form': form,
+ 'formitemtype': 'Account',
+ 'form_intro': """
To sign up for a free community account, enter your preferred userid and email address.
Note that a community account is only needed if you want to submit information - all
content is available for reading without an account.
""",
- 'savebutton': 'Sign up',
- 'operation': 'New',
- 'recaptcha': True,
+ 'savebutton': 'Sign up',
+ 'operation': 'New',
+ 'recaptcha': True,
})
'operation': 'New account',
'savebutton': 'Sign up for new account',
'recaptcha': True,
- })
+ })
+
####
-## Community authentication endpoint
+# Community authentication endpoint
####
-
def communityauth(request, siteid):
# Get whatever site the user is trying to log in to.
site = get_object_or_404(CommunityAuthSite, pk=siteid)
nexturl = request.POST['next']
else:
nexturl = '/account/auth/%s/%s' % (siteid, urldata)
- return authviews.login(request, template_name='account/login.html',
- authentication_form=PgwebAuthenticationForm,
- extra_context={
- 'sitename': site.name,
- 'next': nexturl,
- 'oauth_providers': [(k,v) for k,v in sorted(settings.OAUTH.items())],
- },
- )
+ return authviews.login(
+ request, template_name='account/login.html',
+ authentication_form=PgwebAuthenticationForm,
+ extra_context={
+ 'sitename': site.name,
+ 'next': nexturl,
+ 'oauth_providers': [(k, v) for k, v in sorted(settings.OAUTH.items())],
+ },
+ )
# When we reach this point, the user *has* already been authenticated.
# The request variable "su" *may* contain a suburl and should in that
# case be passed along to the site we're authenticating for. And of
# course, we fill a structure with information about the user.
- if request.user.first_name=='' or request.user.last_name=='' or request.user.email=='':
+ if request.user.first_name == '' or request.user.last_name == '' or request.user.email == '':
return render_pgweb(request, 'account', 'account/communityauth_noinfo.html', {
- })
+ })
# Check for cooloff period
if site.cooloff_hours > 0:
request.user.username, site.name))
return render_pgweb(request, 'account', 'account/communityauth_cooloff.html', {
'site': site,
- })
+ })
if site.org.require_consent:
if not CommunityAuthConsent.objects.filter(org=site.org, user=request.user).exists():
'f': request.user.first_name.encode('utf-8'),
'l': request.user.last_name.encode('utf-8'),
'e': request.user.email.encode('utf-8'),
- }
+ }
if d:
info['d'] = d.encode('utf-8')
elif su:
# Encrypt it with the shared key (and IV!)
r = Random.new()
- iv = r.read(16) # Always 16 bytes for AES
+ iv = r.read(16) # Always 16 bytes for AES
encryptor = AES.new(base64.b64decode(site.cryptkey), AES.MODE_CBC, iv)
- cipher = encryptor.encrypt(s + ' ' * (16-(len(s) % 16))) #Pad to even 16 bytes
+ cipher = encryptor.encrypt(s + ' ' * (16 - (len(s) % 16))) # Pad to even 16 bytes
# Generate redirect
return HttpResponseRedirect("%s?i=%s&d=%s" % (
- site.redirecturl,
- base64.b64encode(iv, "-_"),
- base64.b64encode(cipher, "-_"),
- ))
+ site.redirecturl,
+ base64.b64encode(iv, "-_"),
+ base64.b64encode(cipher, "-_"),
+ ))
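# A hedged sketch of the matching decryption step a receiving community
# auth site would perform, mirroring the code above (AES-CBC with a random
# 16-byte IV, space padding, '-_' urlsafe base64). Names are illustrative.
def _decrypt_auth_response(cryptkey, i, d):  # hypothetical, for illustration
    decryptor = AES.new(base64.b64decode(cryptkey),
                        AES.MODE_CBC,
                        base64.b64decode(str(i), "-_"))
    # Strip the space padding added before encryption
    return decryptor.decrypt(base64.b64decode(str(d), "-_")).rstrip(' ')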
def communityauth_logout(request, siteid):
# Redirect user back to the specified suburl
return HttpResponseRedirect("%s?s=logout" % site.redirecturl)
+
@login_required
def communityauth_consent(request, siteid):
org = get_object_or_404(CommunityAuthSite, id=siteid).org
form = CommunityAuthConsentForm(org.orgname, data=request.POST)
if form.is_valid():
CommunityAuthConsent.objects.get_or_create(user=request.user, org=org,
- defaults={'consentgiven':datetime.now()},
+ defaults={'consentgiven': datetime.now()},
)
return HttpResponseRedirect(form.cleaned_data['next'])
else:
def _encrypt_site_response(site, s):
# Encrypt it with the shared key (and IV!)
r = Random.new()
- iv = r.read(16) # Always 16 bytes for AES
+ iv = r.read(16) # Always 16 bytes for AES
encryptor = AES.new(base64.b64decode(site.cryptkey), AES.MODE_CBC, iv)
- cipher = encryptor.encrypt(s + ' ' * (16-(len(s) % 16))) #Pad to even 16 bytes
+ cipher = encryptor.encrypt(s + ' ' * (16 - (len(s) % 16))) # Pad to even 16 bytes
# Base64-encode the response, just to be consistent
return "%s&%s" % (
base64.b64encode(cipher, '-_'),
)
+
def communityauth_search(request, siteid):
# Perform a search for users. The response will be encrypted with the site
# key to prevent abuse, therefore we need the site.
return HttpResponse(_encrypt_site_response(site, j))
+
def communityauth_getkeys(request, siteid, since=None):
# Get any updated ssh keys for community accounts.
# The response will be encrypted with the site key to prevent abuse,
from models import Contributor, ContributorType
+
class ContributorAdminForm(forms.ModelForm):
class Meta:
model = Contributor
self.fields['user'].widget.can_add_related = False
self.fields['user'].widget.can_change_related = False
+
class ContributorAdmin(admin.ModelAdmin):
form = ContributorAdminForm
+
admin.site.register(ContributorType)
admin.site.register(Contributor, ContributorAdmin)
from django.db import models
from django.contrib.auth.models import User
+
class ContributorType(models.Model):
typename = models.CharField(max_length=32, null=False, blank=False)
sortorder = models.IntegerField(null=False, default=100)
class Meta:
ordering = ('sortorder',)
+
class Contributor(models.Model):
ctype = models.ForeignKey(ContributorType)
lastname = models.CharField(max_length=100, null=False, blank=False)
contribution = models.TextField(null=True, blank=True)
user = models.ForeignKey(User, null=True, blank=True)
- send_notification=True
+ send_notification = True
purge_urls = ('/community/contributors/', )
def __unicode__(self):
from models import ContributorType
+
def completelist(request):
contributortypes = list(ContributorType.objects.all())
return render_pgweb(request, 'community', 'contributors/list.html', {
from pgweb.core.lookups import UserLookup
+
class OrganisationAdminForm(forms.ModelForm):
class Meta:
model = Organisation
self.fields['managers'].widget.can_change_related = False
self.fields['managers'].widget.can_delete_related = False
+
class OrganisationAdmin(admin.ModelAdmin):
form = OrganisationAdminForm
list_display = ('name', 'approved', 'lastconfirmed',)
ordering = ('name', )
search_fields = ('name', )
+
class VersionAdmin(admin.ModelAdmin):
list_display = ('versionstring', 'reldate', 'supported', 'current', )
+
admin.site.register(Version, VersionAdmin)
admin.site.register(OrganisationType)
admin.site.register(Organisation, OrganisationAdmin)
admin.site.register(ImportedRSSFeed)
admin.site.register(ImportedRSSItem)
admin.site.register(ModerationNotification)
-
from datetime import datetime, time
+
class VersionFeed(Feed):
title = "PostgreSQL latest versions"
link = "https://www.postgresql.org/"
return "https://www.postgresql.org/docs/%s/%s" % (obj.numtree, obj.relnotes)
def item_pubdate(self, obj):
- return datetime.combine(obj.reldate,time.min)
+ return datetime.combine(obj.reldate, time.min)
from models import Organisation
from django.contrib.auth.models import User
+
class OrganisationForm(forms.ModelForm):
remove_manager = forms.ModelMultipleChoiceField(required=False, queryset=None, label="Current manager(s)", help_text="Select one or more managers to remove")
add_manager = forms.EmailField(required=False)
def apply_submitter(self, model, User):
model.managers.add(User)
+
class MergeOrgsForm(forms.Form):
merge_into = forms.ModelChoiceField(queryset=Organisation.objects.all())
merge_from = forms.ModelChoiceField(queryset=Organisation.objects.all())
# Display for choice listings
return u"%s (%s)" % (item.username, item.get_full_name())
+
registry.register(UserLookup)
from pgweb.account.models import EmailChangeToken
+
class Command(BaseCommand):
help = 'Cleanup old records'
# Clean up old email change tokens
with transaction.atomic():
- EmailChangeToken.objects.filter(sentat__lt=datetime.now()-timedelta(hours=24)).delete()
+ EmailChangeToken.objects.filter(sentat__lt=datetime.now() - timedelta(hours=24)).delete()
from pgweb.core.models import ImportedRSSFeed, ImportedRSSItem
+
class Command(BaseCommand):
help = 'Fetch RSS feeds'
if not hasattr(feed, 'status'):
# bozo_exception can seemingly be set when there is no error as well,
# so make sure we only check if we didn't get a status.
- if hasattr(feed,'bozo_exception'):
+ if hasattr(feed, 'bozo_exception'):
raise Exception('Feed load error %s' % feed.bozo_exception)
raise Exception('Feed load error with no exception!')
if feed.status != 200:
try:
item = ImportedRSSItem.objects.get(feed=importfeed, url=entry.link)
except ImportedRSSItem.DoesNotExist:
- item = ImportedRSSItem(feed=importfeed,
- title=entry.title[:100],
- url=entry.link,
- posttime=datetime(*(entry.published_parsed[0:6])),
+ item = ImportedRSSItem(
+ feed=importfeed,
+ title=entry.title[:100],
+ url=entry.link,
+ posttime=datetime(*(entry.published_parsed[0:6])),
)
item.save()
fetchedsomething = True
from pgweb.util.moderation import get_all_pending_moderations
from pgweb.util.misc import send_template_mail
+
class Command(BaseCommand):
help = 'Send moderation report'
"core/moderation_report.txt",
{
'items': counts,
- })
+ })
from django.contrib.sessions.models import Session
from django.contrib.auth.models import User
+
class Command(BaseCommand):
help = 'Dump interesting information about a session'
session.pop(k, None)
if session:
print " -- Other session values --"
- for k,v in session.items():
- print u"{0:20} {1}".format(k,v)
+ for k, v in session.items():
+ print u"{0:20} {1}".format(k, v)
except Session.DoesNotExist:
raise CommandError('Session not found')
-
import pgweb.core.models
+
class Migration(migrations.Migration):
dependencies = [
(1, 'Release candidate'),
(2, 'Beta'),
(3, 'Alpha'),
- )
+)
TESTING_SHORTSTRING = ('', 'rc', 'beta', 'alpha')
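# Hedged illustration: with the choices above, a Version with testing=2
# displays as 'Beta', and TESTING_SHORTSTRING[2] yields the 'beta' fragment
# used when building version strings.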
+
class Version(models.Model):
tree = models.DecimalField(max_digits=3, decimal_places=1, null=False, blank=False, unique=True)
latestminor = models.IntegerField(null=False, blank=False, default=0, help_text="For testing versions, latestminor means latest beta/rc number. For other releases, it's the latest minor release number in the tree.")
for p in previous:
if not p == self:
p.current = False
- p.save() # primary key check avoids recursion
+ p.save() # primary key check avoids recursion
# Now that we've made any previously current ones non-current, we are
# free to save this one.
def __unicode__(self):
return self.name
+
class Language(models.Model):
# Import data from http://www.loc.gov/standards/iso639-2/ISO-639-2_utf-8.txt
# (yes, there is a UTF16 BOM in the UTF8 file)
def __unicode__(self):
return self.name
+
class OrganisationType(models.Model):
typename = models.CharField(max_length=32, null=False, blank=False)
def __unicode__(self):
return self.typename
+
class Organisation(models.Model):
name = models.CharField(max_length=100, null=False, blank=False, unique=True)
approved = models.BooleanField(null=False, default=False)
def __unicode__(self):
return self.internalname
+
class ImportedRSSItem(models.Model):
feed = models.ForeignKey(ImportedRSSFeed)
title = models.CharField(max_length=100, null=False, blank=False)
# From man sshd, except for ssh-dss
_valid_keytypes = ['ecdsa-sha2-nistp256', 'ecdsa-sha2-nistp384', 'ecdsa-sha2-nistp521', 'ssh-ed25519', 'ssh-rsa']
+
+
# Options, keytype, key, comment. But we don't support options.
def validate_sshkey(key):
lines = key.splitlines()
except:
raise ValidationError("Incorrect base64 encoded key!")
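# A hedged sketch of the per-line check validate_sshkey appears to perform
# (the middle of the function is elided in this hunk): each line should be
# "keytype key [comment]", the keytype must be in _valid_keytypes, and the
# key part must base64-decode cleanly. The keytype error text is an assumption.
def _validate_sshkey_line(line):  # hypothetical helper, for illustration only
    import base64
    pieces = line.split()
    if len(pieces) < 2 or pieces[0] not in _valid_keytypes:
        raise ValidationError("Incorrect key type!")
    try:
        base64.b64decode(pieces[1])
    except:
        raise ValidationError("Incorrect base64 encoded key!")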
+
# Extra attributes for users (if they have them)
class UserProfile(models.Model):
user = models.OneToOneField(User, null=False, blank=False, primary_key=True)
- sshkey = models.TextField(null=False, blank=True, verbose_name="SSH key", help_text= "Paste one or more public keys in OpenSSH format, one per line.", validators=[validate_sshkey, ])
+ sshkey = models.TextField(null=False, blank=True, verbose_name="SSH key", help_text="Paste one or more public keys in OpenSSH format, one per line.", validators=[validate_sshkey, ])
lastmodified = models.DateTimeField(null=False, blank=False, auto_now=True)
+
# Notifications sent for any moderated content.
# Yes, we uglify it by storing the type of object as a string, so we don't
# end up with a bazillion fields being foreign keys. Ugly, but works.
objectid = models.IntegerField(null=False, blank=False, db_index=True)
objecttype = models.CharField(null=False, blank=False, max_length=100)
text = models.TextField(null=False, blank=False)
- author = models.CharField(null=False, blank=False, max_length=100)
+ author = models.CharField(null=False, blank=False, max_length=100)
date = models.DateTimeField(null=False, blank=False, auto_now=True)
def __unicode__(self):
import os
+
def get_struct():
yield ('', None)
yield ('community/', None)
register = template.Library()
+
@register.filter(name='class_name')
def class_name(ob):
return ob.__class__.__name__
+
@register.filter(is_safe=True)
def field_class(value, arg):
if 'class' in value.field.widget.attrs:
c = arg
return value.as_widget(attrs={"class": c})
+
@register.filter(name='hidemail')
@stringfilter
def hidemail(value):
return value.replace('@', ' at ')
+
@register.filter(is_safe=True)
def ischeckbox(obj):
return obj.field.widget.__class__.__name__ in ["CheckboxInput", "CheckboxSelectMultiple"] and not getattr(obj.field, 'regular_field', False)
+
@register.filter(is_safe=True)
def ismultiplecheckboxes(obj):
return obj.field.widget.__class__.__name__ == "CheckboxSelectMultiple" and not getattr(obj.field, 'regular_field', False)
+
@register.filter(is_safe=True)
def isrequired_error(obj):
if obj.errors and obj.errors[0] == u"This field is required.":
return True
return False
+
@register.filter(is_safe=True)
def label_class(value, arg):
return value.label_tag(attrs={'class': arg})
+
@register.filter()
def planet_author(obj):
# takes a ImportedRSSItem object from a Planet feed and extracts the author
# information from the title
return obj.title.split(':')[0]
+
@register.filter()
def planet_title(obj):
# takes a ImportedRSSItem object from a Planet feed and extracts the info
# specific to the title of the Planet entry
return ":".join(obj.title.split(':')[1:])
+
@register.filter(name='dictlookup')
def dictlookup(value, key):
return value.get(key, None)
+
@register.filter(name='json')
def tojson(value):
return json.dumps(value)
from models import Organisation
from forms import OrganisationForm, MergeOrgsForm
+
# Front page view
@cache(minutes=10)
def home(request):
'planet': planet,
})
+
# About page view (contains information about PostgreSQL + random quotes)
@cache(minutes=10)
def about(request):
'quotes': quotes,
})
+
# Community main page (contains surveys and potentially more)
def community(request):
s = Survey.objects.filter(current=True)
'planet': planet,
})
+
# List of supported versions
def versions(request):
return render_pgweb(request, 'support', 'support/versioning.html', {
- 'versions': Version.objects.filter(tree__gt=0).filter(testing=0),
+ 'versions': Version.objects.filter(tree__gt=0).filter(testing=0),
})
+
re_staticfilenames = re.compile("^[0-9A-Z/_-]+$", re.IGNORECASE)
+
+
# Generic fallback view for static pages
def fallback(request, url):
if url.find('..') > -1:
# Guestimate the nav section by looking at the URL and taking the first
# piece of it.
try:
- navsect = url.split('/',2)[0]
+ navsect = url.split('/', 2)[0]
except:
navsect = ''
c = PGWebContextProcessor(request)
c.update({'navmenu': get_nav_menu(navsect)})
return HttpResponse(t.render(c))
+
# Edit-forms for core objects
@login_required
def organisationform(request, itemid):
return simple_form(Organisation, itemid, request, OrganisationForm,
redirect='/account/edit/organisations/')
+
# robots.txt
def robots(request):
return HttpResponse("""User-agent: *
x.startElement('urlset', {'xmlns': 'http://www.sitemaps.org/schemas/sitemap/0.9'})
pages = 0
for p in pagelist:
- pages+=1
+ pages += 1
x.startElement('url', {})
x.add_xml_element('loc', 'https://www.postgresql.org/%s' % urllib.quote(p[0]))
if len(p) > 1 and p[1]:
x.endDocument()
return resp
+
# Sitemap (XML format)
@cache(hours=6)
def sitemap(request):
return _make_sitemap(get_all_pages_struct())
+
# Internal sitemap (only for our own search engine)
# Note! Still served up to anybody who wants it, so don't
# put anything secret in it...
def sitemap_internal(request):
return _make_sitemap(get_all_pages_struct(method='get_internal_struct'))
+
# dynamic CSS serving, meaning we merge a number of different CSS into a
# single one, making sure it turns into a single http response. We do this
# dynamically, since the output will be cached.
_dynamic_cssmap = {
'base': ['media/css/main.css',
- 'media/css/normalize.css',],
+ 'media/css/normalize.css', ],
'docs': ['media/css/global.css',
'media/css/table.css',
'media/css/text.css',
'media/css/docs.css'],
- }
+}
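# A hedged sketch of what the (elided) dynamic_css body does with the map
# above: look up the requested key and concatenate the listed files into a
# single cacheable text/css response. File handling details are assumptions;
# Http404/HttpResponse are assumed imported as elsewhere in this module.
def _dynamic_css_sketch(css):  # hypothetical, for illustration only
    if css not in _dynamic_cssmap:
        raise Http404('CSS not found')
    resp = HttpResponse(content_type='text/css')
    for fn in _dynamic_cssmap[css]:
        with open(fn) as f:
            resp.write(f.read())
    return resp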
+
@cache(hours=6)
def dynamic_css(request, css):
return resp
+
@nocache
def csrf_failure(request, reason=''):
resp = render(request, 'errors/csrf_failure.html', {
- 'reason': reason,
- })
- resp.status_code = 403 # Forbidden
+ 'reason': reason,
+ })
+ resp.status_code = 403 # Forbidden
return resp
+
# Basic information about the connection
@cache(seconds=30)
def system_information(request):
- return render(request,'core/system_information.html', {
- 'server': os.uname()[1],
- 'cache_server': request.META['REMOTE_ADDR'] or None,
- 'client_ip': get_client_ip(request),
- 'django_version': django.get_version(),
+ return render(request, 'core/system_information.html', {
+ 'server': os.uname()[1],
+ 'cache_server': request.META['REMOTE_ADDR'] or None,
+ 'client_ip': get_client_ip(request),
+ 'django_version': django.get_version(),
})
+
# Sync timestamp for automirror. Keep it around for 30 seconds.
# Basically just a check that we can access the backend still...
@cache(seconds=30)
def sync_timestamp(request):
s = datetime.now().strftime("%Y-%m-%d %H:%M:%S\n")
- r = HttpResponse(s, content_type='text/plain')
+ r = HttpResponse(s, content_type='text/plain')
r['Content-Length'] = len(s)
return r
+
# List of all unapproved objects, for the special admin page
@login_required
@user_passes_test(lambda u: u.is_staff)
@user_passes_test(lambda u: u.groups.filter(name='pgweb moderators').exists())
def admin_pending(request):
return render(request, 'core/admin_pending.html', {
- 'app_list': get_all_pending_moderations(),
- })
+ 'app_list': get_all_pending_moderations(),
+ })
+
# Purge objects from varnish, for the admin pages
@login_required
latest = curs.fetchall()
return render(request, 'core/admin_purge.html', {
- 'latest_purges': latest,
- })
+ 'latest_purges': latest,
+ })
+
@csrf_exempt
def api_varnish_purge(request):
curs.execute("SELECT varnish_purge_expr(%s)", (expr, ))
return HttpResponse("Purged %s entries\n" % n)
+
# Merge two organisations
@login_required
@user_passes_test(lambda u: u.is_superuser)
form = MergeOrgsForm()
return render(request, 'core/admin_mergeorg.html', {
- 'form': form,
+ 'form': form,
})
from django import forms
+
class DocCommentForm(forms.Form):
name = forms.CharField(max_length=100, required=True, label='Your Name')
email = forms.EmailField(max_length=100, required=True, label='Your Email')
shortdesc = forms.CharField(max_length=100, required=True, label="Subject")
details = forms.CharField(required=True, widget=forms.Textarea,
- label="What is your comment?")
+ label="What is your comment?")
from django.db import models
from pgweb.core.models import Version
+
class DocPage(models.Model):
id = models.AutoField(null=False, primary_key=True)
file = models.CharField(max_length=64, null=False, blank=False)
# Index file first, because we want to list versions by file
unique_together = [('file', 'version')]
+
class DocPageAlias(models.Model):
file1 = models.CharField(max_length=64, null=False, blank=False, unique=True)
file2 = models.CharField(max_length=64, null=False, blank=False, unique=True)
# XXX: needs a unique functional index as well, see the migration!
class Meta:
db_table = 'docsalias'
- verbose_name_plural='Doc page aliases'
+ verbose_name_plural = 'Doc page aliases'
from django.db import connection
from pgweb.core.models import Version
+
def get_struct():
currentversion = Version.objects.get(current=True)
version = int(version)
yield ('docs/%s/%s' % (version, filename),
- testing and 0.1 or docprio, # beta/rc versions always get 0.1 in prio
+ testing and 0.1 or docprio, # beta/rc versions always get 0.1 in prio
loaded)
# Also yield the current version urls, with the highest
yield ('docs/current/%s' % filename,
1.0, loaded)
+
# For our internal sitemap (used only by our own search engine),
# include the devel version of the docs (and only those, since the
# other versions are already included)
from models import DocPage
from forms import DocCommentForm
+
@allow_frames
@content_sources('style', "'unsafe-inline'")
def docpage(request, version, filename):
where=["file=%s OR file IN (SELECT file2 FROM docsalias WHERE file1=%s) OR file IN (SELECT file1 FROM docsalias WHERE file2=%s)"],
params=[fullname, fullname, fullname],
select={
- 'supported':"COALESCE((SELECT supported FROM core_version v WHERE v.tree=version), 'f')",
- 'testing':"COALESCE((SELECT testing FROM core_version v WHERE v.tree=version),0)",
- }).order_by('-supported', 'version').only('version', 'file')
+ 'supported': "COALESCE((SELECT supported FROM core_version v WHERE v.tree=version), 'f')",
+ 'testing': "COALESCE((SELECT testing FROM core_version v WHERE v.tree=version),0)",
+ }).order_by('-supported', 'version').only('version', 'file')
return render(request, 'docs/docspage.html', {
'page': page,
'loaddate': loaddate,
})
+
def docspermanentredirect(request, version, typ, page, *args):
"""Provides a permanent redirect from the old static/interactive pages to
the modern pages that do not have said keywords.
url += page
return HttpResponsePermanentRedirect(url)
+
def docsrootpage(request, version):
return docpage(request, version, 'index')
+
def redirect_root(request, version):
return HttpResponsePermanentRedirect("/docs/%s/" % version)
+
def root(request):
- versions = Version.objects.filter(Q(supported=True) | Q(testing__gt=0,tree__gt=0)).order_by('-tree')
+ versions = Version.objects.filter(Q(supported=True) | Q(testing__gt=0, tree__gt=0)).order_by('-tree')
return render_pgweb(request, 'docs', 'docs/index.html', {
'versions': versions,
})
+
class _VersionPdfWrapper(object):
"""
A wrapper around a version that knows to look for PDF files, and
self.indexname = 'postgres.html'
else:
self.indexname = 'index.html'
+
def __getattr__(self, name):
return getattr(self.__version, name)
+
def _find_pdf(self, pagetype):
try:
return os.stat('%s/documentation/pdf/%s/postgresql-%s-%s.pdf' % (settings.STATIC_CHECKOUT, self.__version.numtree, self.__version.numtree, pagetype)).st_size
except:
return 0
+
def manuals(request):
- versions = Version.objects.filter(Q(supported=True) | Q(testing__gt=0,tree__gt=0)).order_by('-tree')
+ versions = Version.objects.filter(Q(supported=True) | Q(testing__gt=0, tree__gt=0)).order_by('-tree')
return render_pgweb(request, 'docs', 'docs/manuals.html', {
'versions': [_VersionPdfWrapper(v) for v in versions],
})
+
def manualarchive(request):
- versions = Version.objects.filter(testing=0,supported=False,tree__gt=0).order_by('-tree')
+ versions = Version.objects.filter(testing=0, supported=False, tree__gt=0).order_by('-tree')
return render_pgweb(request, 'docs', 'docs/archive.html', {
'versions': [_VersionPdfWrapper(v) for v in versions],
})
+
@login_required
def commentform(request, itemid, version, filename):
v = get_object_or_404(Version, tree=version)
from pgweb.util.admin import PgwebAdmin
from models import StackBuilderApp, Category, Product, LicenceType
+
class ProductAdmin(PgwebAdmin):
list_display = ('name', 'org', 'approved', 'lastconfirmed',)
list_filter = ('approved',)
search_fields = ('name', 'description', )
ordering = ('name', )
+
def duplicate_stackbuilderapp(modeladmin, request, queryset):
# Duplicate each individual selected object, but turn off
# the active flag if it's on.
for o in queryset:
- o.id = None # Triggers creation of a new object
+ o.id = None # Triggers creation of a new object
o.active = False
o.textid = o.textid + "_new"
o.save()
+
duplicate_stackbuilderapp.short_description = "Duplicate application"
+
class StackBuilderAppAdminForm(forms.ModelForm):
class Meta:
model = StackBuilderApp
raise ValidationError("Dependency '%s' does not exist!" % d)
return self.cleaned_data['txtdependencies']
+
class StackBuilderAppAdmin(admin.ModelAdmin):
list_display = ('textid', 'active', 'name', 'platform', 'version', )
actions = [duplicate_stackbuilderapp, ]
form = StackBuilderAppAdminForm
+
admin.site.register(Category)
admin.site.register(LicenceType)
admin.site.register(Product, ProductAdmin)
from pgweb.core.models import Organisation
from models import Product
+
class ProductForm(forms.ModelForm):
form_intro = """Note that in order to register a new product, you must first register an organisation.
If you have not done so, use <a href="/account/organisations/new/">this form</a>."""
+
def __init__(self, *args, **kwargs):
super(ProductForm, self).__init__(*args, **kwargs)
+
def filter_by_user(self, user):
self.fields['org'].queryset = Organisation.objects.filter(managers=user, approved=True)
+
class Meta:
model = Product
exclude = ('lastconfirmed', 'approved', )
class Meta:
ordering = ('catname',)
+
class LicenceType(models.Model):
typename = models.CharField(max_length=100, null=False, blank=False)
class Meta:
ordering = ('typename',)
+
class Product(models.Model):
name = models.CharField(max_length=100, null=False, blank=False, unique=True)
approved = models.BooleanField(null=False, default=False)
class Meta:
ordering = ('name',)
+
class StackBuilderApp(models.Model):
textid = models.CharField(max_length=100, null=False, blank=False)
version = models.CharField(max_length=20, null=False, blank=False)
platform = models.CharField(max_length=20, null=False, blank=False,
- choices= (('windows', 'Windows (32-bit)'), ('windows-x64', 'Windows (64-bit)'), ('osx', 'Mac OS X'),
- ('linux', 'Linux (32-bit)'), ('linux-x64', 'Linux (64-bit)'))
- )
+ choices=(
+ ('windows', 'Windows (32-bit)'),
+ ('windows-x64', 'Windows (64-bit)'),
+ ('osx', 'Mac OS X'),
+ ('linux', 'Linux (32-bit)'),
+ ('linux-x64', 'Linux (64-bit)'),
+ ))
secondaryplatform = models.CharField(max_length=20, null=False, blank=True,
- choices= (('', 'None'), ('windows', 'Windows (32-bit)'), ('windows-x64', 'Windows (64-bit)'),
- ('osx', 'Mac OS X'), ('linux', 'Linux (32-bit)'), ('linux-x64', 'Linux (64-bit)'))
- )
+ choices=(
+ ('', 'None'),
+ ('windows', 'Windows (32-bit)'),
+ ('windows-x64', 'Windows (64-bit)'),
+ ('osx', 'Mac OS X'),
+ ('linux', 'Linux (32-bit)'),
+ ('linux-x64', 'Linux (64-bit)')
+ ))
name = models.CharField(max_length=500, null=False, blank=False)
active = models.BooleanField(null=False, blank=False, default=True)
description = models.TextField(null=False, blank=False)
pgversion = models.CharField(max_length=5, null=False, blank=True)
edbversion = models.CharField(max_length=5, null=False, blank=True)
format = models.CharField(max_length=5, null=False, blank=False,
- choices = (('bin', 'Linux .bin'), ('app', 'Mac .app'),
- ('pkg', 'Mac .pkg'), ('mpkg', 'Mac .mpkg'),
- ('exe', 'Windows .exe'), ('msi', 'Windows .msi'))
- )
+ choices=(
+ ('bin', 'Linux .bin'),
+ ('app', 'Mac .app'),
+ ('pkg', 'Mac .pkg'),
+ ('mpkg', 'Mac .mpkg'),
+ ('exe', 'Windows .exe'),
+ ('msi', 'Windows .msi')
+ ))
installoptions = models.CharField(max_length=500, null=False, blank=True)
upgradeoptions = models.CharField(max_length=500, null=False, blank=True)
checksum = models.CharField(max_length=32, null=False, blank=False)
from models import Category
+
def get_struct():
# Products
for c in Category.objects.all():
from models import Category, Product, StackBuilderApp
from forms import ProductForm
+
#######
# FTP browser
#######
raise Http404
subpath = subpath.strip('/')
else:
- subpath=""
+ subpath = ""
# Pickle up the list of things we need
try:
del allnodes
# Add all directories
- directories = [{'link': k, 'url': k, 'type': 'd'} for k,v in node.items() if v['t'] == 'd']
+ directories = [{'link': k, 'url': k, 'type': 'd'} for k, v in node.items() if v['t'] == 'd']
# Add all symlinks (only directories supported)
- directories.extend([{'link': k, 'url': v['d'], 'type': 'l'} for k,v in node.items() if v['t'] == 'l'])
+ directories.extend([{'link': k, 'url': v['d'], 'type': 'l'} for k, v in node.items() if v['t'] == 'l'])
# A little early sorting wouldn't go amiss, so .. ends up at the top
- directories.sort(key = version_sort, reverse=True)
+ directories.sort(key=version_sort, reverse=True)
# Add a link to the parent directory
if subpath:
- directories.insert(0, {'link':'[Parent Directory]', 'url':'..'})
+ directories.insert(0, {'link': '[Parent Directory]', 'url': '..'})
# Fetch files
- files = [{'name': k, 'mtime': v['d'], 'size': v['s']} for k,v in node.items() if v['t'] == 'f']
+ files = [{'name': k, 'mtime': v['d'], 'size': v['s']} for k, v in node.items() if v['t'] == 'f']
breadcrumbs = []
if subpath:
breadroot = "%s/%s" % (breadroot, pathpiece)
else:
breadroot = pathpiece
- breadcrumbs.append({'name': pathpiece, 'path': breadroot});
+ breadcrumbs.append({'name': pathpiece, 'path': breadroot})
# Check if there are any "content files" we should render directly on the webpage
- file_readme = (node.has_key('README') and node['README']['t']=='f') and node['README']['c'] or None;
- file_message = (node.has_key('.message') and node['.message']['t']=='f') and node['.message']['c'] or None;
- file_maintainer = (node.has_key('CURRENT_MAINTAINER') and node['CURRENT_MAINTAINER']['t'] == 'f') and node['CURRENT_MAINTAINER']['c'] or None;
+ file_readme = (node.has_key('README') and node['README']['t'] == 'f') and node['README']['c'] or None
+ file_message = (node.has_key('.message') and node['.message']['t'] == 'f') and node['.message']['c'] or None
+ file_maintainer = (node.has_key('CURRENT_MAINTAINER') and node['CURRENT_MAINTAINER']['t'] == 'f') and node['CURRENT_MAINTAINER']['c'] or None
del node
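# Hedged illustration of the pickled directory structure consumed above,
# inferred from the lookups ('t' = entry type, 'd' = symlink target or file
# mtime, 's' = size, 'c' = inlined contents for README-style files):
#
#   node = {
#       'v10': {'t': 'd'},                          # directory
#       'latest': {'t': 'l', 'd': 'v10'},           # symlink
#       'README': {'t': 'f', 'd': mtime, 's': 1234,
#                  'c': '...file contents...'},     # regular file
#   }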
# Finally, indicate to the client that we're happy
return HttpResponse("OK", content_type="text/plain")
+
@csrf_exempt
def uploadyum(request):
if request.method != 'PUT':
# Finally, indicate to the client that we're happy
return HttpResponse("OK", content_type="text/plain")
+
@nocache
def mirrorselect(request, path):
# Old access to mirrors will just redirect to the main ftp site.
return render(request, 'downloads/js/yum.js', {
'json': jsonstr,
'supported_versions': ','.join([str(v.numtree) for v in Version.objects.filter(supported=True)]),
- }, content_type='application/json')
+ }, content_type='application/json')
+
#######
# Product catalogue
'categories': categories,
})
+
def productlist(request, catid, junk=None):
category = get_object_or_404(Category, pk=catid)
- products = Product.objects.select_related('org','licencetype').filter(category=category, approved=True)
+ products = Product.objects.select_related('org', 'licencetype').filter(category=category, approved=True)
return render_pgweb(request, 'download', 'downloads/productlist.html', {
'category': category,
'products': products,
'productcount': len(products),
})
+
@login_required
def productform(request, itemid):
return simple_form(Product, itemid, request, ProductForm,
redirect='/account/edit/products/')
+
#######
# Stackbuilder
#######
from pgweb.util.admin import PgwebAdmin
from models import Event
+
def approve_event(modeladmin, request, queryset):
# We need to do this in a loop even though it's less efficient,
# since using queryset.update() will not send the moderation messages.
for e in queryset:
e.approved = True
e.save()
+
+
approve_event.short_description = 'Approve event'
+
class EventAdminForm(forms.ModelForm):
class Meta:
model = Event
del cleaned_data['country']
return cleaned_data
+
class EventAdmin(PgwebAdmin):
list_display = ('title', 'org', 'startdate', 'enddate', 'approved',)
list_filter = ('approved',)
from datetime import datetime, time
+
class EventFeed(Feed):
title = description = "PostgreSQL events"
link = "https://www.postgresql.org/"
return "https://www.postgresql.org/about/event/%s/" % obj.id
def item_pubdate(self, obj):
- return datetime.combine(obj.startdate,time.min)
+ return datetime.combine(obj.startdate, time.min)
from pgweb.core.models import Organisation
from models import Event
+
class EventForm(forms.ModelForm):
toggle_fields = [
{
'name': 'isonline',
'invert': True,
- 'fields': ['city', 'state', 'country',]
+ 'fields': ['city', 'state', 'country', ]
},
]
+
def __init__(self, *args, **kwargs):
super(EventForm, self).__init__(*args, **kwargs)
+
def filter_by_user(self, user):
self.fields['org'].queryset = Organisation.objects.filter(managers=user, approved=True)
from pgweb.core.models import Country, Language, Organisation
+
class Event(models.Model):
approved = models.BooleanField(null=False, blank=False, default=False)
mgrs = self.org.managers.all()
if len(mgrs) == 1:
if mgrs[0].pk == 0:
- return False # Migration organisation
+ return False # Migration organisation
else:
- return True # Has an actual organisation
+ return True # Has an actual organisation
elif len(mgrs) > 1:
# More than one manager means it must be new
return True
- return False # Has no organisastion at all
+ return False  # Has no organisation at all
@property
def displaydate(self):
return "%s, %s" % (self.city, self.country)
class Meta:
- ordering = ('-startdate','-enddate',)
+ ordering = ('-startdate', '-enddate', )
from datetime import date
from models import Event
+
def get_struct():
now = date.today()
if yearsold > 4:
yearsold = 4
yield ('about/event/%s/' % n.id,
- 0.5-(yearsold/10.0))
+ 0.5 - (yearsold / 10.0))
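# Hedged worked example of the priority formula above: a two-year-old event
# gets 0.5 - 2/10.0 = 0.3, and capping yearsold at 4 keeps the floor at 0.1.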
from models import Event
from forms import EventForm
+
def main(request):
community_events = Event.objects.select_related('country').filter(approved=True, badged=True).filter(enddate__gt=date.today()).order_by('enddate', 'startdate',)
other_events = Event.objects.select_related('country').filter(approved=True, badged=False).filter(enddate__gt=date.today()).order_by('enddate', 'startdate',)
return render_pgweb(request, 'about', 'events/archive.html', {
'title': 'Upcoming Events',
'eventblocks': (
- { 'name': 'Community Events', 'events': community_events, 'link': '',},
- { 'name': 'Other Events', 'events': other_events, 'link': '',},
+ {'name': 'Community Events', 'events': community_events, 'link': '', },
+ {'name': 'Other Events', 'events': other_events, 'link': '', },
),
})
+
def _eventarchive(request, title):
# Hardcode to the latest 100 events. Do we need paging too?
events = Event.objects.select_related('country').filter(approved=True).filter(enddate__lte=date.today()).order_by('-enddate', '-startdate',)[:100]
return render_pgweb(request, 'about', 'events/archive.html', {
- 'title': '%s Archive' % title,
- 'archive': True,
- 'eventblocks': (
- {'name': title, 'events': events, },
- ),
+ 'title': '%s Archive' % title,
+ 'archive': True,
+ 'eventblocks': (
+ {'name': title, 'events': events, },
+ ),
})
+
def archive(request):
return _eventarchive(request, 'Event')
+
def item(request, itemid, throwaway=None):
event = get_object_or_404(Event, pk=itemid)
if not event.approved:
'obj': event,
})
+
@login_required
def form(request, itemid):
return simple_form(Event, itemid, request, EventForm,
from models import Feature, FeatureGroup
+
class FeatureInline(admin.TabularInline):
model = Feature
+
class FeatureGroupAdmin(admin.ModelAdmin):
inlines = [FeatureInline, ]
list_display = ('groupname', 'groupsort')
ordering = ['groupsort']
+
class FeatureAdmin(admin.ModelAdmin):
list_display = ('featurename', 'group')
list_filter = ('group',)
search_fields = ('featurename',)
+
admin.site.register(FeatureGroup, FeatureGroupAdmin)
admin.site.register(Feature, FeatureAdmin)
from django.db import models
choices_map = {
- 0: {'str': 'No', 'class': 'no', 'bgcolor': '#ffdddd'},
- 1: {'str': 'Yes', 'class': 'yes', 'bgcolor': '#ddffdd'},
- 2: {'str': 'Obsolete', 'class': 'obs', 'bgcolor': '#ddddff'},
- 3: {'str': '?', 'class': 'unk', 'bgcolor': '#ffffaa'},
+ 0: {'str': 'No', 'class': 'no', 'bgcolor': '#ffdddd'},
+ 1: {'str': 'Yes', 'class': 'yes', 'bgcolor': '#ddffdd'},
+ 2: {'str': 'Obsolete', 'class': 'obs', 'bgcolor': '#ddddff'},
+ 3: {'str': '?', 'class': 'unk', 'bgcolor': '#ffffaa'},
}
-choices = [(k, v['str']) for k,v in choices_map.items()]
+choices = [(k, v['str']) for k, v in choices_map.items()]
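# Hedged illustration: the comprehension above yields the Django choices
# list [(0, 'No'), (1, 'Yes'), (2, 'Obsolete'), (3, '?')] (dict iteration
# order is arbitrary on Python 2, which is harmless for a choices list).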
+
class FeatureGroup(models.Model):
groupname = models.CharField(max_length=100, null=False, blank=False)
@property
def columns(self):
# Return a list of all the columns for the matrix
- return [b for a,b in versions]
+ return [b for a, b in versions]
+
class Feature(models.Model):
group = models.ForeignKey(FeatureGroup, null=False, blank=False)
featurename = models.CharField(max_length=100, null=False, blank=False)
featuredescription = models.TextField(null=False, blank=True)
- #WARNING! All fields that start with "v" will be considered versions!
+ # WARNING! All fields that start with "v" will be considered versions!
v74 = models.IntegerField(verbose_name="7.4", null=False, blank=False, default=0, choices=choices)
v74.visible_default = False
v80 = models.IntegerField(verbose_name="8.0", null=False, blank=False, default=0, choices=choices)
def columns(self):
# Get a list of column based on all versions that are visible_default
- return [choices_map[getattr(self, a)] for a,b in versions]
+ return [choices_map[getattr(self, a)] for a, b in versions]
@property
def featurelink(self):
else:
return 'detail/%s/' % self.id
-versions = [(f.name,f.verbose_name) for f in Feature()._meta.fields if f.name.startswith('v') and getattr(f, 'visible_default', True)]
+
+versions = [(f.name, f.verbose_name) for f in Feature()._meta.fields if f.name.startswith('v') and getattr(f, 'visible_default', True)]
versions = sorted(versions, key=lambda f: -float(f[1]))
from pgweb.core.models import Version
from models import Feature
+
def root(request):
features = Feature.objects.all().select_related().order_by('group__groupsort', 'group__groupname', 'featurename')
groups = []
'versions': versions,
})
+
def detail(request, featureid):
feature = get_object_or_404(Feature, pk=featureid)
return render_pgweb(request, 'about', 'featurematrix/featuredetail.html', {
-#from django.db import models
+# from django.db import models
# Create your models here.
from django.http import HttpResponseRedirect
+
def mailpref(request, listname):
# Just redirect to the homepage of pglister, don't try specific lists
return HttpResponseRedirect("https://lists.postgresql.org/")
from django import forms
-
from django.conf import settings
import requests
+
class Command(BaseCommand):
help = 'Synchronize mailinglists'
def handle(self, *args, **options):
if settings.ARCHIVES_SEARCH_PLAINTEXT:
- proto="http"
+ proto = "http"
else:
- proto="https"
+ proto = "https"
r = requests.get('{0}://{1}/listinfo/'.format(proto, settings.ARCHIVES_SEARCH_SERVER))
j = r.json()
allgroups = list(set([l['group'] for l in j]))
from django.db import models
+
class MailingListGroup(models.Model):
groupname = models.CharField(max_length=64, null=False, blank=False)
sortkey = models.IntegerField(null=False, default=10)
class Meta:
ordering = ('sortkey', )
+
class MailingList(models.Model):
group = models.ForeignKey(MailingListGroup, null=False)
listname = models.CharField(max_length=64, null=False, blank=False, unique=True)
from models import MailingList, MailingListGroup
+
def listinfo(request):
resp = HttpResponse(content_type='application/json')
- groupdata = [ {
- 'id': g.id,
- 'name': g.groupname,
- 'sort': g.sortkey,
- } for g in MailingListGroup.objects.all()]
- listdata = [ {
- 'id': l.id,
- 'name': l.listname,
- 'groupid': l.group_id,
- 'active': l.active,
- 'shortdesc': l.shortdesc,
- 'description': l.description,
- } for l in MailingList.objects.all()]
+ groupdata = [{
+ 'id': g.id,
+ 'name': g.groupname,
+ 'sort': g.sortkey,
+ } for g in MailingListGroup.objects.all()]
+ listdata = [{
+ 'id': l.id,
+ 'name': l.listname,
+ 'groupid': l.group_id,
+ 'active': l.active,
+ 'shortdesc': l.shortdesc,
+ 'description': l.description,
+ } for l in MailingList.objects.all()]
json.dump({'groups': groupdata, 'lists': listdata}, resp)
return resp
from models import QueuedMail
+
class QueuedMailAdmin(admin.ModelAdmin):
model = QueuedMail
readonly_fields = ('parsed_content', )
parsed_content.short_description = 'Parsed mail'
+
admin.site.register(QueuedMail, QueuedMailAdmin)
from pgweb.mailqueue.models import QueuedMail
+
class Command(BaseCommand):
help = 'Send queued mail'
from django.db import models
+
class QueuedMail(models.Model):
sender = models.EmailField(max_length=100, null=False, blank=False)
receiver = models.EmailField(max_length=100, null=False, blank=False)
from models import QueuedMail
+
def _encoded_email_header(name, email):
if name:
return formataddr((str(Header(name, 'utf-8')), email))
return email
+
def send_simple_mail(sender, receiver, subject, msgtxt, attachments=None, usergenerated=False, cc=None, replyto=None, sendername=None, receivername=None, messageid=None):
# attachment format, each is a tuple of (name, mimetype, contents)
# content should be *binary* and not base64 encoded, since we need to
if attachments:
for filename, contenttype, content in attachments:
- main,sub = contenttype.split('/')
- part = MIMENonMultipart(main,sub)
+ main, sub = contenttype.split('/')
+ part = MIMENonMultipart(main, sub)
part.set_payload(content)
part.add_header('Content-Disposition', 'attachment; filename="%s"' % filename)
encoders.encode_base64(part)
msg.attach(part)
-
# Just write it to the queue, so it will be transactionally rolled back
QueuedMail(sender=sender, receiver=receiver, fullmsg=msg.as_string(), usergenerated=usergenerated).save()
if cc:
# message content to extract cc fields).
QueuedMail(sender=sender, receiver=cc, fullmsg=msg.as_string(), usergenerated=usergenerated).save()
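# A hedged usage sketch for the attachment format documented above: each
# attachment is a (name, mimetype, contents) tuple with *binary* contents.
# Addresses and payload here are illustrative only.
def _send_report_sketch(pdfbytes):  # hypothetical, for illustration only
    send_simple_mail('noreply@example.invalid', 'someone@example.invalid',
                     'Monthly report', 'See the attached report.',
                     attachments=[('report.pdf', 'application/pdf', pdfbytes)])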
+
def send_mail(sender, receiver, fullmsg, usergenerated=False):
# Send an email, prepared as the full MIME encoded mail already
QueuedMail(sender=sender, receiver=receiver, fullmsg=fullmsg, usergenerated=False).save()
from pgweb.core.models import Version
+
class _version_choices():
def __iter__(self):
yield ('-1', '** Select version')
q = Q(supported=True) | Q(testing__gt=0)
for v in Version.objects.filter(q):
- for minor in range(v.latestminor,-1,-1):
- if not v.testing or minor>0:
+ for minor in range(v.latestminor, -1, -1):
+ if not v.testing or minor > 0:
# For beta/rc versions, there is no beta0, so exclude it
s = v.buildversionstring(minor)
- yield (s,s)
+ yield (s, s)
yield ('Unsupported/Unknown', 'Unsupported/Unknown')
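# Hedged illustration of what the iterator above yields for a supported 9.6
# tree with latestminor=2 (exact strings depend on buildversionstring):
#   ('-1', '** Select version'), ('9.6.2', '9.6.2'), ('9.6.1', '9.6.1'),
#   ('9.6.0', '9.6.0'), ..., ('Unsupported/Unknown', 'Unsupported/Unknown')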
+
class SubmitBugForm(forms.Form):
name = forms.CharField(max_length=100, required=True)
email = forms.EmailField(max_length=100, required=True)
pgversion = forms.CharField(max_length=20, required=True,
- label="PostgreSQL version",
- widget=forms.Select(choices=_version_choices()))
+ label="PostgreSQL version",
+ widget=forms.Select(choices=_version_choices()))
os = forms.CharField(max_length=50, required=True,
- label="Operating system")
+ label="Operating system")
shortdesc = forms.CharField(max_length=100, required=True,
- label="Short description")
+ label="Short description")
details = forms.CharField(required=True, widget=forms.Textarea)
def clean_pgversion(self):
from django.db import models
+
class BugIdMap(models.Model):
# Explicit id field because we don't want a SERIAL here, since we generate
# the actual bug IDs externally.
from forms import SubmitBugForm
+
def _make_bugs_messageid(bugid):
return "<{0}-{1}@postgresql.org>".format(
bugid,
hashlib.md5("{0}-{1}".format(os.getpid(), time.time())).hexdigest()[:16],
)
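# Hedged illustration: _make_bugs_messageid(15000) produces something like
# '<15000-0f1e2d3c4b5a6978@postgresql.org>', the 16 hex chars varying with
# process id and timestamp.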
+
@login_required
def submitbug(request):
if request.method == 'POST':
'savebutton': 'Submit and Send Email',
})
+
@login_required
def submitbug_done(request, bugid):
return render_pgweb(request, 'support', 'misc/bug_completed.html', {
'bugid': bugid,
})
+
def bugs_redir(request, bugid):
r = get_object_or_404(BugIdMap, id=bugid)
return HttpResponseRedirect("{0}/message-id/{1}".format(settings.SITE_ROOT, r.messageid))
+
# A crash testing URL. If the file /tmp/crashtest exists, raise an HTTP 500
# error. Otherwise, just return a fixed text response
def crashtest(request):
from pgweb.util.admin import PgwebAdmin
from models import NewsArticle, NewsTag
+
class NewsArticleAdmin(PgwebAdmin):
list_display = ('title', 'org', 'date', 'approved', )
list_filter = ('approved', )
}
return super(NewsArticleAdmin, self).change_view(request, object_id, extra_context=my_context)
+
class NewsTagAdmin(PgwebAdmin):
list_display = ('urlname', 'name', 'description')
+
admin.site.register(NewsArticle, NewsArticleAdmin)
admin.site.register(NewsTag, NewsTagAdmin)
from datetime import datetime, time
+
class NewsFeed(Feed):
title = description = "PostgreSQL news"
link = "https://www.postgresql.org/"
return "https://www.postgresql.org/about/news/%s/" % obj.id
def item_pubdate(self, obj):
- return datetime.combine(obj.date,time.min)
+ return datetime.combine(obj.date, time.min)
from pgweb.core.models import Organisation
from models import NewsArticle, NewsTag
+
class NewsArticleForm(forms.ModelForm):
def __init__(self, *args, **kwargs):
super(NewsArticleForm, self).__init__(*args, **kwargs)
+
def filter_by_user(self, user):
self.fields['org'].queryset = Organisation.objects.filter(managers=user, approved=True)
+
def clean_date(self):
if self.instance.pk and self.instance.approved:
if self.cleaned_data['date'] != self.instance.date:
import requests_oauthlib
+
class Command(BaseCommand):
help = 'Post to twitter'
if not curs.fetchall()[0][0]:
raise CommandError("Failed to get advisory lock, existing twitter_post process stuck?")
- articles = list(NewsArticle.objects.filter(tweeted=False, approved=True, date__gt=datetime.now()-timedelta(days=7)).order_by('date'))
+ articles = list(NewsArticle.objects.filter(tweeted=False, approved=True, date__gt=datetime.now() - timedelta(days=7)).order_by('date'))
if not len(articles):
return
for a in articles:
# We hardcode 30 chars for the URL shortener. And then 10 to cover the intro and spacing.
- statusstr = u"News: {0} {1}/about/news/{2}/".format(a.title[:140-40], settings.SITE_ROOT, a.id)
+ statusstr = u"News: {0} {1}/about/news/{2}/".format(a.title[:140 - 40], settings.SITE_ROOT, a.id)
r = tw.post('https://api.twitter.com/1.1/statuses/update.json', data={
'status': statusstr,
})
import requests_oauthlib
+
class Command(BaseCommand):
help = 'Register with twitter oauth'
from datetime import date
from pgweb.core.models import Organisation
+
class NewsTag(models.Model):
urlname = models.CharField(max_length=20, null=False, blank=False, unique=True)
name = models.CharField(max_length=32, null=False, blank=False)
class Meta:
ordering = ('urlname', )
+
class NewsArticle(models.Model):
org = models.ForeignKey(Organisation, null=False, blank=False, verbose_name="Organisation", help_text="If no organisations are listed, please check the <a href=\"/account/orglist/\">organisation list</a> and contact the organisation manager or <a href=\"mailto:webmaster@postgresql.org\">webmaster@postgresql.org</a> if none are listed.")
approved = models.BooleanField(null=False, blank=False, default=False)
from datetime import date, timedelta
from models import NewsArticle
+
def get_struct():
now = date.today()
- fouryearsago = date.today() - timedelta(4*365, 0, 0)
+ fouryearsago = date.today() - timedelta(4 * 365, 0, 0)
# We intentionally don't put /about/newsarchive/ in the sitemap,
# since we don't care about getting it indexed.
if yearsold > 4:
yearsold = 4
yield ('about/news/%s/' % n.id,
- 0.5-(yearsold/10.0))
+ 0.5 - (yearsold / 10.0))
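+ # With the cap above, a two-year-old article lands at priority
+ # 0.5 - 0.2 = 0.3, and anything four years or older bottoms out at 0.1.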
import json
+
def archive(request, tag=None, paging=None):
if tag:
- tag = get_object_or_404(NewsTag,urlname=tag.strip('/'))
+ tag = get_object_or_404(NewsTag, urlname=tag.strip('/'))
news = NewsArticle.objects.filter(approved=True, tags=tag)
else:
tag = None
'newstags': NewsTag.objects.all(),
})
+
def item(request, itemid, throwaway=None):
news = get_object_or_404(NewsArticle, pk=itemid)
if not news.approved:
'newstags': NewsTag.objects.all(),
})
+
def taglist_json(request):
return HttpResponse(json.dumps({
'tags': [{'name': t.urlname, 'description': t.description} for t in NewsTag.objects.distinct('urlname')],
from pgweb.util.admin import PgwebAdmin
from models import ProfessionalService
+
class ProfessionalServiceAdmin(PgwebAdmin):
list_display = ('__unicode__', 'approved',)
list_filter = ('approved',)
search_fields = ('org__name',)
+
admin.site.register(ProfessionalService, ProfessionalServiceAdmin)
from pgweb.core.models import Organisation
from models import ProfessionalService
+
class ProfessionalServiceForm(forms.ModelForm):
form_intro = """Note that in order to register a new professional service, you must first register an organisation.
If you have not done so, use <a href="/account/organisations/new/">this form</a>."""
+
def __init__(self, *args, **kwargs):
super(ProfessionalServiceForm, self).__init__(*args, **kwargs)
+
def filter_by_user(self, user):
self.fields['org'].queryset = Organisation.objects.filter(managers=user, approved=True)
+
class Meta:
model = ProfessionalService
exclude = ('submitter', 'approved', )
from pgweb.core.models import Organisation
+
class ProfessionalService(models.Model):
approved = models.BooleanField(null=False, blank=False, default=False)
- org = models.OneToOneField(Organisation, null=False, blank=False,
- db_column="organisation_id",
- verbose_name="organisation",
- help_text="If no organisations are listed, please check the <a href=\"/account/orglist/\">organisation list</a> and contact the organisation manager or <a href=\"mailto:webmaster@postgresql.org\">webmaster@postgresql.org</a> if none are listed.")
- description = models.TextField(null=False,blank=False)
+ org = models.OneToOneField(Organisation, null=False, blank=False,
+ db_column="organisation_id",
+ verbose_name="organisation",
+ help_text="If no organisations are listed, please check the <a href=\"/account/orglist/\">organisation list</a> and contact the organisation manager or <a href=\"mailto:webmaster@postgresql.org\">webmaster@postgresql.org</a> if none are listed.")
+ description = models.TextField(null=False, blank=False)
employees = models.CharField(max_length=32, null=True, blank=True)
locations = models.CharField(max_length=128, null=True, blank=True)
region_africa = models.BooleanField(null=False, default=False, verbose_name="Africa")
from views import regions
+
def get_struct():
for key, name in regions:
yield ('support/professional_support/%s/' % key, None)
from forms import ProfessionalServiceForm
regions = (
- ('africa','Africa'),
- ('asia','Asia'),
- ('europe','Europe'),
- ('northamerica','North America'),
- ('oceania','Oceania'),
- ('southamerica','South America'),
+ ('africa', 'Africa'),
+ ('asia', 'Asia'),
+ ('europe', 'Europe'),
+ ('northamerica', 'North America'),
+ ('oceania', 'Oceania'),
+ ('southamerica', 'South America'),
)
+
def root(request, servtype):
- title = servtype=='support' and 'Professional Services' or 'Hosting Providers'
- what = servtype=='support' and 'support' or 'hosting'
- support = servtype=='support'
+ title = servtype == 'support' and 'Professional Services' or 'Hosting Providers'
+ what = servtype == 'support' and 'support' or 'hosting'
+ support = servtype == 'support'
return render_pgweb(request, 'support', 'profserv/root.html', {
'title': title,
'support': support,
def region(request, servtype, regionname):
- regname = [n for r,n in regions if r==regionname]
+ regname = [n for r, n in regions if r == regionname]
if not regname:
raise Http404
regname = regname[0]
- what = servtype=='support' and 'support' or 'hosting'
- whatname = servtype=='support' and 'Professional Services' or 'Hosting Providers'
+ what = servtype == 'support' and 'support' or 'hosting'
+ whatname = servtype == 'support' and 'Professional Services' or 'Hosting Providers'
title = "%s - %s" % (whatname, regname)
- support = servtype=='support'
+ support = servtype == 'support'
# DB model is a bit funky here, so use the extra-where functionality to filter properly.
# Field names are cleaned up earlier, so it's safe against injections.
- services = ProfessionalService.objects.select_related('org').filter(approved=True).extra(where=["region_%s AND provides_%s" % (regionname, what),])
+ services = ProfessionalService.objects.select_related('org').filter(approved=True).extra(where=["region_%s AND provides_%s" % (regionname, what), ])
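+ # e.g. for regionname 'europe' and servtype 'support', the injected
+ # fragment is simply "region_europe AND provides_support", matching the
+ # region_*/provides_* boolean columns on the model.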
return render_pgweb(request, 'support', 'profserv/list.html', {
'title': title,
from pgweb.util.admin import PgwebAdmin
from models import PUG
+
class PUGAdmin(PgwebAdmin):
list_display = ('title', 'approved', )
list_filter = ('approved', )
search_fields = ('title', )
+
admin.site.register(PUG, PUGAdmin)
from django.db import models
+
class PUG(models.Model):
"""
contains information about a local PostgreSQL user group
from models import PUG
+
def index(request):
"""
contains a list of PUGs, in country/locale alphabetical order
from django.contrib import admin
from models import Quote
+
class QuoteAdmin(admin.ModelAdmin):
list_display = ('quote', 'who', 'org', )
+
admin.site.register(Quote, QuoteAdmin)
from django.db import models
+
class Quote(models.Model):
approved = models.BooleanField(null=False, default=False)
quote = models.TextField(null=False, blank=False)
-#from django.db import models
+# from django.db import models
# it, so we allow development installs to run without it...
try:
import pylibmc
- has_memcached=True
+ has_memcached = True
except:
- has_memcached=False
+ has_memcached = False
+
def generate_pagelinks(pagenum, totalpages, querystring):
# Generate a list of links to page through a search result
if pagenum > 1:
# Prev link
- yield '<a href="%s&p=%s">Prev</a>' % (querystring, pagenum-1)
+ yield '<a href="%s&p=%s">Prev</a>' % (querystring, pagenum - 1)
if pagenum > 10:
start = pagenum - 10
else:
start = 1
- for i in range(start, min(start+20, totalpages + 1)):
+ for i in range(start, min(start + 20, totalpages + 1)):
if i == pagenum:
yield "%s" % i
else:
yield '<a href="%s&p=%s">%s</a>' % (querystring, i, i)
- if pagenum != min(start+20, totalpages):
- yield '<a href="%s&p=%s">Next</a>' % (querystring, pagenum+1)
+ if pagenum != min(start + 20, totalpages):
+ yield '<a href="%s&p=%s">Next</a>' % (querystring, pagenum + 1)
@csrf_exempt
dateval = 365
sortoptions = (
- {'val':'r', 'text': 'Rank', 'selected': not (request.GET.has_key('s') and request.GET['s'] == 'd')},
- {'val':'d', 'text': 'Date', 'selected': request.GET.has_key('s') and request.GET['s'] == 'd'},
- {'val':'i', 'text': 'Reverse date', 'selected': request.GET.has_key('s') and request.GET['s'] == 'i'},
- )
+ {'val': 'r', 'text': 'Rank', 'selected': not (request.GET.has_key('s') and request.GET['s'] == 'd')},
+ {'val': 'd', 'text': 'Date', 'selected': request.GET.has_key('s') and request.GET['s'] == 'd'},
+ {'val': 'i', 'text': 'Reverse date', 'selected': request.GET.has_key('s') and request.GET['s'] == 'i'},
+ )
dateoptions = (
{'val': -1, 'text': 'anytime'},
{'val': 1, 'text': 'within last day'},
{'val': 31, 'text': 'within last month'},
{'val': 186, 'text': 'within last 6 months'},
{'val': 365, 'text': 'within last year'},
- )
+ )
else:
searchlists = False
if request.GET.has_key('u'):
if not request.GET.has_key('q') or request.GET['q'] == '':
if searchlists:
return render(request, 'search/listsearch.html', {
- 'search_error': "No search term specified.",
- 'sortoptions': sortoptions,
- 'lists': MailingList.objects.all().order_by("group__sortkey"),
- 'listid': listid,
- 'dates': dateoptions,
- 'dateval': dateval,
- })
+ 'search_error': "No search term specified.",
+ 'sortoptions': sortoptions,
+ 'lists': MailingList.objects.all().order_by("group__sortkey"),
+ 'listid': listid,
+ 'dates': dateoptions,
+ 'dateval': dateval,
+ })
else:
return render(request, 'search/sitesearch.html', {
- 'search_error': "No search term specified.",
- })
+ 'search_error': "No search term specified.",
+ })
query = request.GET['q'].strip()
# Anti-stefan prevention
if len(query) > 1000:
return render(request, 'search/sitesearch.html', {
'search_error': "Search term too long.",
- })
+ })
# Is the request being paged?
if request.GET.has_key('p'):
p = {
'q': query.encode('utf-8'),
's': listsort,
- }
+ }
if listid:
if listid < 0:
# This is a list group, we expand that on the web server
# If memcached is available, let's try it
hits = None
if has_memcached:
- memc = pylibmc.Client(['127.0.0.1',], binary=True)
+ memc = pylibmc.Client(['127.0.0.1', ], binary=True)
# behavior not supported on pylibmc in squeeze: behaviors={'tcp_nodelay': True}
try:
hits = memc.get(urlstr)
else:
c = httplib.HTTPSConnection(settings.ARCHIVES_SEARCH_SERVER, strict=True, timeout=5)
c.request('POST', '/archives-search/', urlstr, {'Content-type': 'application/x-www-form-urlencoded; charset=utf-8'})
- c.sock.settimeout(20) # Set a 20 second timeout
+ c.sock.settimeout(20) # Set a 20 second timeout
try:
r = c.getresponse()
except (socket.timeout, ssl.SSLError):
return render(request, 'search/listsearch.html', {
- 'search_error': 'Timeout when talking to search server. Please try your search again later, or with a more restrictive search terms.',
- })
+ 'search_error': 'Timeout when talking to search server. Please try your search again later, or with more restrictive search terms.',
+ })
if r.status != 200:
memc = None
return render(request, 'search/listsearch.html', {
- 'search_error': 'Error talking to search server: %s' % r.reason,
- })
+ 'search_error': 'Error talking to search server: %s' % r.reason,
+ })
hits = json.loads(r.read())
if has_memcached and memc:
# Store them in memcached too! But only for 10 minutes...
# And always compress it, just because we can
- memc.set(urlstr, hits, 60*10, 1)
+ memc.set(urlstr, hits, 60 * 10, 1)
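+ # The trailing 1 is pylibmc's min_compress_len, so any value of at
+ # least one byte gets compressed; 60 * 10 is the TTL in seconds.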
memc = None
if isinstance(hits, dict):
listid or '',
dateval,
listsort
- )
+ )
return render(request, 'search/listsearch.html', {
- 'hitcount': totalhits,
- 'firsthit': firsthit,
- 'lasthit': min(totalhits, firsthit+hitsperpage-1),
- 'query': request.GET['q'],
- 'pagelinks': " ".join(
- generate_pagelinks(pagenum,
- totalhits / hitsperpage + 1,
- querystr)),
- 'hits': [{
- 'date': h['d'],
- 'subject': h['s'],
- 'author': h['f'],
- 'messageid': h['m'],
- 'abstract': h['a'],
- 'rank': h['r'],
- } for h in hits[firsthit-1:firsthit+hitsperpage-1]],
- 'sortoptions': sortoptions,
- 'lists': MailingList.objects.all().order_by("group__sortkey"),
- 'listid': listid,
- 'dates': dateoptions,
- 'dateval': dateval,
- })
+ 'hitcount': totalhits,
+ 'firsthit': firsthit,
+ 'lasthit': min(totalhits, firsthit + hitsperpage - 1),
+ 'query': request.GET['q'],
+ 'pagelinks': " ".join(
+ generate_pagelinks(pagenum,
+ totalhits / hitsperpage + 1,
+ querystr)),
+ 'hits': [{
+ 'date': h['d'],
+ 'subject': h['s'],
+ 'author': h['f'],
+ 'messageid': h['m'],
+ 'abstract': h['a'],
+ 'rank': h['r'],
+ } for h in hits[firsthit - 1:firsthit + hitsperpage - 1]],
+ 'sortoptions': sortoptions,
+ 'lists': MailingList.objects.all().order_by("group__sortkey"),
+ 'listid': listid,
+ 'dates': dateoptions,
+ 'dateval': dateval,
+ })
else:
# Website search is still done by making a regular pgsql connection
curs = conn.cursor()
except:
return render(request, 'search/sitesearch.html', {
- 'search_error': 'Could not connect to search database.'
- })
+ 'search_error': 'Could not connect to search database.'
+ })
# This is kind of a hack, but... Some URLs are flagged as internal
# and should as such only be included in searches that explicitly
'allsites': allsites,
'suburl': suburl,
'internal': include_internal,
- })
+ })
except psycopg2.ProgrammingError:
return render(request, 'search/sitesearch.html', {
- 'search_error': 'Error executing search query.'
- })
+ 'search_error': 'Error executing search query.'
+ })
hits = curs.fetchall()
conn.close()
urllib.quote_plus(query.encode('utf-8')),
allsites and "1" or "0",
quoted_suburl,
- )
+ )
return render(request, 'search/sitesearch.html', {
- 'suburl': suburl,
- 'allsites': allsites,
- 'hitcount': totalhits,
- 'firsthit': firsthit,
- 'lasthit': min(totalhits, firsthit+hitsperpage-1),
- 'query': request.GET['q'],
- 'pagelinks': " ".join(
- generate_pagelinks(pagenum,
- totalhits / hitsperpage + 1,
- querystr)),
- 'hits': [{
- 'title': h[3],
- 'url': "%s%s" % (h[1], h[2]),
- 'abstract': h[4].replace("[[[[[[", "<strong>").replace("]]]]]]","</strong>"),
- 'rank': h[5]} for h in hits[:-1]],
- })
+ 'suburl': suburl,
+ 'allsites': allsites,
+ 'hitcount': totalhits,
+ 'firsthit': firsthit,
+ 'lasthit': min(totalhits, firsthit + hitsperpage - 1),
+ 'query': request.GET['q'],
+ 'pagelinks': " ".join(
+ generate_pagelinks(pagenum,
+ totalhits / hitsperpage + 1,
+ querystr)),
+ 'hits': [{
+ 'title': h[3],
+ 'url': "%s%s" % (h[1], h[2]),
+ 'abstract': h[4].replace("[[[[[[", "<strong>").replace("]]]]]]", "</strong>"),
+ 'rank': h[5]} for h in hits[:-1]],
+ })
from pgweb.news.models import NewsArticle
from models import SecurityPatch, SecurityPatchVersion
+
class VersionChoiceField(forms.ModelChoiceField):
def label_from_instance(self, obj):
return obj.numtree
+
class SecurityPatchVersionAdminForm(forms.ModelForm):
model = SecurityPatchVersion
version = VersionChoiceField(queryset=Version.objects.filter(tree__gt=0), required=True)
+
class SecurityPatchVersionAdmin(admin.TabularInline):
model = SecurityPatchVersion
extra = 2
form = SecurityPatchVersionAdminForm
+
class SecurityPatchForm(forms.ModelForm):
model = SecurityPatch
newspost = forms.ModelChoiceField(queryset=NewsArticle.objects.filter(org=settings.PGDG_ORG_ID), required=False)
def clean(self):
d = super(SecurityPatchForm, self).clean()
- vecs = [v for k,v in d.items() if k.startswith('vector_')]
+ vecs = [v for k, v in d.items() if k.startswith('vector_')]
empty = [v for v in vecs if v == '']
if len(empty) != len(vecs) and len(empty) != 0:
for k in d.keys():
self.add_error(k, 'Either specify all vector values or none')
return d
+
class SecurityPatchAdmin(admin.ModelAdmin):
form = SecurityPatchForm
exclude = ['cvenumber', ]
def make_public(self, request, queryset):
self.do_public(queryset, True)
+
def make_unpublic(self, request, queryset):
self.do_public(queryset, False)
+
def do_public(self, queryset, val):
# Intentionally loop and do manually, so we generate change notices
for p in queryset.all():
- p.public=val
+ p.public = val
p.save()
+
admin.site.register(SecurityPatch, SecurityPatchAdmin)
import requests
+
class Command(BaseCommand):
help = 'Update CVE links'
import cvss
-vector_choices = {k:list(v.items()) for k,v in cvss.constants3.METRICS_VALUE_NAMES.items()}
+vector_choices = {k: list(v.items()) for k, v in cvss.constants3.METRICS_VALUE_NAMES.items()}
component_choices = (
('core server', 'Core server product'),
('other', 'Other'),
)
+
re_cve = re.compile('^(\d{4})-(\d{4,5})$')
+
+
def cve_validator(val):
if not re_cve.match(val):
raise ValidationError("Enter CVE in format 0000-0000 without the CVE text")
+
def other_vectors_validator(val):
if val != val.upper():
raise ValidationError("Vector must be uppercase")
try:
for vector in val.split('/'):
- k,v = vector.split(':')
+ k, v = vector.split(':')
if not cvss.constants3.METRICS_VALUES.has_key(k):
raise ValidationError("Metric {0} is unknown".format(k))
if k in ('AV', 'AC', 'PR', 'UI', 'S', 'C', 'I', 'A'):
raise ValidationError("Metric {0} must be specified in the dropdowns".format(k))
if not cvss.constants3.METRICS_VALUES[k].has_key(v):
raise ValidationError("Metric {0} has unknown value {1}. Valind ones are: {2}".format(
- k,v,
+ k, v,
", ".join(cvss.constants3.METRICS_VALUES[k].keys()),
))
except ValidationError:
except Exception, e:
raise ValidationError("Failed to parse vectors: %s" % e)
+
class SecurityPatch(models.Model):
public = models.BooleanField(null=False, blank=False, default=False)
newspost = models.ForeignKey(NewsArticle, null=True, blank=True)
- cve = models.CharField(max_length=32, null=False, blank=True, validators=[cve_validator,])
+ cve = models.CharField(max_length=32, null=False, blank=True, validators=[cve_validator, ])
cve_visible = models.BooleanField(null=False, blank=False, default=False)
cvenumber = models.IntegerField(null=False, blank=False, db_index=True)
detailslink = models.URLField(null=False, blank=True)
vector_c = models.CharField(max_length=1, null=False, blank=True, verbose_name="Confidentiality Impact", choices=vector_choices['C'])
vector_i = models.CharField(max_length=1, null=False, blank=True, verbose_name="Integrity Impact", choices=vector_choices['I'])
vector_a = models.CharField(max_length=1, null=False, blank=True, verbose_name="Availability Impact", choices=vector_choices['A'])
- legacyscore = models.CharField(max_length=1, null=False, blank=True, verbose_name='Legacy score', choices=(('A', 'A'),('B','B'),('C','C'),('D','D')))
+ legacyscore = models.CharField(max_length=1, null=False, blank=True, verbose_name='Legacy score', choices=(('A', 'A'), ('B', 'B'), ('C', 'C'), ('D', 'D')))
purge_urls = ('/support/security/', )
verbose_name_plural = 'Security patches'
ordering = ('-cvenumber',)
+
class SecurityPatchVersion(models.Model):
patch = models.ForeignKey(SecurityPatch, null=False, blank=False)
version = models.ForeignKey(Version, null=False, blank=False)
fixed_minor = models.IntegerField(null=False, blank=False)
-
from pgweb.core.models import Version
from models import SecurityPatch
+
def GetPatchesList(filt):
return SecurityPatch.objects.raw("SELECT p.*, array_agg(CASE WHEN v.tree >= 10 THEN v.tree::int ELSE v.tree END ORDER BY v.tree) AS affected, array_agg(CASE WHEN v.tree >= 10 THEN v.tree::int ELSE v.tree END || '.' || fixed_minor ORDER BY v.tree) AS fixed FROM security_securitypatch p INNER JOIN security_securitypatchversion sv ON p.id=sv.patch_id INNER JOIN core_version v ON v.id=sv.version_id WHERE p.public AND {0} GROUP BY p.id ORDER BY cvenumber DESC".format(filt))
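+# The CASE expressions exist because version trees for 10 and up are
+# whole numbers: casting them to int renders "10" rather than "10.0" in
+# the affected/fixed arrays.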
+
def _list_patches(request, filt):
patches = GetPatchesList(filt)
),
})
+
def index(request):
# Show all supported versions
return _list_patches(request, "v.supported")
+
def version(request, numtree):
version = get_object_or_404(Version, tree=numtree)
# It's safe to pass in the value since we get it from the module, not from
MANAGERS = ADMINS
-DATABASES={
+DATABASES = {
'default': {
'ENGINE': 'django.db.backends.postgresql_psycopg2',
'NAME': 'pgweb',
- }
}
+}
# Local time zone for this installation. Choices can be found here:
# http://en.wikipedia.org/wiki/List_of_tz_zones_by_name
'pgweb.util.middleware.PgMiddleware',
]
-CSRF_FAILURE_VIEW='pgweb.core.views.csrf_failure'
+CSRF_FAILURE_VIEW = 'pgweb.core.views.csrf_failure'
ROOT_URLCONF = 'pgweb.urls'
},
}]
-LOGIN_URL='/account/login/'
-LOGIN_REDIRECT_URL='/account/'
-LOGOUT_URL='/account/logout/'
+LOGIN_URL = '/account/login/'
+LOGIN_REDIRECT_URL = '/account/'
+LOGOUT_URL = '/account/logout/'
AUTHENTICATION_BACKENDS = (
'pgweb.util.auth.AuthBackend',
]
# Default format for date/time (as it changes between machines)
-DATETIME_FORMAT="Y-m-d H:i:s"
+DATETIME_FORMAT = "Y-m-d H:i:s"
# Configure recaptcha. Most details contain keys and are thus handled
# in settings_local.py. Override NOCAPTCHA to actually use them.
-NOCAPTCHA=True
-RECAPTCHA_SITE_KEY=""
-RECAPTCHA_SECRET_KEY=""
+NOCAPTCHA = True
+RECAPTCHA_SITE_KEY = ""
+RECAPTCHA_SECRET_KEY = ""
###
# Application specific settings, likely overridden in settings_local.py.
#
# In particular, adjust the email addresses
###
-SESSION_COOKIE_SECURE=True # Allow our session only over https
-SESSION_COOKIE_DOMAIN="www.postgresql.org" # Don't allow access by other postgresql.org sites
-SESSION_COOKIE_HTTPONLY=True # Access over http only, no js
-CSRF_COOKIE_SECURE=SESSION_COOKIE_SECURE
-CSRF_COOKIE_DOMAIN=SESSION_COOKIE_DOMAIN
-CSRF_COOKIE_HTTPONLY=SESSION_COOKIE_HTTPONLY
-
-SITE_ROOT="http://www.postgresql.org" # Root of working URLs
-FTP_PICKLE="/usr/local/pgweb/ftpsite.pickle" # Location of file with current contents from ftp site
-YUM_JSON="/usr/local/pgweb/external/yum.json"
-STATIC_CHECKOUT="/usr/local/pgweb-static" # Location of a checked out pgweb-static project
-NOTIFICATION_EMAIL="someone@example.com" # Address to send notifications *to*
-NOTIFICATION_FROM="someone@example.com" # Address to send notifications *from*
-ACCOUNTS_NOREPLY_FROM="someone@example.com" # Address to send account messages from
-BUGREPORT_EMAIL="someone@example.com" # Address to pgsql-bugs list
-BUGREPORT_NOREPLY_EMAIL="someone-noreply@example.com" # Address to no-reply pgsql-bugs address
-DOCSREPORT_EMAIL="someone@example.com" # Address to pgsql-docs list
-DOCSREPORT_NOREPLY_EMAIL="someone-noreply@example.com" # Address to no-reply pgsql-docs address
-FRONTEND_SERVERS=() # A tuple containing the *IP addresses* of all the
- # varnish frontend servers in use.
-FTP_MASTERS=() # A tuple containing the *IP addresses* of all machines
- # trusted to upload ftp structure data
-VARNISH_PURGERS=() # Extra servers that can do varnish purges through our queue
-DO_ESI=False # Generate ESI tags
-ARCHIVES_SEARCH_SERVER="archives.postgresql.org" # Where to post REST request for archives search
-ARCHIVES_SEARCH_PLAINTEXT=False # Contact ARCHIVES_SEARCH_SERVER with http instead of https
-FRONTEND_SMTP_RELAY="magus.postgresql.org" # Where to relay user generated email
-OAUTH={} # OAuth providers and keys
-PGDG_ORG_ID=-1 # id of the PGDG organisation entry
+SESSION_COOKIE_SECURE = True # Allow our session only over https
+SESSION_COOKIE_DOMAIN = "www.postgresql.org" # Don't allow access by other postgresql.org sites
+SESSION_COOKIE_HTTPONLY = True # HttpOnly - cookie not readable from javascript
+CSRF_COOKIE_SECURE = SESSION_COOKIE_SECURE
+CSRF_COOKIE_DOMAIN = SESSION_COOKIE_DOMAIN
+CSRF_COOKIE_HTTPONLY = SESSION_COOKIE_HTTPONLY
+
+SITE_ROOT = "http://www.postgresql.org" # Root of working URLs
+FTP_PICKLE = "/usr/local/pgweb/ftpsite.pickle" # Location of file with current contents from ftp site
+YUM_JSON = "/usr/local/pgweb/external/yum.json"
+STATIC_CHECKOUT = "/usr/local/pgweb-static" # Location of a checked out pgweb-static project
+NOTIFICATION_EMAIL = "someone@example.com" # Address to send notifications *to*
+NOTIFICATION_FROM = "someone@example.com" # Address to send notifications *from*
+ACCOUNTS_NOREPLY_FROM = "someone@example.com" # Address to send account messages from
+BUGREPORT_EMAIL = "someone@example.com" # Address to pgsql-bugs list
+BUGREPORT_NOREPLY_EMAIL = "someone-noreply@example.com" # Address to no-reply pgsql-bugs address
+DOCSREPORT_EMAIL = "someone@example.com" # Address to pgsql-docs list
+DOCSREPORT_NOREPLY_EMAIL = "someone-noreply@example.com" # Address to no-reply pgsql-docs address
+FRONTEND_SERVERS = () # A tuple containing the *IP addresses* of all the
+ # varnish frontend servers in use.
+FTP_MASTERS = () # A tuple containing the *IP addresses* of all machines
+ # trusted to upload ftp structure data
+VARNISH_PURGERS = () # Extra servers that can do varnish purges through our queue
+DO_ESI = False # Generate ESI tags
+ARCHIVES_SEARCH_SERVER = "archives.postgresql.org" # Where to post REST request for archives search
+ARCHIVES_SEARCH_PLAINTEXT = False # Contact ARCHIVES_SEARCH_SERVER with http instead of https
+FRONTEND_SMTP_RELAY = "magus.postgresql.org" # Where to relay user generated email
+OAUTH = {} # OAuth providers and keys
+PGDG_ORG_ID = -1 # id of the PGDG organisation entry
# Load local settings overrides
from settings_local import *
from pgweb.core.models import Country
+
class SponsorType(models.Model):
typename = models.CharField(max_length=32, null=False, blank=False)
description = models.TextField(null=False, blank=False)
class Meta:
ordering = ('sortkey', )
+
class Sponsor(models.Model):
sponsortype = models.ForeignKey(SponsorType, null=False)
name = models.CharField(max_length=128, null=False, blank=False)
class Meta:
ordering = ('name', )
+
class Server(models.Model):
name = models.CharField(max_length=32, null=False, blank=False)
sponsors = models.ManyToManyField(Sponsor)
from models import Sponsor, Server
+
@cache(minutes=30)
def sponsors(request):
- sponsors = Sponsor.objects.select_related().filter(sponsortype__sortkey__gt=0).order_by('sponsortype__sortkey' ,'?')
+ sponsors = Sponsor.objects.select_related().filter(sponsortype__sortkey__gt=0).order_by('sponsortype__sortkey', '?')
return render_pgweb(request, 'about', 'sponsors/sponsors.html', {
'sponsors': sponsors,
})
+
def servers(request):
servers = Server.objects.select_related().all()
return render_pgweb(request, 'about', 'sponsors/servers.html', {
from django.contrib import admin
from models import Survey, SurveyLock, SurveyAnswer
+
class SurveyAdmin(admin.ModelAdmin):
- list_display = ('question','posted','current',)
- ordering = ('-posted',)
+ list_display = ('question', 'posted', 'current', )
+ ordering = ('-posted', )
+
class SurveyAnswerAdmin(admin.ModelAdmin):
- list_display = ('survey','tot1','tot2','tot3','tot4','tot5','tot6','tot7','tot8')
- ordering = ('-survey__posted',)
+ list_display = ('survey', 'tot1', 'tot2', 'tot3', 'tot4', 'tot5', 'tot6', 'tot7', 'tot8')
+ ordering = ('-survey__posted', )
+
admin.site.register(Survey, SurveyAdmin)
admin.site.register(SurveyLock)
from django.db import models
+
# internal text/value object
class SurveyQuestion(object):
def __init__(self, value, text):
self.value = value
self.text = text
+
+
class SurveyAnswerValues(object):
def __init__(self, option, votes, votespercent):
self.option = option
self.votes = votes
self.votespercent = votespercent
+
class Survey(models.Model):
question = models.CharField(max_length=500, null=False, blank=False)
opt1 = models.CharField(max_length=500, null=False, blank=False)
@property
def questions(self):
- for i in range (1,9):
+ for i in range(1, 9):
v = getattr(self, "opt%s" % i)
if not v: break
yield SurveyQuestion(i, v)
@property
def completeanswers(self):
for a in self._get_complete_answers():
- yield SurveyAnswerValues(a[0], a[1], self.totalvotes>0 and (100*a[1]/self.totalvotes) or 0)
+ yield SurveyAnswerValues(a[0], a[1], self.totalvotes > 0 and (100 * a[1] / self.totalvotes) or 0)
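+ # NB: Python 2 integer division, so with e.g. 3 total votes an option
+ # holding 1 vote reports 33, not 33.3.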
@property
def totalvotes(self):
- if not hasattr(self,"_totalvotes"):
+ if not hasattr(self, "_totalvotes"):
self._totalvotes = 0
for a in self._get_complete_answers():
self._totalvotes = self._totalvotes + a[1]
return self._totalvotes
def _get_complete_answers(self):
- for i in range(1,9):
+ for i in range(1, 9):
q = getattr(self, "opt%s" % i)
if not q: break
n = getattr(self.answers, "tot%s" % i)
- yield (q,n)
+ yield (q, n)
def save(self):
# Make sure only one survey at a time can be the current one
for p in previous:
if not p == self:
p.current = False
- p.save() # primary key check avoids recursion
+ p.save() # primary key check avoids recursion
# Now that we've made any previously current ones non-current, we are
# free to save this one.
super(Survey, self).save()
+
class SurveyAnswer(models.Model):
survey = models.OneToOneField(Survey, null=False, blank=False, primary_key=True)
tot1 = models.IntegerField(null=False, default=0)
purge_urls = ('/community/survey', )
+
class SurveyLock(models.Model):
ipaddr = models.GenericIPAddressField(null=False, blank=False)
time = models.DateTimeField(null=False, auto_now_add=True)
from models import Survey, SurveyAnswer, SurveyLock
+
def results(request, surveyid, junk=None):
survey = get_object_or_404(Survey, pk=surveyid)
surveylist = Survey.objects.all().order_by('-posted')
'surveylist': surveylist,
})
+
# This view is served over insecure HTTP, and the Varnish proxy strips cookies
@csrf_exempt
def vote(request, surveyid):
lock.save()
answers = SurveyAnswer.objects.get_or_create(survey=surv)[0]
- setattr(answers, attrname, getattr(answers, attrname)+1)
+ setattr(answers, attrname, getattr(answers, attrname) + 1)
answers.save()
# Do explicit varnish purge, since it seems that the model doesn't
for x in queryset:
x.delete()
custom_delete_selected.short_description = "Delete selected items"
- actions=['custom_delete_selected']
+ actions = ['custom_delete_selected']
def save_model(self, request, obj, form, change):
if change and hasattr(self.model, 'send_notification') and self.model.send_notification:
msgstr)
# Also generate a mail to the moderators
- send_simple_mail(settings.NOTIFICATION_FROM,
- settings.NOTIFICATION_EMAIL,
- "Moderation comment on %s %s" % (obj.__class__._meta.verbose_name, obj.id),
- _get_moderator_notification_text(obj,
- request.POST['new_notification'],
- request.user.username
- ))
-
+ send_simple_mail(
+ settings.NOTIFICATION_FROM,
+ settings.NOTIFICATION_EMAIL,
+ "Moderation comment on %s %s" % (obj.__class__._meta.verbose_name, obj.id),
+ _get_moderator_notification_text(
+ obj,
+ request.POST['new_notification'],
+ request.user.username
+ )
+ )
# Either no notifications, or done with notifications
super(PgwebAdmin, self).save_model(request, obj, form, change)
""" % (objtype, txt)
-
def _get_moderator_notification_text(obj, txt, moderator):
return """Moderator %s made a comment to a pending object:
Object type: %s
from django.contrib.auth.models import User
from django.contrib.auth.backends import ModelBackend
+
# Special version of the authentication backend, so we can handle things like
# forced lowercasing of usernames.
class AuthBackend(ModelBackend):
# User not found, so clearly they can't log in!
return None
- return None # Should never get here, but just in case...
+ return None # Should never get here, but just in case...
# This is the whole site navigation structure. Stick in a smarter file?
sitenav = {
'about': [
- {'title': 'About', 'link':'/about/'},
- {'title': 'Code of Conduct', 'link':'/about/policies/coc/', 'submenu': [
- {'title': 'Committee', 'link':'/about/policies/coc_committee/'}
+ {'title': 'About', 'link': '/about/'},
+ {'title': 'Code of Conduct', 'link': '/about/policies/coc/', 'submenu': [
+ {'title': 'Committee', 'link': '/about/policies/coc_committee/'}
]},
- {'title': 'Feature Matrix', 'link':'/about/featurematrix/'},
- {'title': 'Donate', 'link':'/about/donate/'},
- {'title': 'History', 'link':'/docs/current/history.html'},
- {'title': 'Sponsors', 'link':'/about/sponsors/', 'submenu': [
- {'title': 'Servers', 'link': '/about/servers/'},
+ {'title': 'Feature Matrix', 'link': '/about/featurematrix/'},
+ {'title': 'Donate', 'link': '/about/donate/'},
+ {'title': 'History', 'link': '/docs/current/history.html'},
+ {'title': 'Sponsors', 'link': '/about/sponsors/', 'submenu': [
+ {'title': 'Servers', 'link': '/about/servers/'},
]},
- {'title': 'Latest News', 'link':'/about/newsarchive/'},
- {'title': 'Upcoming Events', 'link':'/about/events/'},
- {'title': 'Press', 'link':'/about/press/'},
- {'title': 'Licence', 'link':'/about/licence/'},
+ {'title': 'Latest News', 'link': '/about/newsarchive/'},
+ {'title': 'Upcoming Events', 'link': '/about/events/'},
+ {'title': 'Press', 'link': '/about/press/'},
+ {'title': 'Licence', 'link': '/about/licence/'},
],
'download': [
- {'title': 'Downloads', 'link':'/download/', 'submenu': [
- {'title': 'Binary', 'link':'/download/'},
- {'title': 'Source', 'link':'/ftp/source/'}
+ {'title': 'Downloads', 'link': '/download/', 'submenu': [
+ {'title': 'Binary', 'link': '/download/'},
+ {'title': 'Source', 'link': '/ftp/source/'}
]},
- {'title': 'Software Catalogue', 'link':'/download/product-categories/'},
- {'title': 'File Browser', 'link':'/ftp/'},
+ {'title': 'Software Catalogue', 'link': '/download/product-categories/'},
+ {'title': 'File Browser', 'link': '/ftp/'},
],
'docs': [
- {'title': 'Documentation', 'link':'/docs/'},
- {'title': 'Manuals', 'link':'/docs/manuals/', 'submenu': [
- {'title': 'Archive', 'link':'/docs/manuals/archive/'},
- {'title': 'French', 'link':'https://docs.postgresql.fr/'},
- {'title': 'Japanese', 'link':'http://www.postgresql.jp/document/'},
- {'title': 'Russian', 'link':'https://postgrespro.ru/docs/postgresql'},
+ {'title': 'Documentation', 'link': '/docs/'},
+ {'title': 'Manuals', 'link': '/docs/manuals/', 'submenu': [
+ {'title': 'Archive', 'link': '/docs/manuals/archive/'},
+ {'title': 'French', 'link': 'https://docs.postgresql.fr/'},
+ {'title': 'Japanese', 'link': 'http://www.postgresql.jp/document/'},
+ {'title': 'Russian', 'link': 'https://postgrespro.ru/docs/postgresql'},
]},
- {'title': 'Books', 'link':'/docs/books/'},
- {'title': 'Online Resources', 'link':'/docs/online-resources/'},
- {'title': 'Wiki', 'link':'https://wiki.postgresql.org'},
+ {'title': 'Books', 'link': '/docs/books/'},
+ {'title': 'Online Resources', 'link': '/docs/online-resources/'},
+ {'title': 'Wiki', 'link': 'https://wiki.postgresql.org'},
],
'community': [
- {'title': 'Community', 'link':'/community/'},
- {'title': 'Contributors', 'link':'/community/contributors/'},
- {'title': 'Mailing Lists', 'link':'/list/'},
- {'title': 'IRC', 'link':'/community/irc/'},
- {'title': 'Slack', 'link':'https://postgres-slack.herokuapp.com/'},
- {'title': 'Local User Groups', 'link':'/community/user-groups/'},
- {'title': 'Events', 'link':'/about/events/'},
- {'title': 'International Sites','link':'/community/international/'},
- {'title': 'Recognition Guidelines','link':'/community/recognition/'},
+ {'title': 'Community', 'link': '/community/'},
+ {'title': 'Contributors', 'link': '/community/contributors/'},
+ {'title': 'Mailing Lists', 'link': '/list/'},
+ {'title': 'IRC', 'link': '/community/irc/'},
+ {'title': 'Slack', 'link': 'https://postgres-slack.herokuapp.com/'},
+ {'title': 'Local User Groups', 'link': '/community/user-groups/'},
+ {'title': 'Events', 'link': '/about/events/'},
+ {'title': 'International Sites', 'link': '/community/international/'},
+ {'title': 'Recognition Guidelines', 'link': '/community/recognition/'},
],
'developer': [
- {'title': 'Developers', 'link':'/developer/'},
- {'title': 'Core Team', 'link':'/developer/core/'},
- {'title': 'Roadmap', 'link':'/developer/roadmap/'},
- {'title': 'Coding', 'link':'/developer/coding/'},
- {'title': 'Testing', 'link':'/developer/testing/', 'submenu': [
- {'title': 'Beta Information', 'link':'/developer/beta/'},
+ {'title': 'Developers', 'link': '/developer/'},
+ {'title': 'Core Team', 'link': '/developer/core/'},
+ {'title': 'Roadmap', 'link': '/developer/roadmap/'},
+ {'title': 'Coding', 'link': '/developer/coding/'},
+ {'title': 'Testing', 'link': '/developer/testing/', 'submenu': [
+ {'title': 'Beta Information', 'link': '/developer/beta/'},
]},
- {'title': 'Mailing Lists', 'link':'/list/'},
- {'title': 'Developer FAQ', 'link':'https://wiki.postgresql.org/wiki/Developer_FAQ'},
+ {'title': 'Mailing Lists', 'link': '/list/'},
+ {'title': 'Developer FAQ', 'link': 'https://wiki.postgresql.org/wiki/Developer_FAQ'},
],
'support': [
- {'title': 'Support', 'link':'/support/'},
- {'title': 'Versioning Policy', 'link':'/support/versioning/'},
- {'title': 'Security', 'link':'/support/security/'},
- {'title': 'Professional Services','link':'/support/professional_support/'},
- {'title': 'Hosting Solutions', 'link':'/support/professional_hosting/'},
- {'title': 'Report a Bug', 'link':'/account/submitbug/'},
+ {'title': 'Support', 'link': '/support/'},
+ {'title': 'Versioning Policy', 'link': '/support/versioning/'},
+ {'title': 'Security', 'link': '/support/security/'},
+ {'title': 'Professional Services', 'link': '/support/professional_support/'},
+ {'title': 'Hosting Solutions', 'link': '/support/professional_hosting/'},
+ {'title': 'Report a Bug', 'link': '/account/submitbug/'},
],
'account': [
- {'title': 'Your account', 'link':'/account'},
- {'title': 'Profile', 'link':'/account/profile'},
- {'title': 'Submitted data', 'link':'/account', 'submenu': [
- {'title': 'News Articles', 'link':'/account/edit/news/'},
- {'title': 'Events', 'link':'/account/edit/events/'},
- {'title': 'Products', 'link':'/account/edit/products/'},
- {'title': 'Professional Services', 'link':'/account/edit/services/'},
- {'title': 'Organisations', 'link':'/account/edit/organisations/'},
+ {'title': 'Your account', 'link': '/account'},
+ {'title': 'Profile', 'link': '/account/profile'},
+ {'title': 'Submitted data', 'link': '/account', 'submenu': [
+ {'title': 'News Articles', 'link': '/account/edit/news/'},
+ {'title': 'Events', 'link': '/account/edit/events/'},
+ {'title': 'Products', 'link': '/account/edit/products/'},
+ {'title': 'Professional Services', 'link': '/account/edit/services/'},
+ {'title': 'Organisations', 'link': '/account/edit/organisations/'},
]},
- {'title': 'Change password', 'link':'/account/changepwd/'},
- {'title': 'Logout', 'link':'/account/logout'},
+ {'title': 'Change password', 'link': '/account/changepwd/'},
+ {'title': 'Logout', 'link': '/account/logout'},
],
}
else:
return {}
+
def render_pgweb(request, section, template, context):
context['navmenu'] = get_nav_menu(section)
return render(request, template, context)
+
def _get_gitrev():
# Return the current git revision, that is used for
# cache-busting URLs.
# If packed-refs also can't be read, just give up
return 'eeeeeeee'
+
# Template context processor to add information about the root link and
# the current git revision. git revision is returned as a lazy object so
# we don't spend effort trying to load it if we don't need it (though
from collections import defaultdict
from django.contrib.auth.decorators import login_required as django_login_required
+
def nocache(fn):
def _nocache(request, *_args, **_kwargs):
resp = fn(request, *_args, **_kwargs)
return resp
return _nocache
+
def cache(days=0, hours=0, minutes=0, seconds=0):
"Set the server to cache object a specified time. td must be a timedelta object"
def _cache(fn):
def __cache(request, *_args, **_kwargs):
resp = fn(request, *_args, **_kwargs)
td = datetime.timedelta(days=days, hours=hours, minutes=minutes, seconds=seconds)
- resp['Cache-Control'] = 's-maxage=%s' % (td.days*3600*24 + td.seconds)
+ resp['Cache-Control'] = 's-maxage=%s' % (td.days * 3600 * 24 + td.seconds)
return resp
return __cache
return _cache
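+# Usage sketch (hypothetical view): decorating it with @cache(hours=1)
+# emits "Cache-Control: s-maxage=3600", keeping the page in the
+# frontends for an hour.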
+
def allow_frames(fn):
def _allow_frames(request, *_args, **_kwargs):
resp = fn(request, *_args, **_kwargs)
return resp
return _allow_frames
+
def content_sources(what, source):
def _script_sources(fn):
def __script_sources(request, *_args, **_kwargs):
return __script_sources
return _script_sources
+
def script_sources(source):
return content_sources('script', source)
+
def frame_sources(source):
return content_sources('frame', source)
+
# A wrapped version of login_required that throws an exception if it's
# used on a path that's not under /account/.
def login_required(f):
from django.template.loader import get_template
import django.utils.xmlutils
+
def simple_form(instancetype, itemid, request, formclass, formtemplate='base/form.html', redirect='/account/', navsection='account', fixedfields=None, createifempty=False):
if itemid == 'new':
instance = instancetype()
# Set fixed fields. Note that this will not work if the fixed fields are ManyToMany,
# but we'll fix that sometime in the future
if fixedfields:
- for k,v in fixedfields.items():
+ for k, v in fixedfields.items():
setattr(r, k, v)
r.save()
'operation': (itemid == "new") and "New" or "Edit",
})
-def template_to_string(templatename, attrs = {}):
+
+def template_to_string(templatename, attrs={}):
return get_template(templatename).render(attrs)
+
def HttpServerError(request, msg):
r = render(request, 'errors/500.html', {
- 'message': msg,
- })
+ 'message': msg,
+ })
r.status_code = 500
return r
self.skipempty = skipempty
def add_xml_element(self, name, value):
- if self.skipempty and value=='': return
+ if self.skipempty and value == '': return
self.startElement(name, {})
self.characters(value)
self.endElement(name)
from django.utils._threading_local import local
_thread_locals = local()
+
+
def get_current_user():
return getattr(_thread_locals, 'user', None)
return None
def process_request(self, request):
-# Thread local store for username, see comment at the top of this file
+ # Thread local store for username, see comment at the top of this file
_thread_locals.user = getattr(request, 'user', None)
initialize_template_collection()
('connect', ["'self'", "www.google-analytics.com", "ssl.google-analytics.com"]),
('media', ["'self'", ]),
('style', ["'self'", "fonts.googleapis.com"]),
- ('font', ["'self'", "fonts.gstatic.com", "data:" ]),
+ ('font', ["'self'", "fonts.gstatic.com", "data:", ]),
])
if hasattr(response, 'x_allow_extra_sources'):
- for k,v in response.x_allow_extra_sources.items():
+ for k, v in response.x_allow_extra_sources.items():
if k in sources:
sources[k].extend(v)
else:
sources[k] = v
- security_policies = ["{0}-src {1}".format(k," ".join(v)) for k,v in sources.items()]
+ security_policies = ["{0}-src {1}".format(k, " ".join(v)) for k, v in sources.items()]
if not getattr(response, 'x_allow_frames', False):
response['X-Frame-Options'] = 'DENY'
from pgweb.util.helpers import template_to_string
import re
+
def send_template_mail(sender, receiver, subject, templatename, templateattr={}, usergenerated=False, cc=None, replyto=None, receivername=None, sendername=None, messageid=None):
d = {
'link_root': settings.SITE_ROOT,
receivername=receivername, sendername=sendername,
messageid=messageid)
+
def get_client_ip(request):
"""
Get the IP of the client. If the client is served through our Varnish caches,
"""
connection.cursor().execute("SELECT varnish_purge_xkey(%s)", (xkey, ))
+
def varnish_purge(url):
"""
Purge the specified URL from Varnish. Will add initial anchor to the URL,
url = '^%s' % url
connection.cursor().execute("SELECT varnish_purge(%s)", (url, ))
+
def varnish_purge_expr(expr):
"""
Purge the specified expression from Varnish. Does not modify the expression
"""
connection.cursor().execute("SELECT varnish_purge_expr(%s)", (expr, ))
+
def version_sort(l):
"""
map a directory name to a format that will show up sensibly in an ascii sort
generally don't have that.
"""
mkey = l['link']
- m = re.match('v?([0-9]+)\.([0-9]+)\.([0-9]+)$',l['url'])
+ m = re.match('v?([0-9]+)\.([0-9]+)\.([0-9]+)$', l['url'])
if m:
- mkey = m.group(1) + '%02d' % int(m.group(2)) + '%02d' % int(m.group(3));
- m = re.match('v?([0-9]+)\.([0-9]+)$',l['url'])
+ mkey = m.group(1) + '%02d' % int(m.group(2)) + '%02d' % int(m.group(3))
+ m = re.match('v?([0-9]+)\.([0-9]+)$', l['url'])
if m:
- mkey = m.group(1) + '%02d' % int(m.group(2));
+ mkey = m.group(1) + '%02d' % int(m.group(2))
# SOOO ugly. But if it's v10 and up, just prefix it to get it higher
if int(m.group(1)) >= 10:
mkey = 'a' + mkey
return mkey
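+# For illustration: an entry whose url is 'v9.4.1' maps to '90401', while
+# 'v10.2' maps to 'a1002'; the 'a' prefix keeps two-digit majors above
+# every 9.x key in a plain ascii sort.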
+
def generate_random_token():
"""
Generate a random token of 64 characters. This token will be
from pgweb.profserv.models import ProfessionalService
from pgweb.quotes.models import Quote
+
# Pending moderation requests (including URLs for the admin interface)
def _get_unapproved_list(objecttype):
objects = objecttype.objects.filter(approved=False)
if not len(objects): return None
- return { 'name': objects[0]._meta.verbose_name_plural, 'entries':
- [{'url': '/admin/%s/%s/%s/' % (x._meta.app_label, x._meta.model_name, x.pk), 'title': unicode(x)} for x in objects]
- }
+ return {
+ 'name': objects[0]._meta.verbose_name_plural,
+ 'entries': [{'url': '/admin/%s/%s/%s/' % (x._meta.app_label, x._meta.model_name, x.pk), 'title': unicode(x)} for x in objects]
+ }
+
def get_all_pending_moderations():
applist = [
_get_unapproved_list(Product),
_get_unapproved_list(ProfessionalService),
_get_unapproved_list(Quote),
- ]
+ ]
return [x for x in applist if x]
from pgweb.util.misc import varnish_purge
from pgweb.mailqueue.util import send_simple_mail
+
def _build_url(obj):
if obj.id:
return "%s/admin/%s/%s/%s/" % (
obj._meta.model_name,
)
+
def _get_full_text_diff(obj, oldobj):
fieldlist = _get_all_notification_fields(obj)
if not fieldlist:
return "This object does not know how to express ifself."
- s = "\n\n".join(["\n".join(filter(lambda x: not x.startswith('@@'),
- difflib.unified_diff(
- _get_attr_value(oldobj, n).splitlines(),
- _get_attr_value(obj, n).splitlines(),
- n=1,
- lineterm='',
- fromfile=n,
- tofile=n,
- ))
+ s = "\n\n".join(["\n".join(
+ filter(
+ lambda x: not x.startswith('@@'),
+ difflib.unified_diff(
+ _get_attr_value(oldobj, n).splitlines(),
+ _get_attr_value(obj, n).splitlines(),
+ n=1,
+ lineterm='',
+ fromfile=n,
+ tofile=n,
+ )
+ )
) for n in fieldlist if _get_attr_value(oldobj, n) != _get_attr_value(obj, n)])
if not s: return None
return s
+
def _get_all_notification_fields(obj):
if hasattr(obj, 'notify_fields'):
return obj.notify_fields
# that are local to this model (not auto created)
return [f.name for f in obj._meta.get_fields() if not f.name in ('approved', 'submitter', 'id', ) and not f.auto_created]
+
def _get_attr_value(obj, fieldname):
# see if this is a Many-to-many field. If yes, we want to print
# it out as a pretty list
# Return the value, or an empty string if it's NULL (migrated records)
return unicode(getattr(obj, fieldname)) or ''
+
def _get_full_text_representation(obj):
fieldlist = _get_all_notification_fields(obj)
if not fieldlist:
return "\n".join([u'%s: %s' % (n, _get_attr_value(obj, n)) for n in fieldlist])
+
def _get_notification_text(obj):
try:
oldobj = obj.__class__.objects.get(pk=obj.pk)
return ('{0} id {1} has been modified'.format(obj._meta.verbose_name, obj.id),
'The following fields have been modified:\n\n%s' % diff)
+
def my_pre_save_handler(sender, **kwargs):
instance = kwargs['instance']
if getattr(instance, 'send_notification', False) and get_current_user():
"%s by %s" % (subj, get_current_user()),
cont)
+
def my_m2m_changed_handler(sender, **kwargs):
instance = kwargs['instance']
if getattr(instance, 'send_m2m_notification', False) and get_current_user():
(cl, f) = sender.__name__.split('_')
if not hasattr(instance, '_stored_m2m'):
- instance._stored_m2m={}
+ instance._stored_m2m = {}
if kwargs['action'] == 'pre_clear':
- instance._stored_m2m[f] = set([unicode(t) for t in getattr(instance,f).all()])
+ instance._stored_m2m[f] = set([unicode(t) for t in getattr(instance, f).all()])
elif kwargs['action'] == 'post_add':
- newset = set([unicode(t) for t in getattr(instance,f).all()])
+ newset = set([unicode(t) for t in getattr(instance, f).all()])
added = newset.difference(instance._stored_m2m.get(f, set()))
removed = instance._stored_m2m.get(f, set()).difference(newset)
subj = '{0} id {1} has been modified'.format(instance._meta.verbose_name, instance.id)
if added or removed:
send_simple_mail(settings.NOTIFICATION_FROM,
- settings.NOTIFICATION_EMAIL,
- "%s by %s" % (subj, get_current_user()),
- "The following values for {0} were changed:\n\n{1}\n{2}\n\n".format(
- instance._meta.get_field(f).verbose_name,
- "\n".join([u"Added: %s" % a for a in added]),
- "\n".join([u"Removed: %s" % r for r in removed]),
- ))
+ settings.NOTIFICATION_EMAIL,
+ "%s by %s" % (subj, get_current_user()),
+ "The following values for {0} were changed:\n\n{1}\n{2}\n\n".format(
+ instance._meta.get_field(f).verbose_name,
+ "\n".join([u"Added: %s" % a for a in added]),
+ "\n".join([u"Removed: %s" % r for r in removed]),
+ ))
+
def my_pre_delete_handler(sender, **kwargs):
instance = kwargs['instance']
instance._meta.verbose_name,
instance.id,
get_current_user()),
- _get_full_text_representation(instance))
+ _get_full_text_representation(instance))
+
def my_post_save_handler(sender, **kwargs):
instance = kwargs['instance']
purgelist = instance.purge_urls
map(varnish_purge, purgelist)
+
def register_basic_signal_handlers():
pre_save.connect(my_pre_save_handler)
pre_delete.connect(my_pre_delete_handler)
from django.conf import settings
+
def get_all_pages_struct(method='get_struct'):
"""
Return an iterator over all distinct pages on the site.
for app in settings.INSTALLED_APPS:
if app.startswith('pgweb.'):
try:
- m = __import__(app+".struct", {}, {}, method)
+ m = __import__(app + ".struct", {}, {}, method)
except:
# Failed to import - probably the module didn't exist
continue
_thread_locals = local()
+
def initialize_template_collection():
_thread_locals.templates = []
+
def get_all_templates():
return getattr(_thread_locals, 'templates', [])
+
class TrackingTemplateLoader(django.template.loaders.base.Loader):
def get_template_sources(self, template_name):
_thread_locals.templates = getattr(_thread_locals, 'templates', []) + [template_name, ]
r = Random.new()
key = r.read(32)
print base64.b64encode(key)
-
from Crypto import Random
import time
+
class AuthBackend(ModelBackend):
# We declare a fake backend that always fails direct authentication -
# since we should never be using direct authentication in the first place!
r = Random.new()
iv = r.read(16)
encryptor = AES.new(SHA.new(settings.SECRET_KEY).digest()[:16], AES.MODE_CBC, iv)
- cipher = encryptor.encrypt(s + ' ' * (16-(len(s) % 16))) # pad to 16 bytes
+ cipher = encryptor.encrypt(s + ' ' * (16 - (len(s) % 16))) # pad to 16 bytes
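+ # The space padding adds 1..16 bytes (16 when len(s) is already a
+ # multiple of 16); the receiving end is expected to strip trailing
+ # spaces after decrypting.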
return HttpResponseRedirect("%s?d=%s$%s" % (
- settings.PGAUTH_REDIRECT,
- base64.b64encode(iv, "-_"),
- base64.b64encode(cipher, "-_"),
- ))
+ settings.PGAUTH_REDIRECT,
+ base64.b64encode(iv, "-_"),
+ base64.b64encode(cipher, "-_"),
+ ))
else:
return HttpResponseRedirect(settings.PGAUTH_REDIRECT)
+
# Handle logout requests by logging out of this site and then
# redirecting to log out from the main site as well.
def logout(request):
django_logout(request)
return HttpResponseRedirect("%slogout/" % settings.PGAUTH_REDIRECT)
+
# Receive an authentication response from the main website and try
# to log the user in.
def auth_receive(request):
changed = True
if user.email != data['e'][0]:
user.email = data['e'][0]
- changed= True
+ changed = True
if changed:
user.save()
except User.DoesNotExist:
u = urllib.urlopen('%ssearch/?%s' % (
settings.PGAUTH_REDIRECT,
urllib.urlencode(q),
- ))
+ ))
(ivs, datas) = u.read().split('&')
u.close()
return j
+
# Import a user into the local authentication system. Will initially
# make a search for it, and if anything other than one entry is returned
# the import will fail.
'f': options.first,
'l': options.last,
'e': options.email,
- }
+ }
if options.suburl:
info['su'] = options.suburl
# the first block more random..
# Since this is a fake authentication, put it 5 minutes into the future to
# give more time to copy/paste it.
- s = "t=%s&%s" % (int(time.time()+300), urllib.urlencode(info))
+ s = "t=%s&%s" % (int(time.time() + 300), urllib.urlencode(info))
r = Random.new()
iv = r.read(16)
encryptor = AES.new(base64.b64decode(options.key), AES.MODE_CBC, iv)
- cipher = encryptor.encrypt(s + ' ' * (16-(len(s) % 16)))
+ cipher = encryptor.encrypt(s + ' ' * (16 - (len(s) % 16)))
print "Paste the following after the receiving url:"
print "?i=%s&d=%s" % (
base64.b64encode(iv, "-_"),
base64.b64encode(cipher, "-_"),
- )
+ )
re_titlematch = re.compile('<title\s*>([^<]+)</title\s*>', re.IGNORECASE)
-## Load a single page
+
+# Load a single page
def load_doc_file(filename, f):
- tidyopts = dict(drop_proprietary_attributes=1,
- alt_text='',
- hide_comments=1,
- output_xhtml=1,
- show_body_only=1,
- clean=1,
- char_encoding='utf8',
- indent='auto',
- )
+ tidyopts = dict(
+ drop_proprietary_attributes=1,
+ alt_text='',
+ hide_comments=1,
+ output_xhtml=1,
+ show_body_only=1,
+ clean=1,
+ char_encoding='utf8',
+ indent='auto',
+ )
# Postgres 10 started using an xml toolchain and now produces documentation in utf8. So we need
# to figure out which version it is.
if not quiet: print "--- file: %s (%s) ---" % (filename, title)
s = tidy.parseString(contents.encode('utf-8'), **tidyopts)
- curs.execute("INSERT INTO docs (file, version, title, content) VALUES (%(f)s, %(v)s, %(t)s, %(c)s)",{
+ curs.execute("INSERT INTO docs (file, version, title, content) VALUES (%(f)s, %(v)s, %(t)s, %(c)s)", {
'f': filename,
'v': ver,
't': title,
global pagecount
pagecount += 1
-## Main execution
+# Main execution
parser = OptionParser(usage="usage: %prog [options] <version> <tarfile>")
parser.add_option("-q", "--quiet", action="store_true", dest="quiet",
help="Run quietly")
connection.close()
if not quiet: print "Done (%i pages)." % pagecount
-
allnodes = {}
+
def read_file(fn):
f = codecs.open(fn, 'r', encoding='utf-8', errors='replace')
t = f.read()
f.close()
return t
+
def parse_directory(dirname, rootlen):
mynode = {}
for f in os.listdir(dirname):
mynode[f] = {
't': 'l',
'd': os.readlink(fn).strip("/"),
- }
+ }
else:
# This is a subdirectory, recurse into it, unless it happens
# to be on our exclude list.
allnodes[dirname[rootlen:].strip("/")] = mynode
+
def Usage():
print "Usage: spider_ftp.py <ftp_root> <pickle_file>"
print ""
print "to that URL instead of written to the filesystem."
sys.exit(1)
+
if len(sys.argv) != 3: Usage()
parse_directory(sys.argv[1], len(sys.argv[1]))
}
archs = ['x86_64', 'i386', 'i686', 'ppc64le']
+
def generate_platform(dirname, familyprefix, ver, installer, systemd):
for f in platform_names.keys():
yield ('%s-%s' % (f, ver), {
'f': f,
'i': installer,
'd': systemd,
- 's': platform_sort[f]*1000-ver,
+ 's': platform_sort[f] * 1000 - ver,
'found': False,
- })
+ })
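+# The sort key puts newer versions of a family first: assuming a
+# hypothetical platform_sort['redhat'] of 1, RHEL 7 gets s=993 and
+# RHEL 6 gets s=994.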
+
def get_redhat_systemd(ver):
return (ver >= 7)
+
platforms = {}
-for v in range(5, 7+1):
+for v in range(5, 7 + 1):
platforms.update(dict(generate_platform('redhat', 'rhel', v, 'yum', get_redhat_systemd(v))))
-for v in range(24, 30+1):
+for v in range(24, 30 + 1):
platforms.update(dict(generate_platform('fedora', 'fedora', v, 'dnf', True)))
re_reporpm = re.compile('^pgdg-([a-z0-9-]+)([0-9]{2})-[^-]+-(\d+)\.noarch\.rpm$')
break
else:
# DEBUG
-# print "%s (%s) not found in platform list" % (familypath, shortdist)
+ # print "%s (%s) not found in platform list" % (familypath, shortdist)
pass
# Filter all platforms that are not used
- platforms = {k:v for k,v in platforms.iteritems() if v['found']}
- for k,v in platforms.iteritems():
+ platforms = {k: v for k, v in platforms.iteritems() if v['found']}
+ for k, v in platforms.iteritems():
del v['found']
j = json.dumps({'platforms': platforms, 'reporpms': reporpms})
import re
import HTMLParser
-BOUNDARY="-=--=foobar-=--="
+BOUNDARY = "-=--=foobar-=--="
+
def encode_multipart_formdata(fields, files):
L = []
body = "\r\n".join(L)
return body
-if __name__=="__main__":
+
+if __name__ == "__main__":
if len(sys.argv) != 2:
print "Usage: localhtmlvalidate.py <local url>"
sys.exit(1)
firstline = 0
# Generate a form body
- body = encode_multipart_formdata([
+ body = encode_multipart_formdata(
+ [
('charset', 'utf-8'),
('doctype', 'inline'),
('group', '0'),
('verbose', '1'),
- ],
- [('uploaded_file', 'test.html', contents)])
+ ],
+ [('uploaded_file', 'test.html', contents)]
+ )
# Now submit it to the w3c validator
h = httplib.HTTP("validator.w3.org")
print "Unknown status: %s" % headers['x-w3c-validator-status']
print headers
sys.exit(1)
-
-
import psycopg2
# Templates that we don't want to ban automatically
-BANNED_TEMPLATES=(
+BANNED_TEMPLATES = (
'base/base.html',
)
from lib.log import log
from lib.parsers import ArchivesParser
+
class MultiListCrawler(object):
def __init__(self, lists, conn, status_interval=30, commit_interval=500):
self.lists = lists
for listid, listname in self.lists:
if full:
# Generate a sequence of everything to index
- for year in range(1997, datetime.datetime.now().year+1):
- for month in range(1,13):
+ for year in range(1997, datetime.datetime.now().year + 1):
+ for month in range(1, 13):
self.queue.put((listid, listname, year, month, -1))
elif month:
# Do one specific month
curs = self.conn.cursor()
curr = datetime.date.today()
if curr.month == 1:
- prev = datetime.date(curr.year-1, 12, 1)
+ prev = datetime.date(curr.year - 1, 12, 1)
else:
- prev = datetime.date(curr.year, curr.month-1, 1)
+ prev = datetime.date(curr.year, curr.month - 1, 1)
for d in curr, prev:
# Figure out what the highest indexed page in this
# month is.
curs.execute("SELECT max(msgnum) FROM messages WHERE list=%(list)s AND year=%(year)s AND month=%(month)s", {
- 'list': listid,
- 'year': d.year,
- 'month': d.month,
- })
+ 'list': listid,
+ 'year': d.year,
+ 'month': d.month,
+ })
x = curs.fetchall()
- if x[0][0] != None:
+ if x[0][0] is not None:
maxmsg = x[0][0]
for x in range(5):
t = threading.Thread(name="Indexer %s" % x,
- target = lambda: self.crawl_from_queue())
- t.daemon= True
+ target=lambda: self.crawl_from_queue())
+ t.daemon = True
t.start()
- t = threading.Thread(name="statusthread", target = lambda: self.status_thread())
+ t = threading.Thread(name="statusthread", target=lambda: self.status_thread())
t.daemon = True
t.start()
with self.counterlock:
log("Indexed %s messages so far (%s active threads, %s months still queued, %.1f msg/sec)" % (
self.counter,
- threading.active_count() - 2 , # main thread + status thread
+ threading.active_count() - 2, # main thread + status thread
self.queue.qsize(),
self.counter / (nowtime - starttime),
- ))
+ ))
# Commit every 500 messages
if self.counter - lastcommit > self.commit_interval:
lastcommit = self.counter
# We return true to move on to the next message anyway!
return True
curs.execute("INSERT INTO messages (list, year, month, msgnum, date, subject, author, txt, fti) VALUES (%(listid)s, %(year)s, %(month)s, %(msgnum)s, %(date)s, %(subject)s, %(author)s, %(txt)s, setweight(to_tsvector('pg', %(subject)s), 'A') || to_tsvector('pg', %(txt)s))", {
- 'listid': listid,
- 'year': year,
- 'month': month,
- 'msgnum': msgnum,
- 'date': p.date,
- 'subject': p.subject[:127],
- 'author': p.author[:127],
- 'txt': p.body,
- })
+ 'listid': listid,
+ 'year': year,
+ 'month': month,
+ 'msgnum': msgnum,
+ 'date': p.date,
+ 'subject': p.subject[:127],
+ 'author': p.author[:127],
+ 'txt': p.body,
+ })
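# (setweight marks the subject with weight 'A', so subject matches rank
#  above body matches when ts_rank is applied at query time)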
with self.counterlock:
self.counter += 1
from lib.log import log
from lib.parsers import GenericHtmlParser, lossy_unicode
+
class BaseSiteCrawler(object):
def __init__(self, hostname, dbconn, siteid, serverip=None, https=False):
self.hostname = hostname
# Fire off worker threads
for x in range(5):
t = threading.Thread(name="Indexer %s" % x,
- target = lambda: self.crawl_from_queue())
+ target=lambda: self.crawl_from_queue())
t.daemon = True
t.start()
- t = threading.Thread(name="statusthread", target = lambda: self.status_thread())
+ t = threading.Thread(name="statusthread", target=lambda: self.status_thread())
t.daemon = True
t.start()
# Remove all pages that we didn't crawl
curs = self.dbconn.cursor()
curs.execute("DELETE FROM webpages WHERE site=%(site)s AND NOT suburl=ANY(%(urls)s)", {
- 'site': self.siteid,
- 'urls': self.pages_crawled.keys(),
- })
+ 'site': self.siteid,
+ 'urls': self.pages_crawled.keys(),
+ })
if curs.rowcount:
log("Deleted %s pages no longer accessible" % curs.rowcount)
self.pages_deleted += curs.rowcount
threading.active_count() - 2,
self.queue.qsize(),
len(self.pages_crawled) / (nowtime - starttime),
- ))
+ ))
def crawl_from_queue(self):
while not self.stopevent.is_set():
return False
def crawl_page(self, url, relprio, internal):
- if self.pages_crawled.has_key(url) or self.pages_crawled.has_key(url+"/"):
+ if self.pages_crawled.has_key(url) or self.pages_crawled.has_key(url + "/"):
return
if self.exclude_url(url):
# Page failed to load or was a redirect, so remove from database
curs = self.dbconn.cursor()
curs.execute("DELETE FROM webpages WHERE site=%(id)s AND suburl=%(url)s", {
- 'id': self.siteid,
- 'url': url,
- })
+ 'id': self.siteid,
+ 'url': url,
+ })
with self.counterlock:
self.pages_deleted += curs.rowcount
'url': url,
'relprio': relprio,
'internal': internal,
- }
+ }
curs = self.dbconn.cursor()
curs.execute("UPDATE webpages SET title=%(title)s, txt=%(txt)s, fti=setweight(to_tsvector('public.pg', %(title)s), 'A') || to_tsvector('public.pg', %(txt)s), lastscanned=%(lastmod)s, relprio=%(relprio)s, isinternal=%(internal)s WHERE site=%(site)s AND suburl=%(url)s", params)
if curs.rowcount != 1:
self.pages_updated += 1
ACCEPTED_CONTENTTYPES = ("text/html", "text/plain", )
+
def accept_contenttype(self, contenttype):
# Split apart if there is a "; charset=" in it
- if contenttype.find(";"):
+ # note: find() returns -1 (which is truthy) when no ";" is present
+ if ";" in contenttype:
- contenttype = contenttype.split(';',2)[0]
+ contenttype = contenttype.split(';', 2)[0]
return contenttype in self.ACCEPTED_CONTENTTYPES
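# e.g. accept_contenttype("text/html; charset=utf-8") -> True,
#      accept_contenttype("application/pdf") -> False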
def fetch_page(self, url):
else:
h = httplib.HTTPSConnection(host=self.hostname, port=443, strict=True, timeout=10, context=ssl._create_unverified_context())
h.putrequest("GET", url)
- h.putheader("User-agent","pgsearch/0.2")
- h.putheader("Connection","close")
+ h.putheader("User-agent", "pgsearch/0.2")
+ h.putheader("Connection", "close")
if self.scantimes.has_key(url):
h.putheader("If-Modified-Since", formatdate(time.mktime(self.scantimes[url].timetuple())))
h.endheaders()
# No redirect at all found, because it was invalid?
return (2, None, None)
else:
- #print "Url %s returned status %s" % (url, resp.status)
+ # print "Url %s returned status %s" % (url, resp.status)
pass
except Exception, e:
log("Exception when loading url %s: %s" % (url, e))
from basecrawler import BaseSiteCrawler
from parsers import RobotsParser
+
class GenericSiteCrawler(BaseSiteCrawler):
def __init__(self, hostname, dbconn, siteid, https=False):
super(GenericSiteCrawler, self).__init__(hostname, dbconn, siteid, https=https)
# robots.txt ones)
curs = self.dbconn.cursor()
curs.execute("SELECT suburlre FROM site_excludes WHERE site=%(site)s", {
- 'site': self.siteid,
- })
+ 'site': self.siteid,
+ })
self.extra_excludes = [re.compile(x) for x, in curs.fetchall()]
# We *always* crawl the root page, of course
def post_process_page(self, url):
for l in self.resolve_links(self.page.links, url):
- if self.pages_crawled.has_key(l) or self.pages_crawled.has_key(l+"/"):
+ if self.pages_crawled.has_key(l) or self.pages_crawled.has_key(l + "/"):
continue
if self.exclude_url(l):
continue
# Yes, this is trivial, but we might want to put something
# more here in the future :)
import datetime
+
+
def log(msg):
print "%s: %s" % (datetime.datetime.now(), msg)
-
from lib.log import log
+
class GenericHtmlParser(HTMLParser):
def __init__(self):
HTMLParser.__init__(self)
if tag == "body":
self.inbody = True
if tag == "a":
- for a,v in attrs:
+ for a, v in attrs:
if a == "href":
self.links.append(v)
self.inbody = False
DATA_IGNORE_TAGS = ("script",)
+
def handle_data(self, data):
d = data.strip()
if len(d) < 2:
class ArchivesParser(object):
rematcher = re.compile("<!--X-Subject: ([^\n]*) -->.*<!--X-From-R13: ([^\n]*) -->.*<!--X-Date: ([^\n]*) -->.*<!--X-Body-of-Message-->(.*)<!--X-Body-of-Message-End-->", re.DOTALL)
hp = HTMLParser()
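# Illustrative input (hypothetical values, shaped by the regex above): the
# MHonArc pages being crawled embed their metadata in HTML comments, e.g.
#   <!--X-Subject: Re: [HACKERS] example thread -->
#   <!--X-From-R13: <rot13-obfuscated author> -->
#   <!--X-Date: Wed, 5 May 1999 10:00:00 +0200 -->
#   <!--X-Body-of-Message--> ... <!--X-Body-of-Message-End-->
# The X-From-R13 value is de-obfuscated by almost_rot13() below.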
+
def __init__(self):
self.subject = None
self.author = None
- _date_multi_re = re.compile(' \((\w+\s\w+|)\)$')
- _date_trailing_envelope = re.compile('\s+\(envelope.*\)$')
+ _date_multi_re = re.compile(r' \((\w+\s\w+|)\)$')
+ _date_trailing_envelope = re.compile(r'\s+\(envelope.*\)$')
+
def parse_date(self, d):
# For some reason, we have dates that look like this:
# http://archives.postgresql.org/pgsql-bugs/1999-05/msg00018.php
# So we copy the brokenness here.
# This code is from MHonArc/ewhutil.pl, mrot13()
_arot13_trans = dict(zip(map(ord,
- u'@ABCDEFGHIJKLMNOPQRSTUVWXYZ[abcdefghijklmnopqrstuvwxyz'),
- u'NOPQRSTUVWXYZ[@ABCDEFGHIJKLMnopqrstuvwxyzabcdefghijklm'))
+ u'@ABCDEFGHIJKLMNOPQRSTUVWXYZ[abcdefghijklmnopqrstuvwxyz'),
+ u'NOPQRSTUVWXYZ[@ABCDEFGHIJKLMnopqrstuvwxyzabcdefghijklm'))
+
def almost_rot13(self, s):
return unicode(s).translate(self._arot13_trans)
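# Editorial note: lowercase letters get plain rot13, while the uppercase
# range is rotated across a 28-character ring that also includes '@' and
# '[' (MHonArc's mrot13). E.g. almost_rot13(u'uryyb') == u'hello'.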
+
class RobotsParser(object):
def __init__(self, url):
try:
from lib.log import log
from lib.basecrawler import BaseSiteCrawler
+
class SitemapParser(object):
def __init__(self):
self.urls = []
self.getlastmod = False
self.currstr = ""
self.internal = False
- self.parser.StartElementHandler = lambda name,attrs: self.processelement(name,attrs)
+ self.parser.StartElementHandler = lambda name, attrs: self.processelement(name, attrs)
self.parser.EndElementHandler = lambda name: self.processendelement(name)
self.parser.CharacterDataHandler = lambda data: self.processcharacterdata(data)
self.internal = internal
if self.geturl or self.getprio or self.getlastmod:
self.currstr += data
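# Illustrative input (standard sitemap protocol, hypothetical values): the
# handlers above presumably collect <loc>, <priority> and <lastmod> for
# each <url> element:
#   <url>
#     <loc>https://www.postgresql.org/about/</loc>
#     <priority>0.8</priority>
#     <lastmod>2018-01-01T00:00:00Z</lastmod>
#   </url>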
+
class SitemapSiteCrawler(BaseSiteCrawler):
def __init__(self, hostname, dbconn, siteid, serverip, https=False):
super(SitemapSiteCrawler, self).__init__(hostname, dbconn, siteid, serverip, https)
for url, prio, lastmod, internal in p.urls:
# Advance 8 characters - length of https://.
- url = url[len(self.hostname)+8:]
+ url = url[len(self.hostname) + 8:]
if lastmod:
if self.scantimes.has_key(url):
if lastmod < self.scantimes[url]:
from multiprocessing import Process
+
# Wrap a method call in a different process, so that we can process
# keyboard interrupts and actually terminate it if we have to.
# Python threading often makes it impossible to Ctrl-C otherwise.
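# Editorial sketch of the pattern described above (run_wrapped and its
# arguments are hypothetical names, not the original code):
def run_wrapped(func, *args):
    p = Process(target=func, args=args)
    p.start()
    try:
        while p.is_alive():
            p.join(1)  # short joins keep the parent responsive to Ctrl-C
    except KeyboardInterrupt:
        p.terminate()  # forcibly end the worker, which a thread can't do
        p.join()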
import sys
import time
+
def doit(opt):
cp = ConfigParser()
cp.read("search.ini")
psycopg2.extensions.register_type(psycopg2.extensions.UNICODE)
- conn = psycopg2.connect(cp.get("search","db"))
+ conn = psycopg2.connect(cp.get("search", "db"))
curs = conn.cursor()
if opt.list:
# Multiple lists can be specified with a comma separator (no spaces)
curs.execute("SELECT id,name FROM lists WHERE name=ANY(%(names)s)", {
- 'names': opt.list.split(','),
- })
+ 'names': opt.list.split(','),
+ })
else:
curs.execute("SELECT id,name FROM lists WHERE active ORDER BY id")
- listinfo = [(id,name) for id,name in curs.fetchall()]
+ listinfo = [(id, name) for id, name in curs.fetchall()]
c = MultiListCrawler(listinfo, conn, opt.status_interval, opt.commit_interval)
n = c.crawl(opt.full, opt.month)
log("Indexed %s messages" % n)
time.sleep(1)
-if __name__=="__main__":
+
+if __name__ == "__main__":
parser = OptionParser()
parser.add_option("-l", "--list", dest='list', help="Crawl only this list")
parser.add_option("-m", "--month", dest='month', help="Crawl only this month")
import urllib
import simplejson as json
-if __name__=="__main__":
+if __name__ == "__main__":
cp = ConfigParser()
cp.read("search.ini")
psycopg2.extensions.register_type(psycopg2.extensions.UNICODE)
- conn = psycopg2.connect(cp.get("search","db"))
+ conn = psycopg2.connect(cp.get("search", "db"))
curs = conn.cursor()
u = urllib.urlopen("http://%s/community/lists/listinfo/" % cp.get("search", "web"))
import psycopg2
import time
+
def doit():
psycopg2.extensions.register_type(psycopg2.extensions.UNICODE)
- conn = psycopg2.connect(cp.get("search","db"))
+ conn = psycopg2.connect(cp.get("search", "db"))
curs = conn.cursor()
time.sleep(1)
-if __name__=="__main__":
+if __name__ == "__main__":
cp = ConfigParser()
cp.read("search.ini")
from datetime import timedelta
# Up to 5 minutes delay is ok
-WARNING_THRESHOLD=timedelta(minutes=5)
+WARNING_THRESHOLD = timedelta(minutes=5)
# More than 15 minutes something is definitely wrong
-CRITICAL_THRESHOLD=timedelta(minutes=15)
+CRITICAL_THRESHOLD = timedelta(minutes=15)
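# Editorial sketch (assumption: this is a Nagios-style check, where exit
# status 0/1/2 means OK/WARNING/CRITICAL; check_lag is a hypothetical name):
def check_lag(lag):
    if lag > CRITICAL_THRESHOLD:
        return (2, "CRITICAL: lag is %s" % lag)
    if lag > WARNING_THRESHOLD:
        return (1, "WARNING: lag is %s" % lag)
    return (0, "OK: lag is %s" % lag)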
if __name__ == "__main__":
if len(sys.argv) != 2:
import psycopg2
from setproctitle import setproctitle
+
def do_purge(consumername, headers):
try:
conn = httplib.HTTPSConnection('%s.postgresql.org' % consumername)
return False
return True
+
def worker(consumerid, consumername, dsn):
logging.info("Starting worker for %s" % consumername)
setproctitle("varnish_queue - worker for %s" % consumername)
# Nothing, so roll back the transaction and wait
conn.rollback()
- select.select([conn],[],[],5*60)
+ select.select([conn], [], [], 5 * 60)
conn.poll()
while conn.notifies:
conn.notifies.pop()
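# (the NOTIFY payloads are discarded above; the notification only serves
#  as a wakeup so the worker re-checks its queue, with select() timing out
#  after five minutes as a safety net)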
conn.commit()
else:
conn.rollback()
- time.sleep(5*60)
+ time.sleep(5 * 60)
if __name__ == "__main__":