In a quest to reach pep8, use spaces to indent rather than tabs.
from models import CommunityAuthSite, CommunityAuthOrg
class CommunityAuthSiteAdminForm(forms.ModelForm):
- class Meta:
- model = CommunityAuthSite
- exclude = ()
+ class Meta:
+ model = CommunityAuthSite
+ exclude = ()
- def clean_cryptkey(self):
- x = None
- try:
- x = base64.b64decode(self.cleaned_data['cryptkey'])
- except TypeError:
- raise forms.ValidationError("Crypto key must be base64 encoded")
+ def clean_cryptkey(self):
+ x = None
+ try:
+ x = base64.b64decode(self.cleaned_data['cryptkey'])
+ except TypeError:
+ raise forms.ValidationError("Crypto key must be base64 encoded")
- if (len(x) != 16 and len(x) != 24 and len(x) != 32):
- raise forms.ValidationError("Crypto key must be 16, 24 or 32 bytes before being base64-encoded")
- return self.cleaned_data['cryptkey']
+ if (len(x) != 16 and len(x) != 24 and len(x) != 32):
+ raise forms.ValidationError("Crypto key must be 16, 24 or 32 bytes before being base64-encoded")
+ return self.cleaned_data['cryptkey']
class CommunityAuthSiteAdmin(admin.ModelAdmin):
- form = CommunityAuthSiteAdminForm
+ form = CommunityAuthSiteAdminForm
class PGUserChangeForm(UserChangeForm):
- """just like UserChangeForm, butremoves "username" requirement"""
- def __init__(self, *args, **kwargs):
- super(PGUserChangeForm, self).__init__(*args, **kwargs)
- # because the auth.User model is set to "blank=False" and the Django
- # auth.UserChangeForm is setup as a ModelForm, it will always validate
- # the "username" even though it is not present. Thus the best way to
- # avoid the validation is to remove the "username" field, if it exists
- if self.fields.get('username'):
- del self.fields['username']
+ """just like UserChangeForm, but removes "username" requirement"""
+ def __init__(self, *args, **kwargs):
+ super(PGUserChangeForm, self).__init__(*args, **kwargs)
+ # because the auth.User model is set to "blank=False" and the Django
+ # auth.UserChangeForm is setup as a ModelForm, it will always validate
+ # the "username" even though it is not present. Thus the best way to
+ # avoid the validation is to remove the "username" field, if it exists
+ if self.fields.get('username'):
+ del self.fields['username']
class PGUserAdmin(UserAdmin):
- """overrides default Django user admin"""
- form = PGUserChangeForm
-
- def get_readonly_fields(self, request, obj=None):
- """this prevents users from changing a username once created"""
- if obj:
- return self.readonly_fields + ('username',)
- return self.readonly_fields
+ """overrides default Django user admin"""
+ form = PGUserChangeForm
+
+ def get_readonly_fields(self, request, obj=None):
+ """this prevents users from changing a username once created"""
+ if obj:
+ return self.readonly_fields + ('username',)
+ return self.readonly_fields
admin.site.register(CommunityAuthSite, CommunityAuthSiteAdmin)
admin.site.register(CommunityAuthOrg)
log = logging.getLogger(__name__)
def _clean_username(username):
- username = username.lower()
+ username = username.lower()
- if not re.match('^[a-z0-9\.-]+$', username):
- raise forms.ValidationError("Invalid character in user name. Only a-z, 0-9, . and - allowed for compatibility with third party software.")
- try:
- User.objects.get(username=username)
- except User.DoesNotExist:
- return username
- raise forms.ValidationError("This username is already in use")
+ if not re.match('^[a-z0-9\.-]+$', username):
+ raise forms.ValidationError("Invalid character in user name. Only a-z, 0-9, . and - allowed for compatibility with third party software.")
+ try:
+ User.objects.get(username=username)
+ except User.DoesNotExist:
+ return username
+ raise forms.ValidationError("This username is already in use")
# Override some error handling only in the default authentication form
class PgwebAuthenticationForm(AuthenticationForm):
- def clean(self):
- try:
- return super(PgwebAuthenticationForm, self).clean()
- except ValueError, e:
- if e.message.startswith('Unknown password hashing algorithm'):
- # This is *probably* a user trying to log in with an account that has not
- # been set up properly yet. It could be an actually unsupported hashing
- # algorithm, but we'll deal with that when we get there.
- self._errors["__all__"] = self.error_class(["This account appears not to be properly initialized. Make sure you complete the signup process with the instructions in the email received before trying to use the account."])
- log.warning("User {0} tried to log in with invalid hash, probably because signup was completed.".format(self.cleaned_data['username']))
- return self.cleaned_data
- raise e
+ def clean(self):
+ try:
+ return super(PgwebAuthenticationForm, self).clean()
+ except ValueError, e:
+ if e.message.startswith('Unknown password hashing algorithm'):
+ # This is *probably* a user trying to log in with an account that has not
+ # been set up properly yet. It could be an actually unsupported hashing
+ # algorithm, but we'll deal with that when we get there.
+ self._errors["__all__"] = self.error_class(["This account appears not to be properly initialized. Make sure you complete the signup process with the instructions in the email received before trying to use the account."])
+ log.warning("User {0} tried to log in with invalid hash, probably because signup was not completed.".format(self.cleaned_data['username']))
+ return self.cleaned_data
+ raise e
class CommunityAuthConsentForm(forms.Form):
- consent = forms.BooleanField(help_text='Consent to sharing this data')
- next = forms.CharField(widget=forms.widgets.HiddenInput())
+ consent = forms.BooleanField(help_text='Consent to sharing this data')
+ next = forms.CharField(widget=forms.widgets.HiddenInput())
- def __init__(self, orgname, *args, **kwargs):
- self.orgname = orgname
- super(CommunityAuthConsentForm, self).__init__(*args, **kwargs)
+ def __init__(self, orgname, *args, **kwargs):
+ self.orgname = orgname
+ super(CommunityAuthConsentForm, self).__init__(*args, **kwargs)
- self.fields['consent'].label = 'Consent to sharing data with {0}'.format(self.orgname)
+ self.fields['consent'].label = 'Consent to sharing data with {0}'.format(self.orgname)
class SignupForm(forms.Form):
- username = forms.CharField(max_length=30)
- first_name = forms.CharField(max_length=30)
- last_name = forms.CharField(max_length=30)
- email = forms.EmailField()
- email2 = forms.EmailField(label="Repeat email")
- captcha = ReCaptchaField()
-
- def __init__(self, remoteip, *args, **kwargs):
- super(SignupForm, self).__init__(*args, **kwargs)
- self.fields['captcha'].set_ip(remoteip)
-
- def clean_email2(self):
- # If the primary email checker had an exception, the data will be gone
- # from the cleaned_data structure
- if not self.cleaned_data.has_key('email'):
- return self.cleaned_data['email2']
- email1 = self.cleaned_data['email'].lower()
- email2 = self.cleaned_data['email2'].lower()
-
- if email1 != email2:
- raise forms.ValidationError("Email addresses don't match")
- return email2
-
- def clean_username(self):
- return _clean_username(self.cleaned_data['username'])
-
- def clean_email(self):
- email = self.cleaned_data['email'].lower()
-
- try:
- User.objects.get(email=email)
- except User.DoesNotExist:
- return email
- raise forms.ValidationError("A user with this email address is already registered")
+ username = forms.CharField(max_length=30)
+ first_name = forms.CharField(max_length=30)
+ last_name = forms.CharField(max_length=30)
+ email = forms.EmailField()
+ email2 = forms.EmailField(label="Repeat email")
+ captcha = ReCaptchaField()
+
+ def __init__(self, remoteip, *args, **kwargs):
+ super(SignupForm, self).__init__(*args, **kwargs)
+ self.fields['captcha'].set_ip(remoteip)
+
+ def clean_email2(self):
+ # If the primary email checker had an exception, the data will be gone
+ # from the cleaned_data structure
+ if not self.cleaned_data.has_key('email'):
+ return self.cleaned_data['email2']
+ email1 = self.cleaned_data['email'].lower()
+ email2 = self.cleaned_data['email2'].lower()
+
+ if email1 != email2:
+ raise forms.ValidationError("Email addresses don't match")
+ return email2
+
+ def clean_username(self):
+ return _clean_username(self.cleaned_data['username'])
+
+ def clean_email(self):
+ email = self.cleaned_data['email'].lower()
+
+ try:
+ User.objects.get(email=email)
+ except User.DoesNotExist:
+ return email
+ raise forms.ValidationError("A user with this email address is already registered")
class SignupOauthForm(forms.Form):
- username = forms.CharField(max_length=30)
- first_name = forms.CharField(max_length=30, required=False)
- last_name = forms.CharField(max_length=30, required=False)
- email = forms.EmailField()
- captcha = ReCaptchaField()
-
- def __init__(self, *args, **kwargs):
- super(SignupOauthForm, self).__init__(*args, **kwargs)
- self.fields['first_name'].widget.attrs['readonly'] = True
- self.fields['first_name'].widget.attrs['disabled'] = True
- self.fields['last_name'].widget.attrs['readonly'] = True
- self.fields['last_name'].widget.attrs['disabled'] = True
- self.fields['email'].widget.attrs['readonly'] = True
- self.fields['email'].widget.attrs['disabled'] = True
-
- def clean_username(self):
- return _clean_username(self.cleaned_data['username'])
-
- def clean_email(self):
- return self.cleaned_data['email'].lower()
+ username = forms.CharField(max_length=30)
+ first_name = forms.CharField(max_length=30, required=False)
+ last_name = forms.CharField(max_length=30, required=False)
+ email = forms.EmailField()
+ captcha = ReCaptchaField()
+
+ def __init__(self, *args, **kwargs):
+ super(SignupOauthForm, self).__init__(*args, **kwargs)
+ self.fields['first_name'].widget.attrs['readonly'] = True
+ self.fields['first_name'].widget.attrs['disabled'] = True
+ self.fields['last_name'].widget.attrs['readonly'] = True
+ self.fields['last_name'].widget.attrs['disabled'] = True
+ self.fields['email'].widget.attrs['readonly'] = True
+ self.fields['email'].widget.attrs['disabled'] = True
+
+ def clean_username(self):
+ return _clean_username(self.cleaned_data['username'])
+
+ def clean_email(self):
+ return self.cleaned_data['email'].lower()
class UserProfileForm(forms.ModelForm):
- class Meta:
- model = UserProfile
- exclude = ('user',)
+ class Meta:
+ model = UserProfile
+ exclude = ('user',)
class UserForm(forms.ModelForm):
- def __init__(self, *args, **kwargs):
- super(UserForm, self).__init__(*args, **kwargs)
- self.fields['first_name'].required = True
- self.fields['last_name'].required = True
- class Meta:
- model = User
- fields = ('first_name', 'last_name', )
+ def __init__(self, *args, **kwargs):
+ super(UserForm, self).__init__(*args, **kwargs)
+ self.fields['first_name'].required = True
+ self.fields['last_name'].required = True
+ class Meta:
+ model = User
+ fields = ('first_name', 'last_name', )
class ContributorForm(forms.ModelForm):
- class Meta:
- model = Contributor
- exclude = ('ctype', 'lastname', 'firstname', 'user', )
+ class Meta:
+ model = Contributor
+ exclude = ('ctype', 'lastname', 'firstname', 'user', )
class ChangeEmailForm(forms.Form):
- email = forms.EmailField()
- email2 = forms.EmailField(label="Repeat email")
+ email = forms.EmailField()
+ email2 = forms.EmailField(label="Repeat email")
- def __init__(self, user, *args, **kwargs):
- super(ChangeEmailForm, self).__init__(*args, **kwargs)
- self.user = user
+ def __init__(self, user, *args, **kwargs):
+ super(ChangeEmailForm, self).__init__(*args, **kwargs)
+ self.user = user
- def clean_email(self):
- email = self.cleaned_data['email'].lower()
+ def clean_email(self):
+ email = self.cleaned_data['email'].lower()
- if email == self.user.email:
- raise forms.ValidationError("This is your existing email address!")
+ if email == self.user.email:
+ raise forms.ValidationError("This is your existing email address!")
- if User.objects.filter(email=email).exists():
- raise forms.ValidationError("A user with this email address is already registered")
+ if User.objects.filter(email=email).exists():
+ raise forms.ValidationError("A user with this email address is already registered")
- return email
+ return email
- def clean_email2(self):
- # If the primary email checker had an exception, the data will be gone
- # from the cleaned_data structure
- if not self.cleaned_data.has_key('email'):
- return self.cleaned_data['email2'].lower()
- email1 = self.cleaned_data['email'].lower()
- email2 = self.cleaned_data['email2'].lower()
+ def clean_email2(self):
+ # If the primary email checker had an exception, the data will be gone
+ # from the cleaned_data structure
+ if not self.cleaned_data.has_key('email'):
+ return self.cleaned_data['email2'].lower()
+ email1 = self.cleaned_data['email'].lower()
+ email2 = self.cleaned_data['email2'].lower()
- if email1 != email2:
- raise forms.ValidationError("Email addresses don't match")
- return email2
+ if email1 != email2:
+ raise forms.ValidationError("Email addresses don't match")
+ return email2
class PgwebPasswordResetForm(forms.Form):
- email = forms.EmailField()
+ email = forms.EmailField()
]
operations = [
- migrations.RunSQL("UPDATE auth_user SET email=lower(email) WHERE email!=lower(email)"),
- migrations.RunSQL("ALTER TABLE auth_user ADD CONSTRAINT email_must_be_lowercase CHECK (email=lower(email))"),
- migrations.RunSQL("CREATE UNIQUE INDEX auth_user_email_lower_key ON auth_user USING btree(lower(email))"),
+ migrations.RunSQL("UPDATE auth_user SET email=lower(email) WHERE email!=lower(email)"),
+ migrations.RunSQL("ALTER TABLE auth_user ADD CONSTRAINT email_must_be_lowercase CHECK (email=lower(email))"),
+ migrations.RunSQL("CREATE UNIQUE INDEX auth_user_email_lower_key ON auth_user USING btree(lower(email))"),
]
from django.contrib.auth.models import User
class CommunityAuthOrg(models.Model):
- orgname = models.CharField(max_length=100, null=False, blank=False,
- help_text="Name of the organisation")
- require_consent = models.BooleanField(null=False, blank=False, default=True)
+ orgname = models.CharField(max_length=100, null=False, blank=False,
+ help_text="Name of the organisation")
+ require_consent = models.BooleanField(null=False, blank=False, default=True)
- def __unicode__(self):
- return self.orgname
+ def __unicode__(self):
+ return self.orgname
class CommunityAuthSite(models.Model):
- name = models.CharField(max_length=100, null=False, blank=False,
- help_text="Note that the value in this field is shown on the login page, so make sure it's user-friendly!")
- redirecturl = models.URLField(max_length=200, null=False, blank=False)
- cryptkey = models.CharField(max_length=100, null=False, blank=False,
- help_text="Use tools/communityauth/generate_cryptkey.py to create a key")
- comment = models.TextField(null=False, blank=True)
- org = models.ForeignKey(CommunityAuthOrg, null=False, blank=False)
- cooloff_hours = models.IntegerField(null=False, blank=False, default=0,
- help_text="Number of hours a user must have existed in the systems before allowed to log in to this site")
+ name = models.CharField(max_length=100, null=False, blank=False,
+ help_text="Note that the value in this field is shown on the login page, so make sure it's user-friendly!")
+ redirecturl = models.URLField(max_length=200, null=False, blank=False)
+ cryptkey = models.CharField(max_length=100, null=False, blank=False,
+ help_text="Use tools/communityauth/generate_cryptkey.py to create a key")
+ comment = models.TextField(null=False, blank=True)
+ org = models.ForeignKey(CommunityAuthOrg, null=False, blank=False)
+ cooloff_hours = models.IntegerField(null=False, blank=False, default=0,
+ help_text="Number of hours a user must have existed in the systems before allowed to log in to this site")
- def __unicode__(self):
- return self.name
+ def __unicode__(self):
+ return self.name
class CommunityAuthConsent(models.Model):
- user = models.ForeignKey(User, null=False, blank=False)
- org = models.ForeignKey(CommunityAuthOrg, null=False, blank=False)
- consentgiven = models.DateTimeField(null=False, blank=False)
+ user = models.ForeignKey(User, null=False, blank=False)
+ org = models.ForeignKey(CommunityAuthOrg, null=False, blank=False)
+ consentgiven = models.DateTimeField(null=False, blank=False)
- class Meta:
- unique_together = (('user', 'org'), )
+ class Meta:
+ unique_together = (('user', 'org'), )
class EmailChangeToken(models.Model):
- user = models.OneToOneField(User, null=False, blank=False)
- email = models.EmailField(max_length=75, null=False, blank=False)
- token = models.CharField(max_length=100, null=False, blank=False)
- sentat = models.DateTimeField(null=False, blank=False, auto_now=True)
+ user = models.OneToOneField(User, null=False, blank=False)
+ email = models.EmailField(max_length=75, null=False, blank=False)
+ token = models.CharField(max_length=100, null=False, blank=False)
+ sentat = models.DateTimeField(null=False, blank=False, auto_now=True)
class OAuthException(Exception):
- pass
+ pass
#
# Generic OAuth login for multiple providers
#
def _login_oauth(request, provider, authurl, tokenurl, scope, authdatafunc):
- from requests_oauthlib import OAuth2Session
-
- client_id = settings.OAUTH[provider]['clientid']
- client_secret = settings.OAUTH[provider]['secret']
- redir = '{0}/account/login/{1}/'.format(settings.SITE_ROOT, provider)
-
- oa = OAuth2Session(client_id, scope=scope, redirect_uri=redir)
- if request.GET.has_key('code'):
- log.info("Completing {0} oauth2 step from {1}".format(provider, get_client_ip(request)))
-
- # Receiving a login request from the provider, so validate data
- # and log the user in.
- if request.GET.get('state', '') != request.session.pop('oauth_state'):
- log.warning("Invalid state received in {0} oauth2 step from {1}".format(provider, get_client_ip(request)))
- raise OAuthException("Invalid OAuth state received")
-
- token = oa.fetch_token(tokenurl,
- client_secret=client_secret,
- code=request.GET['code'])
- try:
- (email, firstname, lastname) = authdatafunc(oa)
- email = email.lower()
- except KeyError, e:
- log.warning("Oauth signing using {0} was missing data: {1}".format(provider, e))
- return HttpResponse('OAuth login was missing critical data. To log in, you need to allow access to email, first name and last name!')
-
- try:
- user = User.objects.get(email=email)
- except User.DoesNotExist:
- log.info("Oauth signin of {0} using {1} from {2}. User not found, offering signup.".format(email, provider, get_client_ip(request)))
-
- # Offer the user a chance to sign up. The full flow is
- # handled elsewhere, so store the details we got from
- # the oauth login in the session, and pass the user on.
- request.session['oauth_email'] = email
- request.session['oauth_firstname'] = firstname or ''
- request.session['oauth_lastname'] = lastname or ''
- return HttpResponseRedirect('/account/signup/oauth/')
-
- log.info("Oauth signin of {0} using {1} from {2}.".format(email, provider, get_client_ip(request)))
-
- user.backend = settings.AUTHENTICATION_BACKENDS[0]
- django_login(request, user)
- n = request.session.pop('login_next')
- if n:
- return HttpResponseRedirect(n)
- else:
- return HttpResponseRedirect('/account/')
- else:
- log.info("Initiating {0} oauth2 step from {1}".format(provider, get_client_ip(request)))
- # First step is redirect to provider
- authorization_url, state = oa.authorization_url(
- authurl,
- prompt='consent',
- )
- request.session['login_next'] = request.GET.get('next', '')
- request.session['oauth_state'] = state
- request.session.modified = True
- return HttpResponseRedirect(authorization_url)
+ from requests_oauthlib import OAuth2Session
+
+ client_id = settings.OAUTH[provider]['clientid']
+ client_secret = settings.OAUTH[provider]['secret']
+ redir = '{0}/account/login/{1}/'.format(settings.SITE_ROOT, provider)
+
+ oa = OAuth2Session(client_id, scope=scope, redirect_uri=redir)
+ if request.GET.has_key('code'):
+ log.info("Completing {0} oauth2 step from {1}".format(provider, get_client_ip(request)))
+
+ # Receiving a login request from the provider, so validate data
+ # and log the user in.
+ if request.GET.get('state', '') != request.session.pop('oauth_state'):
+ log.warning("Invalid state received in {0} oauth2 step from {1}".format(provider, get_client_ip(request)))
+ raise OAuthException("Invalid OAuth state received")
+
+ token = oa.fetch_token(tokenurl,
+ client_secret=client_secret,
+ code=request.GET['code'])
+ try:
+ (email, firstname, lastname) = authdatafunc(oa)
+ email = email.lower()
+ except KeyError, e:
+ log.warning("Oauth signin using {0} was missing data: {1}".format(provider, e))
+ return HttpResponse('OAuth login was missing critical data. To log in, you need to allow access to email, first name and last name!')
+
+ try:
+ user = User.objects.get(email=email)
+ except User.DoesNotExist:
+ log.info("Oauth signin of {0} using {1} from {2}. User not found, offering signup.".format(email, provider, get_client_ip(request)))
+
+ # Offer the user a chance to sign up. The full flow is
+ # handled elsewhere, so store the details we got from
+ # the oauth login in the session, and pass the user on.
+ request.session['oauth_email'] = email
+ request.session['oauth_firstname'] = firstname or ''
+ request.session['oauth_lastname'] = lastname or ''
+ return HttpResponseRedirect('/account/signup/oauth/')
+
+ log.info("Oauth signin of {0} using {1} from {2}.".format(email, provider, get_client_ip(request)))
+
+ user.backend = settings.AUTHENTICATION_BACKENDS[0]
+ django_login(request, user)
+ n = request.session.pop('login_next')
+ if n:
+ return HttpResponseRedirect(n)
+ else:
+ return HttpResponseRedirect('/account/')
+ else:
+ log.info("Initiating {0} oauth2 step from {1}".format(provider, get_client_ip(request)))
+ # First step is redirect to provider
+ authorization_url, state = oa.authorization_url(
+ authurl,
+ prompt='consent',
+ )
+ request.session['login_next'] = request.GET.get('next', '')
+ request.session['oauth_state'] = state
+ request.session.modified = True
+ return HttpResponseRedirect(authorization_url)
#
# Registration: https://console.developers.google.com/apis/
#
def oauth_login_google(request):
- def _google_auth_data(oa):
- r = oa.get('https://www.googleapis.com/oauth2/v1/userinfo').json()
- if not r['verified_email']:
- raise OAuthException("The email in your google profile must be verified in order to log in")
- return (r['email'],
- r.get('given_name', ''),
- r.get('family_name', ''))
-
- return _login_oauth(
- request,
- 'google',
- 'https://accounts.google.com/o/oauth2/v2/auth',
- 'https://accounts.google.com/o/oauth2/token',
- ['https://www.googleapis.com/auth/userinfo.email',
- 'https://www.googleapis.com/auth/userinfo.profile'],
- _google_auth_data)
+ def _google_auth_data(oa):
+ r = oa.get('https://www.googleapis.com/oauth2/v1/userinfo').json()
+ if not r['verified_email']:
+ raise OAuthException("The email in your google profile must be verified in order to log in")
+ return (r['email'],
+ r.get('given_name', ''),
+ r.get('family_name', ''))
+
+ return _login_oauth(
+ request,
+ 'google',
+ 'https://accounts.google.com/o/oauth2/v2/auth',
+ 'https://accounts.google.com/o/oauth2/token',
+ ['https://www.googleapis.com/auth/userinfo.email',
+ 'https://www.googleapis.com/auth/userinfo.profile'],
+ _google_auth_data)
#
# Github login
# Registration: https://github.com/settings/developers
#
def oauth_login_github(request):
- def _github_auth_data(oa):
- # Github just returns full name, so we're just going to have to
- # split that.
- r = oa.get('https://api.github.com/user').json()
- if 'name' in r and r['name']:
- n = r['name'].split(None, 1)
- # Some accounts only have one name, extend with an empty
- # lastname, so the user can fill it out manually.
- while len(n) < 2:
- n.append('')
- else:
- # Some github accounts have no name on them, so we can just
- # let the user fill it out manually in that case.
- n = ['','']
- # Email is at a separate endpoint
- r = oa.get('https://api.github.com/user/emails').json()
- for e in r:
- if e['verified'] and e['primary']:
- return (
- e['email'],
- n[0],
- n[1],
- )
- raise OAuthException("Your GitHub profile must include a verified email address in order to log in")
-
- return _login_oauth(
- request,
- 'github',
- 'https://github.com/login/oauth/authorize',
- 'https://github.com/login/oauth/access_token',
- ['user:email', ],
- _github_auth_data)
+ def _github_auth_data(oa):
+ # Github just returns full name, so we're just going to have to
+ # split that.
+ r = oa.get('https://api.github.com/user').json()
+ if 'name' in r and r['name']:
+ n = r['name'].split(None, 1)
+ # Some accounts only have one name, extend with an empty
+ # lastname, so the user can fill it out manually.
+ while len(n) < 2:
+ n.append('')
+ else:
+ # Some github accounts have no name on them, so we can just
+ # let the user fill it out manually in that case.
+ n = ['','']
+ # Email is at a separate endpoint
+ r = oa.get('https://api.github.com/user/emails').json()
+ for e in r:
+ if e['verified'] and e['primary']:
+ return (
+ e['email'],
+ n[0],
+ n[1],
+ )
+ raise OAuthException("Your GitHub profile must include a verified email address in order to log in")
+
+ return _login_oauth(
+ request,
+ 'github',
+ 'https://github.com/login/oauth/authorize',
+ 'https://github.com/login/oauth/access_token',
+ ['user:email', ],
+ _github_auth_data)
#
# Facebook login
# Registration: https://developers.facebook.com/apps
#
def oauth_login_facebook(request):
- def _facebook_auth_data(oa):
- r = oa.get('https://graph.facebook.com/me?fields=email,first_name,last_name').json()
- if not 'email' in r:
- raise OAuthException("Your Facebook profile must provide an email address in order to log in")
+ def _facebook_auth_data(oa):
+ r = oa.get('https://graph.facebook.com/me?fields=email,first_name,last_name').json()
+ if not 'email' in r:
+ raise OAuthException("Your Facebook profile must provide an email address in order to log in")
- return (r['email'],
- r.get('first_name', ''),
- r.get('last_name', ''))
+ return (r['email'],
+ r.get('first_name', ''),
+ r.get('last_name', ''))
- return _login_oauth(
- request,
- 'facebook',
- 'https://www.facebook.com/dialog/oauth',
- 'https://graph.facebook.com/oauth/access_token',
- ['public_profile', 'email', ],
- _facebook_auth_data)
+ return _login_oauth(
+ request,
+ 'facebook',
+ 'https://www.facebook.com/dialog/oauth',
+ 'https://graph.facebook.com/oauth/access_token',
+ ['public_profile', 'email', ],
+ _facebook_auth_data)
#
# Registration: https://apps.dev.microsoft.com/
#
def oauth_login_microsoft(request):
- def _microsoft_auth_data(oa):
- r = oa.get("https://apis.live.net/v5.0/me").json()
- if not 'emails' in r or not 'account' in r['emails']:
- raise OAuthException("Your Facebook profile must provide an email address in order to log in")
+ def _microsoft_auth_data(oa):
+ r = oa.get("https://apis.live.net/v5.0/me").json()
+ if not 'emails' in r or not 'account' in r['emails']:
+ raise OAuthException("Your Microsoft profile must provide an email address in order to log in")
- return (r['emails']['account'],
- r.get('first_name', ''),
- r.get('last_name', ''))
+ return (r['emails']['account'],
+ r.get('first_name', ''),
+ r.get('last_name', ''))
- return _login_oauth(
- request,
- 'microsoft',
- 'https://login.live.com/oauth20_authorize.srf',
- 'https://login.live.com/oauth20_token.srf',
- ['wl.basic', 'wl.emails' ],
- _microsoft_auth_data)
+ return _login_oauth(
+ request,
+ 'microsoft',
+ 'https://login.live.com/oauth20_authorize.srf',
+ 'https://login.live.com/oauth20_token.srf',
+ ['wl.basic', 'wl.emails' ],
+ _microsoft_auth_data)
def login_oauth(request, provider):
- fn = 'oauth_login_{0}'.format(provider)
- m = sys.modules[__name__]
- if hasattr(m, fn):
- try:
- return getattr(m, fn)(request)
- except OAuthException, e:
- return HttpResponse(e)
- except Exception, e:
- log.error('Excpetion during OAuth: %s' % e)
- return HttpResponse('An unhandled exception occurred during the authentication process')
+ fn = 'oauth_login_{0}'.format(provider)
+ m = sys.modules[__name__]
+ if hasattr(m, fn):
+ try:
+ return getattr(m, fn)(request)
+ except OAuthException, e:
+ return HttpResponse(e)
+ except Exception, e:
+ log.error('Exception during OAuth: %s' % e)
+ return HttpResponse('An unhandled exception occurred during the authentication process')
log = logging.getLogger(__name__)
class ReCaptchaWidget(forms.widgets.Widget):
- def render(self, name, value, attrs=None):
- if settings.NOCAPTCHA:
- return u'Captcha disabled on this system'
- log.info("Generated captcha")
- return mark_safe(u'<div class="g-recaptcha" data-sitekey="{0}"></div>'.format(settings.RECAPTCHA_SITE_KEY))
+ def render(self, name, value, attrs=None):
+ if settings.NOCAPTCHA:
+ return u'Captcha disabled on this system'
+ log.info("Generated captcha")
+ return mark_safe(u'<div class="g-recaptcha" data-sitekey="{0}"></div>'.format(settings.RECAPTCHA_SITE_KEY))
- def value_from_datadict(self, data, files, name):
- if settings.NOCAPTCHA:
- return None
- if data.has_key('g-recaptcha-response'):
- return data['g-recaptcha-response']
- return None
+ def value_from_datadict(self, data, files, name):
+ if settings.NOCAPTCHA:
+ return None
+ if data.has_key('g-recaptcha-response'):
+ return data['g-recaptcha-response']
+ return None
class ReCaptchaField(forms.CharField):
- def __init__(self, *args, **kwargs):
- self.remoteip = None
- self.widget = ReCaptchaWidget()
- self.required = not settings.NOCAPTCHA
- super(ReCaptchaField, self).__init__(*args, **kwargs)
-
- def set_ip(self, ip):
- self.remoteip = ip
-
- def clean(self, value):
- if settings.NOCAPTCHA:
- return True
-
- super(ReCaptchaField, self).clean(value)
-
- # Validate the recaptcha
- c = httplib.HTTPSConnection('www.google.com', strict=True, timeout=5)
- param = {
- 'secret': settings.RECAPTCHA_SECRET_KEY,
- 'response': value,
- }
-
- # Temporarily don't include remoteip, because it only shows our ssl terminating
- # frontends.
-# if self.remoteip:
-# param['remoteip'] = self.remoteip
-
- try:
- c.request('POST', '/recaptcha/api/siteverify', urllib.urlencode(param), {
- 'Content-type': 'application/x-www-form-urlencoded',
- })
- c.sock.settimeout(10)
- except Exception, e:
- # Error to connect at TCP level
- log.error('Failed to connect to google recaptcha API: %s' % e)
- raise ValidationError('Failed in API call to google recaptcha')
-
- try:
- r = c.getresponse()
- except:
- log.error('Failed in API call to google recaptcha')
- raise ValidationError('Failed in API call to google recaptcha')
- if r.status != 200:
- log.error('Invalid response code from google recaptcha')
- raise ValidationError('Invalid response code from google recaptcha')
-
- try:
- j = json.loads(r.read())
- except:
- log.error('Invalid response structure from google recaptcha')
- raise ValidationError('Invalid response structure from google recaptcha')
-
- if not j['success']:
- log.warning('Incorrect recaptcha entered. Trying again.')
- raise ValidationError('Invalid. Try again.')
-
- # Recaptcha validated ok!
- log.info("Successful recaptcha validation")
- return True
+ def __init__(self, *args, **kwargs):
+ self.remoteip = None
+ self.widget = ReCaptchaWidget()
+ self.required = not settings.NOCAPTCHA
+ super(ReCaptchaField, self).__init__(*args, **kwargs)
+
+ def set_ip(self, ip):
+ self.remoteip = ip
+
+ def clean(self, value):
+ if settings.NOCAPTCHA:
+ return True
+
+ super(ReCaptchaField, self).clean(value)
+
+ # Validate the recaptcha
+ c = httplib.HTTPSConnection('www.google.com', strict=True, timeout=5)
+ param = {
+ 'secret': settings.RECAPTCHA_SECRET_KEY,
+ 'response': value,
+ }
+
+ # Temporarily don't include remoteip, because it only shows our ssl terminating
+ # frontends.
+# if self.remoteip:
+# param['remoteip'] = self.remoteip
+
+ try:
+ c.request('POST', '/recaptcha/api/siteverify', urllib.urlencode(param), {
+ 'Content-type': 'application/x-www-form-urlencoded',
+ })
+ c.sock.settimeout(10)
+ except Exception, e:
+ # Error to connect at TCP level
+ log.error('Failed to connect to google recaptcha API: %s' % e)
+ raise ValidationError('Failed in API call to google recaptcha')
+
+ try:
+ r = c.getresponse()
+ except:
+ log.error('Failed in API call to google recaptcha')
+ raise ValidationError('Failed in API call to google recaptcha')
+ if r.status != 200:
+ log.error('Invalid response code from google recaptcha')
+ raise ValidationError('Invalid response code from google recaptcha')
+
+ try:
+ j = json.loads(r.read())
+ except:
+ log.error('Invalid response structure from google recaptcha')
+ raise ValidationError('Invalid response structure from google recaptcha')
+
+ if not j['success']:
+ log.warning('Incorrect recaptcha entered. Trying again.')
+ raise ValidationError('Invalid. Try again.')
+
+ # Recaptcha validated ok!
+ log.info("Successful recaptcha validation")
+ return True
import pgweb.account.oauthclient
urlpatterns = [
- url(r'^$', pgweb.account.views.home),
-
- # Community authenticatoin
- url(r'^auth/(\d+)/$', pgweb.account.views.communityauth),
- url(r'^auth/(\d+)/logout/$', pgweb.account.views.communityauth_logout),
- url(r'^auth/(\d+)/consent/$', pgweb.account.views.communityauth_consent),
- url(r'^auth/(\d+)/search/$', pgweb.account.views.communityauth_search),
- url(r'^auth/(\d+)/getkeys/(\d+/)?$', pgweb.account.views.communityauth_getkeys),
-
- # Profile
- url(r'^profile/$', pgweb.account.views.profile),
- url(r'^profile/change_email/$', pgweb.account.views.change_email),
- url(r'^profile/change_email/([0-9a-f]+)/$', pgweb.account.views.confirm_change_email),
-
- # List of items to edit
- url(r'^edit/(.*)/$', pgweb.account.views.listobjects),
-
- # News & Events
- url(r'^news/(.*)/$', pgweb.news.views.form),
- url(r'^events/(.*)/$', pgweb.events.views.form),
-
- # Software catalogue
- url(r'^organisations/(.*)/$', pgweb.core.views.organisationform),
- url(r'^products/(.*)/$', pgweb.downloads.views.productform),
-
- # Organisation information
- url(r'^orglist/$', pgweb.account.views.orglist),
-
- # Professional services
- url(r'^services/(.*)/$', pgweb.profserv.views.profservform),
-
- # Docs comments
- url(r'^comments/(new)/(.*)/(.*)/$', pgweb.docs.views.commentform),
-
- # Log in, logout, change password etc
- url(r'^login/$', pgweb.account.views.login),
- url(r'^logout/$', pgweb.account.views.logout),
- url(r'^changepwd/$', pgweb.account.views.changepwd),
- url(r'^changepwd/done/$', pgweb.account.views.change_done),
- url(r'^reset/$', pgweb.account.views.resetpwd),
- url(r'^reset/done/$', pgweb.account.views.reset_done),
- url(r'^reset/(?P<uidb64>[0-9A-Za-z_\-]+)-(?P<token>[0-9A-Za-z]{1,13}-[0-9A-Za-z]{1,20})/$', pgweb.account.views.reset_confirm),
- url(r'^reset/complete/$', pgweb.account.views.reset_complete),
- url(r'^signup/$', pgweb.account.views.signup),
- url(r'^signup/complete/$', pgweb.account.views.signup_complete),
- url(r'^signup/oauth/$', pgweb.account.views.signup_oauth),
+ url(r'^$', pgweb.account.views.home),
+
+    # Community authentication
+ url(r'^auth/(\d+)/$', pgweb.account.views.communityauth),
+ url(r'^auth/(\d+)/logout/$', pgweb.account.views.communityauth_logout),
+ url(r'^auth/(\d+)/consent/$', pgweb.account.views.communityauth_consent),
+ url(r'^auth/(\d+)/search/$', pgweb.account.views.communityauth_search),
+ url(r'^auth/(\d+)/getkeys/(\d+/)?$', pgweb.account.views.communityauth_getkeys),
+
+ # Profile
+ url(r'^profile/$', pgweb.account.views.profile),
+ url(r'^profile/change_email/$', pgweb.account.views.change_email),
+ url(r'^profile/change_email/([0-9a-f]+)/$', pgweb.account.views.confirm_change_email),
+
+ # List of items to edit
+ url(r'^edit/(.*)/$', pgweb.account.views.listobjects),
+
+ # News & Events
+ url(r'^news/(.*)/$', pgweb.news.views.form),
+ url(r'^events/(.*)/$', pgweb.events.views.form),
+
+ # Software catalogue
+ url(r'^organisations/(.*)/$', pgweb.core.views.organisationform),
+ url(r'^products/(.*)/$', pgweb.downloads.views.productform),
+
+ # Organisation information
+ url(r'^orglist/$', pgweb.account.views.orglist),
+
+ # Professional services
+ url(r'^services/(.*)/$', pgweb.profserv.views.profservform),
+
+ # Docs comments
+ url(r'^comments/(new)/(.*)/(.*)/$', pgweb.docs.views.commentform),
+
+ # Log in, logout, change password etc
+ url(r'^login/$', pgweb.account.views.login),
+ url(r'^logout/$', pgweb.account.views.logout),
+ url(r'^changepwd/$', pgweb.account.views.changepwd),
+ url(r'^changepwd/done/$', pgweb.account.views.change_done),
+ url(r'^reset/$', pgweb.account.views.resetpwd),
+ url(r'^reset/done/$', pgweb.account.views.reset_done),
+ url(r'^reset/(?P<uidb64>[0-9A-Za-z_\-]+)-(?P<token>[0-9A-Za-z]{1,13}-[0-9A-Za-z]{1,20})/$', pgweb.account.views.reset_confirm),
+ url(r'^reset/complete/$', pgweb.account.views.reset_complete),
+ url(r'^signup/$', pgweb.account.views.signup),
+ url(r'^signup/complete/$', pgweb.account.views.signup_complete),
+ url(r'^signup/oauth/$', pgweb.account.views.signup_oauth),
]
for provider in settings.OAUTH.keys():
- urlpatterns.append(url(r'^login/({0})/$'.format(provider), pgweb.account.oauthclient.login_oauth))
+ urlpatterns.append(url(r'^login/({0})/$'.format(provider), pgweb.account.oauthclient.login_oauth))
@login_required
def home(request):
- myarticles = NewsArticle.objects.filter(org__managers=request.user, approved=False)
- myevents = Event.objects.filter(org__managers=request.user, approved=False)
- myorgs = Organisation.objects.filter(managers=request.user, approved=False)
- myproducts = Product.objects.filter(org__managers=request.user, approved=False)
- myprofservs = ProfessionalService.objects.filter(org__managers=request.user, approved=False)
- return render_pgweb(request, 'account', 'account/index.html', {
- 'newsarticles': myarticles,
- 'events': myevents,
- 'organisations': myorgs,
- 'products': myproducts,
- 'profservs': myprofservs,
- })
+ myarticles = NewsArticle.objects.filter(org__managers=request.user, approved=False)
+ myevents = Event.objects.filter(org__managers=request.user, approved=False)
+ myorgs = Organisation.objects.filter(managers=request.user, approved=False)
+ myproducts = Product.objects.filter(org__managers=request.user, approved=False)
+ myprofservs = ProfessionalService.objects.filter(org__managers=request.user, approved=False)
+ return render_pgweb(request, 'account', 'account/index.html', {
+ 'newsarticles': myarticles,
+ 'events': myevents,
+ 'organisations': myorgs,
+ 'products': myproducts,
+ 'profservs': myprofservs,
+ })
objtypes = {
- 'news': {
- 'title': 'News Article',
- 'objects': lambda u: NewsArticle.objects.filter(org__managers=u),
- },
- 'events': {
- 'title': 'Event',
- 'objects': lambda u: Event.objects.filter(org__managers=u),
+ 'news': {
+ 'title': 'News Article',
+ 'objects': lambda u: NewsArticle.objects.filter(org__managers=u),
+ },
+ 'events': {
+ 'title': 'Event',
+ 'objects': lambda u: Event.objects.filter(org__managers=u),
+ },
+ 'products': {
+ 'title': 'Product',
+ 'objects': lambda u: Product.objects.filter(org__managers=u),
+ },
+ 'services': {
+ 'title': 'Professional Service',
+ 'objects': lambda u: ProfessionalService.objects.filter(org__managers=u),
+ },
+ 'organisations': {
+ 'title': 'Organisation',
+ 'objects': lambda u: Organisation.objects.filter(managers=u),
+ 'submit_header': 'Before submitting a new Organisation, please verify on the list of <a href="/account/orglist/">current organisations</a> if the organisation already exists. If it does, please contact the manager of the organisation to gain permissions.',
},
- 'products': {
- 'title': 'Product',
- 'objects': lambda u: Product.objects.filter(org__managers=u),
- },
- 'services': {
- 'title': 'Professional Service',
- 'objects': lambda u: ProfessionalService.objects.filter(org__managers=u),
- },
- 'organisations': {
- 'title': 'Organisation',
- 'objects': lambda u: Organisation.objects.filter(managers=u),
- 'submit_header': 'Before submitting a new Organisation, please verify on the list of <a href="/account/orglist/">current organisations</a> if the organisation already exists. If it does, please contact the manager of the organisation to gain permissions.',
- },
}
@login_required
@transaction.atomic
def profile(request):
- # We always have the user, but not always the profile. And we need a bit
- # of a hack around the normal forms code since we have two different
- # models on a single form.
- (profile, created) = UserProfile.objects.get_or_create(pk=request.user.pk)
-
- # Don't allow users whose accounts were created via oauth to change
- # their email, since that would kill the connection between the
- # accounts.
- can_change_email = (request.user.password != OAUTH_PASSWORD_STORE)
-
- # We may have a contributor record - and we only show that part of the
- # form if we have it for this user.
- try:
- contrib = Contributor.objects.get(user=request.user.pk)
- except Contributor.DoesNotExist:
- contrib = None
-
- contribform = None
-
- if request.method == 'POST':
- # Process this form
- userform = UserForm(data=request.POST, instance=request.user)
- profileform = UserProfileForm(data=request.POST, instance=profile)
- if contrib:
- contribform = ContributorForm(data=request.POST, instance=contrib)
-
- if userform.is_valid() and profileform.is_valid() and (not contrib or contribform.is_valid()):
- userform.save()
- profileform.save()
- if contrib:
- contribform.save()
- return HttpResponseRedirect("/account/")
- else:
- # Generate form
- userform = UserForm(instance=request.user)
- profileform = UserProfileForm(instance=profile)
- if contrib:
- contribform = ContributorForm(instance=contrib)
-
- return render_pgweb(request, 'account', 'account/userprofileform.html', {
- 'userform': userform,
- 'profileform': profileform,
- 'contribform': contribform,
- 'can_change_email': can_change_email,
- })
+ # We always have the user, but not always the profile. And we need a bit
+ # of a hack around the normal forms code since we have two different
+ # models on a single form.
+ (profile, created) = UserProfile.objects.get_or_create(pk=request.user.pk)
+
+ # Don't allow users whose accounts were created via oauth to change
+ # their email, since that would kill the connection between the
+ # accounts.
+ can_change_email = (request.user.password != OAUTH_PASSWORD_STORE)
+
+ # We may have a contributor record - and we only show that part of the
+ # form if we have it for this user.
+ try:
+ contrib = Contributor.objects.get(user=request.user.pk)
+ except Contributor.DoesNotExist:
+ contrib = None
+
+ contribform = None
+
+ if request.method == 'POST':
+ # Process this form
+ userform = UserForm(data=request.POST, instance=request.user)
+ profileform = UserProfileForm(data=request.POST, instance=profile)
+ if contrib:
+ contribform = ContributorForm(data=request.POST, instance=contrib)
+
+ if userform.is_valid() and profileform.is_valid() and (not contrib or contribform.is_valid()):
+ userform.save()
+ profileform.save()
+ if contrib:
+ contribform.save()
+ return HttpResponseRedirect("/account/")
+ else:
+ # Generate form
+ userform = UserForm(instance=request.user)
+ profileform = UserProfileForm(instance=profile)
+ if contrib:
+ contribform = ContributorForm(instance=contrib)
+
+ return render_pgweb(request, 'account', 'account/userprofileform.html', {
+ 'userform': userform,
+ 'profileform': profileform,
+ 'contribform': contribform,
+ 'can_change_email': can_change_email,
+ })
@login_required
@transaction.atomic
def change_email(request):
- tokens = EmailChangeToken.objects.filter(user=request.user)
- token = len(tokens) and tokens[0] or None
-
- if request.user.password == OAUTH_PASSWORD_STORE:
- # Link shouldn't exist in this case, so just throw an unfriendly
- # error message.
- return HttpServerError(request, "This account cannot change email address as it's connected to a third party login site.")
-
- if request.method == 'POST':
- form = ChangeEmailForm(request.user, data=request.POST)
- if form.is_valid():
- # If there is an existing token, delete it
- if token:
- token.delete()
-
- # Create a new token
- token = EmailChangeToken(user=request.user,
- email=form.cleaned_data['email'].lower(),
- token=generate_random_token())
- token.save()
-
- send_template_mail(settings.ACCOUNTS_NOREPLY_FROM,
- form.cleaned_data['email'],
- 'Your postgresql.org community account',
- 'account/email_change_email.txt',
- { 'token': token , 'user': request.user, }
- )
- return HttpResponseRedirect('done/')
- else:
- form = ChangeEmailForm(request.user)
-
- return render_pgweb(request, 'account', 'account/emailchangeform.html', {
- 'form': form,
- 'token': token,
- })
+ tokens = EmailChangeToken.objects.filter(user=request.user)
+ token = len(tokens) and tokens[0] or None
+
+ if request.user.password == OAUTH_PASSWORD_STORE:
+ # Link shouldn't exist in this case, so just throw an unfriendly
+ # error message.
+ return HttpServerError(request, "This account cannot change email address as it's connected to a third party login site.")
+
+ if request.method == 'POST':
+ form = ChangeEmailForm(request.user, data=request.POST)
+ if form.is_valid():
+ # If there is an existing token, delete it
+ if token:
+ token.delete()
+
+ # Create a new token
+ token = EmailChangeToken(user=request.user,
+ email=form.cleaned_data['email'].lower(),
+ token=generate_random_token())
+ token.save()
+
+ send_template_mail(settings.ACCOUNTS_NOREPLY_FROM,
+ form.cleaned_data['email'],
+ 'Your postgresql.org community account',
+ 'account/email_change_email.txt',
+ { 'token': token , 'user': request.user, }
+ )
+ return HttpResponseRedirect('done/')
+ else:
+ form = ChangeEmailForm(request.user)
+
+ return render_pgweb(request, 'account', 'account/emailchangeform.html', {
+ 'form': form,
+ 'token': token,
+ })
@login_required
@transaction.atomic
def confirm_change_email(request, tokenhash):
- tokens = EmailChangeToken.objects.filter(user=request.user, token=tokenhash)
- token = len(tokens) and tokens[0] or None
+ tokens = EmailChangeToken.objects.filter(user=request.user, token=tokenhash)
+ token = len(tokens) and tokens[0] or None
- if request.user.password == OAUTH_PASSWORD_STORE:
- # Link shouldn't exist in this case, so just throw an unfriendly
- # error message.
- return HttpServerError(request, "This account cannot change email address as it's connected to a third party login site.")
+ if request.user.password == OAUTH_PASSWORD_STORE:
+ # Link shouldn't exist in this case, so just throw an unfriendly
+ # error message.
+ return HttpServerError(request, "This account cannot change email address as it's connected to a third party login site.")
- if token:
- # Valid token find, so change the email address
- request.user.email = token.email.lower()
- request.user.save()
- token.delete()
+ if token:
+        # Valid token found, so change the email address
+ request.user.email = token.email.lower()
+ request.user.save()
+ token.delete()
- return render_pgweb(request, 'account', 'account/emailchangecompleted.html', {
- 'token': tokenhash,
- 'success': token and True or False,
- })
+ return render_pgweb(request, 'account', 'account/emailchangecompleted.html', {
+ 'token': tokenhash,
+ 'success': token and True or False,
+ })
@login_required
def listobjects(request, objtype):
- if not objtypes.has_key(objtype):
- raise Http404("Object type not found")
- o = objtypes[objtype]
-
- return render_pgweb(request, 'account', 'account/objectlist.html', {
- 'objects': {
- 'approved': o['objects'](request.user).filter(approved=True),
- 'unapproved': o['objects'](request.user).filter(approved=False),
- },
- 'title': o['title'],
- 'submit_header': o.has_key('submit_header') and o['submit_header'] or None,
- 'suburl': objtype,
- })
+ if not objtypes.has_key(objtype):
+ raise Http404("Object type not found")
+ o = objtypes[objtype]
+
+ return render_pgweb(request, 'account', 'account/objectlist.html', {
+ 'objects': {
+ 'approved': o['objects'](request.user).filter(approved=True),
+ 'unapproved': o['objects'](request.user).filter(approved=False),
+ },
+ 'title': o['title'],
+ 'submit_header': o.has_key('submit_header') and o['submit_header'] or None,
+ 'suburl': objtype,
+ })
@login_required
def orglist(request):
- orgs = Organisation.objects.filter(approved=True)
+ orgs = Organisation.objects.filter(approved=True)
- return render_pgweb(request, 'account', 'account/orglist.html', {
- 'orgs': orgs,
- })
+ return render_pgweb(request, 'account', 'account/orglist.html', {
+ 'orgs': orgs,
+ })
def login(request):
- return authviews.login(request, template_name='account/login.html',
- authentication_form=PgwebAuthenticationForm,
- extra_context={
- 'oauth_providers': [(k,v) for k,v in sorted(settings.OAUTH.items())],
- })
+ return authviews.login(request, template_name='account/login.html',
+ authentication_form=PgwebAuthenticationForm,
+ extra_context={
+ 'oauth_providers': [(k,v) for k,v in sorted(settings.OAUTH.items())],
+ })
def logout(request):
- return authviews.logout_then_login(request, login_url='/')
+ return authviews.logout_then_login(request, login_url='/')
def changepwd(request):
- if hasattr(request.user, 'password') and request.user.password == OAUTH_PASSWORD_STORE:
- return HttpServerError(request, "This account cannot change password as it's connected to a third party login site.")
+ if hasattr(request.user, 'password') and request.user.password == OAUTH_PASSWORD_STORE:
+ return HttpServerError(request, "This account cannot change password as it's connected to a third party login site.")
- log.info("Initiating password change from {0}".format(get_client_ip(request)))
- return authviews.password_change(request,
- template_name='account/password_change.html',
- post_change_redirect='/account/changepwd/done/')
+ log.info("Initiating password change from {0}".format(get_client_ip(request)))
+ return authviews.password_change(request,
+ template_name='account/password_change.html',
+ post_change_redirect='/account/changepwd/done/')
def resetpwd(request):
- # Basic django password reset feature is completely broken. For example, it does not support
- # resetting passwords for users with "old hashes", which means they have no way to ever
- # recover. So implement our own, since it's quite the trivial feature.
- if request.method == "POST":
- try:
- u = User.objects.get(email__iexact=request.POST['email'])
- if u.password == OAUTH_PASSWORD_STORE:
- return HttpServerError(request, "This account cannot change password as it's connected to a third party login site.")
- except User.DoesNotExist:
- log.info("Attempting to reset password of {0}, user not found".format(request.POST['email']))
- return HttpResponseRedirect('/account/reset/done/')
-
- form = PgwebPasswordResetForm(data=request.POST)
- if form.is_valid():
- log.info("Initiating password set from {0} for {1}".format(get_client_ip(request), form.cleaned_data['email']))
- token = default_token_generator.make_token(u)
- send_template_mail(settings.ACCOUNTS_NOREPLY_FROM,
- form.cleaned_data['email'],
- 'Password reset for your postgresql.org account',
- 'account/password_reset_email.txt',
- {
- 'user': u,
- 'uid': urlsafe_base64_encode(force_bytes(u.pk)),
- 'token': token,
- },
- )
- return HttpResponseRedirect('/account/reset/done/')
- else:
- form = PgwebPasswordResetForm()
-
- return render_pgweb(request, 'account', 'account/password_reset.html', {
- 'form': form,
- })
+ # Basic django password reset feature is completely broken. For example, it does not support
+ # resetting passwords for users with "old hashes", which means they have no way to ever
+ # recover. So implement our own, since it's quite the trivial feature.
+ if request.method == "POST":
+ try:
+ u = User.objects.get(email__iexact=request.POST['email'])
+ if u.password == OAUTH_PASSWORD_STORE:
+ return HttpServerError(request, "This account cannot change password as it's connected to a third party login site.")
+ except User.DoesNotExist:
+ log.info("Attempting to reset password of {0}, user not found".format(request.POST['email']))
+ return HttpResponseRedirect('/account/reset/done/')
+
+ form = PgwebPasswordResetForm(data=request.POST)
+ if form.is_valid():
+ log.info("Initiating password set from {0} for {1}".format(get_client_ip(request), form.cleaned_data['email']))
+ token = default_token_generator.make_token(u)
+ send_template_mail(settings.ACCOUNTS_NOREPLY_FROM,
+ form.cleaned_data['email'],
+ 'Password reset for your postgresql.org account',
+ 'account/password_reset_email.txt',
+ {
+ 'user': u,
+ 'uid': urlsafe_base64_encode(force_bytes(u.pk)),
+ 'token': token,
+ },
+ )
+ return HttpResponseRedirect('/account/reset/done/')
+ else:
+ form = PgwebPasswordResetForm()
+
+ return render_pgweb(request, 'account', 'account/password_reset.html', {
+ 'form': form,
+ })
def change_done(request):
- log.info("Password change done from {0}".format(get_client_ip(request)))
- return authviews.password_change_done(request, template_name='account/password_change_done.html')
+ log.info("Password change done from {0}".format(get_client_ip(request)))
+ return authviews.password_change_done(request, template_name='account/password_change_done.html')
def reset_done(request):
- log.info("Password reset done from {0}".format(get_client_ip(request)))
- return authviews.password_reset_done(request, template_name='account/password_reset_done.html')
+ log.info("Password reset done from {0}".format(get_client_ip(request)))
+ return authviews.password_reset_done(request, template_name='account/password_reset_done.html')
def reset_confirm(request, uidb64, token):
- log.info("Confirming password reset for uidb {0}, token {1} from {2}".format(uidb64, token, get_client_ip(request)))
- return authviews.password_reset_confirm(request,
- uidb64=uidb64,
- token=token,
- template_name='account/password_reset_confirm.html',
- post_reset_redirect='/account/reset/complete/')
+ log.info("Confirming password reset for uidb {0}, token {1} from {2}".format(uidb64, token, get_client_ip(request)))
+ return authviews.password_reset_confirm(request,
+ uidb64=uidb64,
+ token=token,
+ template_name='account/password_reset_confirm.html',
+ post_reset_redirect='/account/reset/complete/')
def reset_complete(request):
- log.info("Password reset completed for user from {0}".format(get_client_ip(request)))
- return authviews.password_reset_complete(request, template_name='account/password_reset_complete.html')
+ log.info("Password reset completed for user from {0}".format(get_client_ip(request)))
+ return authviews.password_reset_complete(request, template_name='account/password_reset_complete.html')
@script_sources('https://www.google.com/recaptcha/')
@script_sources('https://www.gstatic.com/recaptcha/')
@frame_sources('https://www.google.com/')
def signup(request):
- if request.user.is_authenticated():
- return HttpServerError(request, "You must log out before you can sign up for a new account")
-
- if request.method == 'POST':
- # Attempt to create user then, eh?
- form = SignupForm(get_client_ip(request), data=request.POST)
- if form.is_valid():
- # Attempt to create the user here
- # XXX: Do we need to validate something else?
- log.info("Creating user for {0} from {1}".format(form.cleaned_data['username'], get_client_ip(request)))
-
- user = User.objects.create_user(form.cleaned_data['username'].lower(), form.cleaned_data['email'].lower(), last_login=datetime.now())
- user.first_name = form.cleaned_data['first_name']
- user.last_name = form.cleaned_data['last_name']
-
- # generate a random value for password. It won't be possible to log in with it, but
- # it creates more entropy for the token generator (I think).
- user.password = generate_random_token()
- user.save()
-
- # Now generate a token
- token = default_token_generator.make_token(user)
- log.info("Generated token {0} for user {1} from {2}".format(token, form.cleaned_data['username'], get_client_ip(request)))
-
- # Generate an outgoing email
- send_template_mail(settings.ACCOUNTS_NOREPLY_FROM,
- form.cleaned_data['email'],
- 'Your new postgresql.org community account',
- 'account/new_account_email.txt',
- { 'uid': urlsafe_base64_encode(force_bytes(user.id)), 'token': token, 'user': user}
- )
-
- return HttpResponseRedirect('/account/signup/complete/')
- else:
- form = SignupForm(get_client_ip(request))
-
- return render_pgweb(request, 'account', 'base/form.html', {
- 'form': form,
- 'formitemtype': 'Account',
- 'form_intro': """
+ if request.user.is_authenticated():
+ return HttpServerError(request, "You must log out before you can sign up for a new account")
+
+ if request.method == 'POST':
+ # Attempt to create user then, eh?
+ form = SignupForm(get_client_ip(request), data=request.POST)
+ if form.is_valid():
+ # Attempt to create the user here
+ # XXX: Do we need to validate something else?
+ log.info("Creating user for {0} from {1}".format(form.cleaned_data['username'], get_client_ip(request)))
+
+ user = User.objects.create_user(form.cleaned_data['username'].lower(), form.cleaned_data['email'].lower(), last_login=datetime.now())
+ user.first_name = form.cleaned_data['first_name']
+ user.last_name = form.cleaned_data['last_name']
+
+ # generate a random value for password. It won't be possible to log in with it, but
+ # it creates more entropy for the token generator (I think).
+ user.password = generate_random_token()
+ user.save()
+
+ # Now generate a token
+ token = default_token_generator.make_token(user)
+ log.info("Generated token {0} for user {1} from {2}".format(token, form.cleaned_data['username'], get_client_ip(request)))
+
+ # Generate an outgoing email
+ send_template_mail(settings.ACCOUNTS_NOREPLY_FROM,
+ form.cleaned_data['email'],
+ 'Your new postgresql.org community account',
+ 'account/new_account_email.txt',
+ { 'uid': urlsafe_base64_encode(force_bytes(user.id)), 'token': token, 'user': user}
+ )
+
+ return HttpResponseRedirect('/account/signup/complete/')
+ else:
+ form = SignupForm(get_client_ip(request))
+
+ return render_pgweb(request, 'account', 'base/form.html', {
+ 'form': form,
+ 'formitemtype': 'Account',
+ 'form_intro': """
To sign up for a free community account, enter your preferred userid and email address.
Note that a community account is only needed if you want to submit information - all
content is available for reading without an account.
""",
- 'savebutton': 'Sign up',
- 'operation': 'New',
- 'recaptcha': True,
- })
+ 'savebutton': 'Sign up',
+ 'operation': 'New',
+ 'recaptcha': True,
+ })
def signup_complete(request):
- return render_pgweb(request, 'account', 'account/signup_complete.html', {
- })
+ return render_pgweb(request, 'account', 'account/signup_complete.html', {
+ })
@script_sources('https://www.google.com/recaptcha/')
@frame_sources('https://www.google.com/')
@transaction.atomic
def signup_oauth(request):
- if not request.session.has_key('oauth_email') \
- or not request.session.has_key('oauth_firstname') \
- or not request.session.has_key('oauth_lastname'):
- return HttpServerError(request, 'Invalid redirect received')
-
- if request.method == 'POST':
- # Second stage, so create the account. But verify that the
- # nonce matches.
- data = request.POST.copy()
- data['email'] = request.session['oauth_email'].lower()
- data['first_name'] = request.session['oauth_firstname']
- data['last_name'] = request.session['oauth_lastname']
- form = SignupOauthForm(data=data)
- if form.is_valid():
- log.info("Creating user for {0} from {1} from oauth signin of email {2}".format(form.cleaned_data['username'], get_client_ip(request), request.session['oauth_email']))
-
- user = User.objects.create_user(form.cleaned_data['username'].lower(),
- request.session['oauth_email'].lower(),
- last_login=datetime.now())
- user.first_name = request.session['oauth_firstname']
- user.last_name = request.session['oauth_lastname']
- user.password = OAUTH_PASSWORD_STORE
- user.save()
-
- # Clean up our session
- del request.session['oauth_email']
- del request.session['oauth_firstname']
- del request.session['oauth_lastname']
- request.session.modified = True
-
- # We can immediately log the user in because their email
- # is confirmed.
- user.backend = settings.AUTHENTICATION_BACKENDS[0]
- django_login(request, user)
-
- # Redirect to the sessions page, or to the account page
- # if none was given.
- return HttpResponseRedirect(request.session.pop('login_next', '/account/'))
- elif request.GET.has_key('do_abort'):
- del request.session['oauth_email']
- del request.session['oauth_firstname']
- del request.session['oauth_lastname']
- request.session.modified = True
- return HttpResponseRedirect(request.session.pop('login_next', '/'))
- else:
- # Generate possible new username
- suggested_username = request.session['oauth_email'].replace('@', '.')[:30]
-
- # Auto generation requires firstname and lastname to be specified
- f = request.session['oauth_firstname'].lower()
- l = request.session['oauth_lastname'].lower()
- if f and l:
- for u in itertools.chain([
- u"{0}{1}".format(f, l[0]),
- u"{0}{1}".format(f[0], l),
- ], (u"{0}{1}{2}".format(f, l[0], n) for n in xrange(100))):
- if not User.objects.filter(username=u[:30]).exists():
- suggested_username = u[:30]
- break
-
- form = SignupOauthForm(initial={
- 'username': suggested_username,
- 'email': request.session['oauth_email'].lower(),
- 'first_name': request.session['oauth_firstname'][:30],
- 'last_name': request.session['oauth_lastname'][:30],
- })
-
- return render_pgweb(request, 'account', 'account/signup_oauth.html', {
- 'form': form,
- 'operation': 'New account',
- 'savebutton': 'Sign up for new account',
- 'recaptcha': True,
- })
+ if not request.session.has_key('oauth_email') \
+ or not request.session.has_key('oauth_firstname') \
+ or not request.session.has_key('oauth_lastname'):
+ return HttpServerError(request, 'Invalid redirect received')
+
+ if request.method == 'POST':
+ # Second stage, so create the account. But verify that the
+ # nonce matches.
+ data = request.POST.copy()
+ data['email'] = request.session['oauth_email'].lower()
+ data['first_name'] = request.session['oauth_firstname']
+ data['last_name'] = request.session['oauth_lastname']
+ form = SignupOauthForm(data=data)
+ if form.is_valid():
+ log.info("Creating user for {0} from {1} from oauth signin of email {2}".format(form.cleaned_data['username'], get_client_ip(request), request.session['oauth_email']))
+
+ user = User.objects.create_user(form.cleaned_data['username'].lower(),
+ request.session['oauth_email'].lower(),
+ last_login=datetime.now())
+ user.first_name = request.session['oauth_firstname']
+ user.last_name = request.session['oauth_lastname']
+ user.password = OAUTH_PASSWORD_STORE
+ user.save()
+
+ # Clean up our session
+ del request.session['oauth_email']
+ del request.session['oauth_firstname']
+ del request.session['oauth_lastname']
+ request.session.modified = True
+
+ # We can immediately log the user in because their email
+ # is confirmed.
+ user.backend = settings.AUTHENTICATION_BACKENDS[0]
+ django_login(request, user)
+
+ # Redirect to the sessions page, or to the account page
+ # if none was given.
+ return HttpResponseRedirect(request.session.pop('login_next', '/account/'))
+ elif request.GET.has_key('do_abort'):
+ del request.session['oauth_email']
+ del request.session['oauth_firstname']
+ del request.session['oauth_lastname']
+ request.session.modified = True
+ return HttpResponseRedirect(request.session.pop('login_next', '/'))
+ else:
+ # Generate possible new username
+ suggested_username = request.session['oauth_email'].replace('@', '.')[:30]
+
+ # Auto generation requires firstname and lastname to be specified
+ f = request.session['oauth_firstname'].lower()
+ l = request.session['oauth_lastname'].lower()
+ if f and l:
+ for u in itertools.chain([
+ u"{0}{1}".format(f, l[0]),
+ u"{0}{1}".format(f[0], l),
+ ], (u"{0}{1}{2}".format(f, l[0], n) for n in xrange(100))):
+ if not User.objects.filter(username=u[:30]).exists():
+ suggested_username = u[:30]
+ break
+
+ form = SignupOauthForm(initial={
+ 'username': suggested_username,
+ 'email': request.session['oauth_email'].lower(),
+ 'first_name': request.session['oauth_firstname'][:30],
+ 'last_name': request.session['oauth_lastname'][:30],
+ })
+
+ return render_pgweb(request, 'account', 'account/signup_oauth.html', {
+ 'form': form,
+ 'operation': 'New account',
+ 'savebutton': 'Sign up for new account',
+ 'recaptcha': True,
+ })
####
## Community authentication endpoint
####
def communityauth(request, siteid):
- # Get whatever site the user is trying to log in to.
- site = get_object_or_404(CommunityAuthSite, pk=siteid)
-
- # "suburl" - old style way of passing parameters
- # deprecated - will be removed once all sites have migrated
- if request.GET.has_key('su'):
- su = request.GET['su']
- if not su.startswith('/'):
- su = None
- else:
- su = None
-
- # "data" - new style way of passing parameter, where we only
- # care that it's characters are what's in base64.
- if request.GET.has_key('d'):
- d = request.GET['d']
- if d != urllib.quote_plus(d, '=$'):
- # Invalid character, so drop it
- d = None
- else:
- d = None
-
- if d:
- urldata = "?d=%s" % d
- elif su:
- urldata = "?su=%s" % su
- else:
- urldata = ""
-
- # Verify if the user is authenticated, and if he/she is not, generate
- # a login form that has information about which site is being logged
- # in to, and basic information about how the community login system
- # works.
- if not request.user.is_authenticated():
- if request.method == "POST" and 'next' in request.POST and 'this_is_the_login_form' in request.POST:
- # This is a postback of the login form. So pick the next filed
- # from that one, so we keep it across invalid password entries.
- nexturl = request.POST['next']
- else:
- nexturl = '/account/auth/%s/%s' % (siteid, urldata)
- return authviews.login(request, template_name='account/login.html',
- authentication_form=PgwebAuthenticationForm,
- extra_context={
- 'sitename': site.name,
- 'next': nexturl,
- 'oauth_providers': [(k,v) for k,v in sorted(settings.OAUTH.items())],
- },
- )
-
- # When we reach this point, the user *has* already been authenticated.
- # The request variable "su" *may* contain a suburl and should in that
- # case be passed along to the site we're authenticating for. And of
- # course, we fill a structure with information about the user.
-
- if request.user.first_name=='' or request.user.last_name=='' or request.user.email=='':
- return render_pgweb(request, 'account', 'account/communityauth_noinfo.html', {
- })
-
- # Check for cooloff period
- if site.cooloff_hours > 0:
- if (datetime.now() - request.user.date_joined) < timedelta(hours=site.cooloff_hours):
- log.warning("User {0} tried to log in to {1} before cooloff period ended.".format(
- request.user.username, site.name))
- return render_pgweb(request, 'account', 'account/communityauth_cooloff.html', {
- 'site': site,
- })
-
- if site.org.require_consent:
- if not CommunityAuthConsent.objects.filter(org=site.org, user=request.user).exists():
- return HttpResponseRedirect('/account/auth/{0}/consent/?{1}'.format(siteid,
- urllib.urlencode({'next': '/account/auth/{0}/{1}'.format(siteid, urldata)})))
-
- info = {
- 'u': request.user.username.encode('utf-8'),
- 'f': request.user.first_name.encode('utf-8'),
- 'l': request.user.last_name.encode('utf-8'),
- 'e': request.user.email.encode('utf-8'),
- }
- if d:
- info['d'] = d.encode('utf-8')
- elif su:
- info['su'] = su.encode('utf-8')
-
- # Turn this into an URL. Make sure the timestamp is always first, that makes
- # the first block more random..
- s = "t=%s&%s" % (int(time.time()), urllib.urlencode(info))
-
- # Encrypt it with the shared key (and IV!)
- r = Random.new()
- iv = r.read(16) # Always 16 bytes for AES
- encryptor = AES.new(base64.b64decode(site.cryptkey), AES.MODE_CBC, iv)
- cipher = encryptor.encrypt(s + ' ' * (16-(len(s) % 16))) #Pad to even 16 bytes
-
- # Generate redirect
- return HttpResponseRedirect("%s?i=%s&d=%s" % (
- site.redirecturl,
- base64.b64encode(iv, "-_"),
- base64.b64encode(cipher, "-_"),
- ))
+ # Get whatever site the user is trying to log in to.
+ site = get_object_or_404(CommunityAuthSite, pk=siteid)
+
+ # "suburl" - old style way of passing parameters
+ # deprecated - will be removed once all sites have migrated
+ if request.GET.has_key('su'):
+ su = request.GET['su']
+ if not su.startswith('/'):
+ su = None
+ else:
+ su = None
+
+ # "data" - new style way of passing parameter, where we only
+ # care that it's characters are what's in base64.
+ if request.GET.has_key('d'):
+ d = request.GET['d']
+ if d != urllib.quote_plus(d, '=$'):
+ # Invalid character, so drop it
+ d = None
+ else:
+ d = None
+
+ if d:
+ urldata = "?d=%s" % d
+ elif su:
+ urldata = "?su=%s" % su
+ else:
+ urldata = ""
+
+ # Verify if the user is authenticated, and if he/she is not, generate
+ # a login form that has information about which site is being logged
+ # in to, and basic information about how the community login system
+ # works.
+ if not request.user.is_authenticated():
+ if request.method == "POST" and 'next' in request.POST and 'this_is_the_login_form' in request.POST:
+ # This is a postback of the login form. So pick the next field
+ # from that one, so we keep it across invalid password entries.
+ nexturl = request.POST['next']
+ else:
+ nexturl = '/account/auth/%s/%s' % (siteid, urldata)
+ return authviews.login(request, template_name='account/login.html',
+ authentication_form=PgwebAuthenticationForm,
+ extra_context={
+ 'sitename': site.name,
+ 'next': nexturl,
+ 'oauth_providers': [(k,v) for k,v in sorted(settings.OAUTH.items())],
+ },
+ )
+
+ # When we reach this point, the user *has* already been authenticated.
+ # The request variable "su" *may* contain a suburl and should in that
+ # case be passed along to the site we're authenticating for. And of
+ # course, we fill a structure with information about the user.
+
+ if request.user.first_name=='' or request.user.last_name=='' or request.user.email=='':
+ return render_pgweb(request, 'account', 'account/communityauth_noinfo.html', {
+ })
+
+ # Check for cooloff period
+ if site.cooloff_hours > 0:
+ if (datetime.now() - request.user.date_joined) < timedelta(hours=site.cooloff_hours):
+ log.warning("User {0} tried to log in to {1} before cooloff period ended.".format(
+ request.user.username, site.name))
+ return render_pgweb(request, 'account', 'account/communityauth_cooloff.html', {
+ 'site': site,
+ })
+
+ if site.org.require_consent:
+ if not CommunityAuthConsent.objects.filter(org=site.org, user=request.user).exists():
+ return HttpResponseRedirect('/account/auth/{0}/consent/?{1}'.format(siteid,
+ urllib.urlencode({'next': '/account/auth/{0}/{1}'.format(siteid, urldata)})))
+
+ info = {
+ 'u': request.user.username.encode('utf-8'),
+ 'f': request.user.first_name.encode('utf-8'),
+ 'l': request.user.last_name.encode('utf-8'),
+ 'e': request.user.email.encode('utf-8'),
+ }
+ if d:
+ info['d'] = d.encode('utf-8')
+ elif su:
+ info['su'] = su.encode('utf-8')
+
+ # Turn this into a URL. Make sure the timestamp is always first, that makes
+ # the first block more random.
+ s = "t=%s&%s" % (int(time.time()), urllib.urlencode(info))
+
+ # Encrypt it with the shared key (and IV!)
+ r = Random.new()
+ iv = r.read(16) # Always 16 bytes for AES
+ encryptor = AES.new(base64.b64decode(site.cryptkey), AES.MODE_CBC, iv)
+ cipher = encryptor.encrypt(s + ' ' * (16-(len(s) % 16))) #Pad to even 16 bytes
+
+ # Generate redirect
+ return HttpResponseRedirect("%s?i=%s&d=%s" % (
+ site.redirecturl,
+ base64.b64encode(iv, "-_"),
+ base64.b64encode(cipher, "-_"),
+ ))
def communityauth_logout(request, siteid):
- # Get whatever site the user is trying to log in to.
- site = get_object_or_404(CommunityAuthSite, pk=siteid)
+ # Get whatever site the user is trying to log in to.
+ site = get_object_or_404(CommunityAuthSite, pk=siteid)
- if request.user.is_authenticated():
- django_logout(request)
+ if request.user.is_authenticated():
+ django_logout(request)
- # Redirect user back to the specified suburl
- return HttpResponseRedirect("%s?s=logout" % site.redirecturl)
+ # Redirect user back to the specified suburl
+ return HttpResponseRedirect("%s?s=logout" % site.redirecturl)
@login_required
def communityauth_consent(request, siteid):
- org = get_object_or_404(CommunityAuthSite, id=siteid).org
- if request.method == 'POST':
- form = CommunityAuthConsentForm(org.orgname, data=request.POST)
- if form.is_valid():
- CommunityAuthConsent.objects.get_or_create(user=request.user, org=org,
- defaults={'consentgiven':datetime.now()},
- )
- return HttpResponseRedirect(form.cleaned_data['next'])
- else:
- form = CommunityAuthConsentForm(org.orgname, initial={'next': request.GET.get('next', '')})
-
- return render_pgweb(request, 'account', 'base/form.html', {
- 'form': form,
- 'operation': 'Authentication',
- 'form_intro': 'The site you are about to log into is run by {0}. If you choose to proceed with this authentication, your name and email address will be shared with <em>{1}</em>.</p><p>Please confirm that you consent to this sharing.'.format(org.orgname, org.orgname),
- 'savebutton': 'Proceed with login',
- })
+ org = get_object_or_404(CommunityAuthSite, id=siteid).org
+ if request.method == 'POST':
+ form = CommunityAuthConsentForm(org.orgname, data=request.POST)
+ if form.is_valid():
+ CommunityAuthConsent.objects.get_or_create(user=request.user, org=org,
+ defaults={'consentgiven':datetime.now()},
+ )
+ return HttpResponseRedirect(form.cleaned_data['next'])
+ else:
+ form = CommunityAuthConsentForm(org.orgname, initial={'next': request.GET.get('next', '')})
+
+ return render_pgweb(request, 'account', 'base/form.html', {
+ 'form': form,
+ 'operation': 'Authentication',
+ 'form_intro': 'The site you are about to log into is run by {0}. If you choose to proceed with this authentication, your name and email address will be shared with <em>{1}</em>.</p><p>Please confirm that you consent to this sharing.'.format(org.orgname, org.orgname),
+ 'savebutton': 'Proceed with login',
+ })
def _encrypt_site_response(site, s):
- # Encrypt it with the shared key (and IV!)
- r = Random.new()
- iv = r.read(16) # Always 16 bytes for AES
- encryptor = AES.new(base64.b64decode(site.cryptkey), AES.MODE_CBC, iv)
- cipher = encryptor.encrypt(s + ' ' * (16-(len(s) % 16))) #Pad to even 16 bytes
-
- # Base64-encode the response, just to be consistent
- return "%s&%s" % (
- base64.b64encode(iv, '-_'),
- base64.b64encode(cipher, '-_'),
- )
+ # Encrypt it with the shared key (and IV!)
+ r = Random.new()
+ iv = r.read(16) # Always 16 bytes for AES
+ encryptor = AES.new(base64.b64decode(site.cryptkey), AES.MODE_CBC, iv)
+ cipher = encryptor.encrypt(s + ' ' * (16-(len(s) % 16))) #Pad to even 16 bytes
+
+ # Base64-encode the response, just to be consistent
+ return "%s&%s" % (
+ base64.b64encode(iv, '-_'),
+ base64.b64encode(cipher, '-_'),
+ )
def communityauth_search(request, siteid):
- # Perform a search for users. The response will be encrypted with the site
- # key to prevent abuse, therefor we need the site.
- site = get_object_or_404(CommunityAuthSite, pk=siteid)
+ # Perform a search for users. The response will be encrypted with the site
+ # key to prevent abuse, therefore we need the site.
+ site = get_object_or_404(CommunityAuthSite, pk=siteid)
- q = Q(is_active=True)
- if request.GET.has_key('s') and request.GET['s']:
- # General search term, match both name and email
- q = q & (Q(email__icontains=request.GET['s']) | Q(first_name__icontains=request.GET['s']) | Q(last_name__icontains=request.GET['s']))
- elif request.GET.has_key('e') and request.GET['e']:
- q = q & Q(email__icontains=request.GET['e'])
- elif request.GET.has_key('n') and request.GET['n']:
- q = q & (Q(first_name__icontains=request.GET['n']) | Q(last_name__icontains=request.GET['n']))
- elif request.GET.has_key('u') and request.GET['u']:
- q = q & Q(username=request.GET['u'])
- else:
- raise Http404('No search term specified')
+ q = Q(is_active=True)
+ if request.GET.has_key('s') and request.GET['s']:
+ # General search term, match both name and email
+ q = q & (Q(email__icontains=request.GET['s']) | Q(first_name__icontains=request.GET['s']) | Q(last_name__icontains=request.GET['s']))
+ elif request.GET.has_key('e') and request.GET['e']:
+ q = q & Q(email__icontains=request.GET['e'])
+ elif request.GET.has_key('n') and request.GET['n']:
+ q = q & (Q(first_name__icontains=request.GET['n']) | Q(last_name__icontains=request.GET['n']))
+ elif request.GET.has_key('u') and request.GET['u']:
+ q = q & Q(username=request.GET['u'])
+ else:
+ raise Http404('No search term specified')
- users = User.objects.filter(q)
+ users = User.objects.filter(q)
- j = json.dumps([{'u': u.username, 'e': u.email, 'f': u.first_name, 'l': u.last_name} for u in users])
+ j = json.dumps([{'u': u.username, 'e': u.email, 'f': u.first_name, 'l': u.last_name} for u in users])
- return HttpResponse(_encrypt_site_response(site, j))
+ return HttpResponse(_encrypt_site_response(site, j))
def communityauth_getkeys(request, siteid, since=None):
- # Get any updated ssh keys for community accounts.
- # The response will be encrypted with the site key to prevent abuse,
- # therefor we need the site.
- site = get_object_or_404(CommunityAuthSite, pk=siteid)
+ # Get any updated ssh keys for community accounts.
+ # The response will be encrypted with the site key to prevent abuse,
+ # therefore we need the site.
+ site = get_object_or_404(CommunityAuthSite, pk=siteid)
- if since:
- keys = UserProfile.objects.select_related('user').filter(lastmodified__gte=datetime.fromtimestamp(int(since.replace('/', '')))).exclude(sshkey='')
- else:
- keys = UserProfile.objects.select_related('user').all().exclude(sshkey='')
+ if since:
+ keys = UserProfile.objects.select_related('user').filter(lastmodified__gte=datetime.fromtimestamp(int(since.replace('/', '')))).exclude(sshkey='')
+ else:
+ keys = UserProfile.objects.select_related('user').all().exclude(sshkey='')
- j = json.dumps([{'u': k.user.username, 's': k.sshkey} for k in keys])
+ j = json.dumps([{'u': k.user.username, 's': k.sshkey} for k in keys])
- return HttpResponse(_encrypt_site_response(site, j))
+ return HttpResponse(_encrypt_site_response(site, j))
from models import Contributor, ContributorType
class ContributorAdminForm(forms.ModelForm):
- class Meta:
- model = Contributor
- exclude = ()
- widgets = {
- 'user': AutoCompleteSelectWidget(lookup_class=UserLookup),
- }
+ class Meta:
+ model = Contributor
+ exclude = ()
+ widgets = {
+ 'user': AutoCompleteSelectWidget(lookup_class=UserLookup),
+ }
- def __init__(self, *args, **kwargs):
- super(ContributorAdminForm, self).__init__(*args, **kwargs)
- self.fields['user'].widget.can_add_related = False
- self.fields['user'].widget.can_change_related = False
+ def __init__(self, *args, **kwargs):
+ super(ContributorAdminForm, self).__init__(*args, **kwargs)
+ self.fields['user'].widget.can_add_related = False
+ self.fields['user'].widget.can_change_related = False
class ContributorAdmin(admin.ModelAdmin):
- form = ContributorAdminForm
+ form = ContributorAdminForm
admin.site.register(ContributorType)
admin.site.register(Contributor, ContributorAdmin)
from django.contrib.auth.models import User
class ContributorType(models.Model):
- typename = models.CharField(max_length=32, null=False, blank=False)
- sortorder = models.IntegerField(null=False, default=100)
- extrainfo = models.TextField(null=True, blank=True)
- detailed = models.BooleanField(null=False, default=True)
- showemail = models.BooleanField(null=False, default=True)
+ typename = models.CharField(max_length=32, null=False, blank=False)
+ sortorder = models.IntegerField(null=False, default=100)
+ extrainfo = models.TextField(null=True, blank=True)
+ detailed = models.BooleanField(null=False, default=True)
+ showemail = models.BooleanField(null=False, default=True)
- purge_urls = ('/community/contributors/', )
+ purge_urls = ('/community/contributors/', )
- def __unicode__(self):
- return self.typename
+ def __unicode__(self):
+ return self.typename
- class Meta:
- ordering = ('sortorder',)
+ class Meta:
+ ordering = ('sortorder',)
class Contributor(models.Model):
- ctype = models.ForeignKey(ContributorType)
- lastname = models.CharField(max_length=100, null=False, blank=False)
- firstname = models.CharField(max_length=100, null=False, blank=False)
- email = models.EmailField(null=False, blank=True)
- company = models.CharField(max_length=100, null=True, blank=True)
- companyurl = models.URLField(max_length=100, null=True, blank=True, verbose_name='Company URL')
- location = models.CharField(max_length=100, null=True, blank=True)
- contribution = models.TextField(null=True, blank=True)
- user = models.ForeignKey(User, null=True, blank=True)
-
- send_notification=True
- purge_urls = ('/community/contributors/', )
-
- def __unicode__(self):
- return "%s %s" % (self.firstname, self.lastname)
-
- class Meta:
- ordering = ('lastname', 'firstname',)
+ ctype = models.ForeignKey(ContributorType)
+ lastname = models.CharField(max_length=100, null=False, blank=False)
+ firstname = models.CharField(max_length=100, null=False, blank=False)
+ email = models.EmailField(null=False, blank=True)
+ company = models.CharField(max_length=100, null=True, blank=True)
+ companyurl = models.URLField(max_length=100, null=True, blank=True, verbose_name='Company URL')
+ location = models.CharField(max_length=100, null=True, blank=True)
+ contribution = models.TextField(null=True, blank=True)
+ user = models.ForeignKey(User, null=True, blank=True)
+
+ send_notification=True
+ purge_urls = ('/community/contributors/', )
+
+ def __unicode__(self):
+ return "%s %s" % (self.firstname, self.lastname)
+
+ class Meta:
+ ordering = ('lastname', 'firstname',)
def get_struct():
- yield ('community/contributors/', None)
+ yield ('community/contributors/', None)
from models import ContributorType
def completelist(request):
- contributortypes = list(ContributorType.objects.all())
- return render_pgweb(request, 'community', 'contributors/list.html', {
- 'contributortypes': contributortypes,
- })
+ contributortypes = list(ContributorType.objects.all())
+ return render_pgweb(request, 'community', 'contributors/list.html', {
+ 'contributortypes': contributortypes,
+ })
from pgweb.core.lookups import UserLookup
class OrganisationAdminForm(forms.ModelForm):
- class Meta:
- model = Organisation
- exclude = ()
- widgets = {
- 'managers': AutoCompleteSelectMultipleWidget(lookup_class=UserLookup),
- }
-
- def __init__(self, *args, **kwargs):
- super(OrganisationAdminForm, self).__init__(*args, **kwargs)
- self.fields['managers'].widget.can_add_related = False
- self.fields['managers'].widget.can_change_related = False
- self.fields['managers'].widget.can_delete_related = False
+ class Meta:
+ model = Organisation
+ exclude = ()
+ widgets = {
+ 'managers': AutoCompleteSelectMultipleWidget(lookup_class=UserLookup),
+ }
+
+ def __init__(self, *args, **kwargs):
+ super(OrganisationAdminForm, self).__init__(*args, **kwargs)
+ self.fields['managers'].widget.can_add_related = False
+ self.fields['managers'].widget.can_change_related = False
+ self.fields['managers'].widget.can_delete_related = False
class OrganisationAdmin(admin.ModelAdmin):
- form = OrganisationAdminForm
- list_display = ('name', 'approved', 'lastconfirmed',)
- list_filter = ('approved',)
- ordering = ('name', )
- search_fields = ('name', )
+ form = OrganisationAdminForm
+ list_display = ('name', 'approved', 'lastconfirmed',)
+ list_filter = ('approved',)
+ ordering = ('name', )
+ search_fields = ('name', )
class VersionAdmin(admin.ModelAdmin):
- list_display = ('versionstring', 'reldate', 'supported', 'current', )
+ list_display = ('versionstring', 'reldate', 'supported', 'current', )
admin.site.register(Version, VersionAdmin)
admin.site.register(OrganisationType)
from datetime import datetime, time
class VersionFeed(Feed):
- title = "PostgreSQL latest versions"
- link = "https://www.postgresql.org/"
- description = "PostgreSQL latest versions"
+ title = "PostgreSQL latest versions"
+ link = "https://www.postgresql.org/"
+ description = "PostgreSQL latest versions"
- description_template = 'core/version_rss_description.html'
- title_template = 'core/version_rss_title.html'
+ description_template = 'core/version_rss_description.html'
+ title_template = 'core/version_rss_title.html'
- def items(self):
- return Version.objects.filter(tree__gt=0).filter(testing=0)
+ def items(self):
+ return Version.objects.filter(tree__gt=0).filter(testing=0)
- def item_link(self, obj):
- return "https://www.postgresql.org/docs/%s/%s" % (obj.numtree, obj.relnotes)
+ def item_link(self, obj):
+ return "https://www.postgresql.org/docs/%s/%s" % (obj.numtree, obj.relnotes)
- def item_pubdate(self, obj):
- return datetime.combine(obj.reldate,time.min)
+ def item_pubdate(self, obj):
+ return datetime.combine(obj.reldate,time.min)
from django.contrib.auth.models import User
class OrganisationForm(forms.ModelForm):
- remove_manager = forms.ModelMultipleChoiceField(required=False, queryset=None, label="Current manager(s)", help_text="Select one or more managers to remove")
- add_manager = forms.EmailField(required=False)
+ remove_manager = forms.ModelMultipleChoiceField(required=False, queryset=None, label="Current manager(s)", help_text="Select one or more managers to remove")
+ add_manager = forms.EmailField(required=False)
- class Meta:
- model = Organisation
- exclude = ('lastconfirmed', 'approved', 'managers', )
+ class Meta:
+ model = Organisation
+ exclude = ('lastconfirmed', 'approved', 'managers', )
- def __init__(self, *args, **kwargs):
- super(OrganisationForm, self).__init__(*args, **kwargs)
- if self.instance and self.instance.pk:
- self.fields['remove_manager'].queryset = self.instance.managers.all()
- else:
- del self.fields['remove_manager']
- del self.fields['add_manager']
+ def __init__(self, *args, **kwargs):
+ super(OrganisationForm, self).__init__(*args, **kwargs)
+ if self.instance and self.instance.pk:
+ self.fields['remove_manager'].queryset = self.instance.managers.all()
+ else:
+ del self.fields['remove_manager']
+ del self.fields['add_manager']
- def clean_add_manager(self):
- if self.cleaned_data['add_manager']:
- # Something was added as manager - let's make sure the user exists
- try:
- User.objects.get(email=self.cleaned_data['add_manager'].lower())
- except User.DoesNotExist:
- raise ValidationError("User with email %s not found" % self.cleaned_data['add_manager'])
+ def clean_add_manager(self):
+ if self.cleaned_data['add_manager']:
+ # Something was added as manager - let's make sure the user exists
+ try:
+ User.objects.get(email=self.cleaned_data['add_manager'].lower())
+ except User.DoesNotExist:
+ raise ValidationError("User with email %s not found" % self.cleaned_data['add_manager'])
- return self.cleaned_data['add_manager']
+ return self.cleaned_data['add_manager']
- def clean_remove_manager(self):
- if self.cleaned_data['remove_manager']:
- removecount = 0
- for toremove in self.cleaned_data['remove_manager']:
- if toremove in self.instance.managers.all():
- removecount += 1
+ def clean_remove_manager(self):
+ if self.cleaned_data['remove_manager']:
+ removecount = 0
+ for toremove in self.cleaned_data['remove_manager']:
+ if toremove in self.instance.managers.all():
+ removecount += 1
- if len(self.instance.managers.all()) - removecount <= 0:
- raise ValidationError("Cannot remove all managers from an organsation!")
- return self.cleaned_data['remove_manager']
+ if len(self.instance.managers.all()) - removecount <= 0:
+ raise ValidationError("Cannot remove all managers from an organsation!")
+ return self.cleaned_data['remove_manager']
- def save(self, commit=True):
- model = super(OrganisationForm, self).save(commit=False)
- if self.cleaned_data.has_key('add_manager') and self.cleaned_data['add_manager']:
- model.managers.add(User.objects.get(email=self.cleaned_data['add_manager'].lower()))
- if self.cleaned_data.has_key('remove_manager') and self.cleaned_data['remove_manager']:
- for toremove in self.cleaned_data['remove_manager']:
- model.managers.remove(toremove)
+ def save(self, commit=True):
+ model = super(OrganisationForm, self).save(commit=False)
+ if self.cleaned_data.has_key('add_manager') and self.cleaned_data['add_manager']:
+ model.managers.add(User.objects.get(email=self.cleaned_data['add_manager'].lower()))
+ if self.cleaned_data.has_key('remove_manager') and self.cleaned_data['remove_manager']:
+ for toremove in self.cleaned_data['remove_manager']:
+ model.managers.remove(toremove)
- return model
+ return model
- def apply_submitter(self, model, User):
- model.managers.add(User)
+ def apply_submitter(self, model, User):
+ model.managers.add(User)
class MergeOrgsForm(forms.Form):
- merge_into = forms.ModelChoiceField(queryset=Organisation.objects.all())
- merge_from = forms.ModelChoiceField(queryset=Organisation.objects.all())
+ merge_into = forms.ModelChoiceField(queryset=Organisation.objects.all())
+ merge_from = forms.ModelChoiceField(queryset=Organisation.objects.all())
- def clean(self):
- if self.cleaned_data['merge_into'] == self.cleaned_data['merge_from']:
- raise ValidationError("The two organisations selected must be different!")
- return self.cleaned_data
+ def clean(self):
+ if self.cleaned_data['merge_into'] == self.cleaned_data['merge_from']:
+ raise ValidationError("The two organisations selected must be different!")
+ return self.cleaned_data
@staff_member_required
class UserLookup(ModelLookup):
- model = User
- search_fields = (
- 'username__icontains',
- 'first_name__icontains',
- 'last_name__icontains',
- )
- filters = {'is_active': True, }
+ model = User
+ search_fields = (
+ 'username__icontains',
+ 'first_name__icontains',
+ 'last_name__icontains',
+ )
+ filters = {'is_active': True, }
- def get_item_value(self, item):
- # Display for currently selected item
- return u"%s (%s)" % (item.username, item.get_full_name())
+ def get_item_value(self, item):
+ # Display for currently selected item
+ return u"%s (%s)" % (item.username, item.get_full_name())
- def get_item_label(self, item):
- # Display for choice listings
- return u"%s (%s)" % (item.username, item.get_full_name())
+ def get_item_label(self, item):
+ # Display for choice listings
+ return u"%s (%s)" % (item.username, item.get_full_name())
registry.register(UserLookup)
from pgweb.account.models import EmailChangeToken
class Command(BaseCommand):
- help = 'Cleanup old records'
+ help = 'Cleanup old records'
- def handle(self, *args, **options):
- # Grab advisory lock, if available. Lock id is just a random number
- # since we only need to interlock against ourselves. The lock is
- # automatically released when we're done.
- curs = connection.cursor()
- curs.execute("SELECT pg_try_advisory_lock(2896719)")
- if not curs.fetchall()[0][0]:
- print "Failed to get advisory lock, existing cleanup_old_records process stuck?"
- sys.exit(1)
+ def handle(self, *args, **options):
+ # Grab advisory lock, if available. Lock id is just a random number
+ # since we only need to interlock against ourselves. The lock is
+ # automatically released when we're done.
+ curs = connection.cursor()
+ curs.execute("SELECT pg_try_advisory_lock(2896719)")
+ if not curs.fetchall()[0][0]:
+ print "Failed to get advisory lock, existing cleanup_old_records process stuck?"
+ sys.exit(1)
- # Clean up old email change tokens
- with transaction.atomic():
- EmailChangeToken.objects.filter(sentat__lt=datetime.now()-timedelta(hours=24)).delete()
+ # Clean up old email change tokens
+ with transaction.atomic():
+ EmailChangeToken.objects.filter(sentat__lt=datetime.now()-timedelta(hours=24)).delete()
from pgweb.core.models import ImportedRSSFeed, ImportedRSSItem
class Command(BaseCommand):
- help = 'Fetch RSS feeds'
-
- def handle(self, *args, **options):
- socket.setdefaulttimeout(20)
-
- with transaction.atomic():
- for importfeed in ImportedRSSFeed.objects.all():
- try:
- feed = feedparser.parse(importfeed.url)
-
- if not hasattr(feed, 'status'):
- # bozo_excpetion can seemingly be set when there is no error as well,
- # so make sure we only check if we didn't get a status.
- if hasattr(feed,'bozo_exception'):
- raise Exception('Feed load error %s' % feed.bozo_exception)
- raise Exception('Feed load error with no exception!')
- if feed.status != 200:
- raise Exception('Feed returned status %s' % feed.status)
-
- fetchedsomething = False
- for entry in feed.entries:
- try:
- item = ImportedRSSItem.objects.get(feed=importfeed, url=entry.link)
- except ImportedRSSItem.DoesNotExist:
- item = ImportedRSSItem(feed=importfeed,
- title=entry.title[:100],
- url=entry.link,
- posttime=datetime(*(entry.published_parsed[0:6])),
- )
- item.save()
- fetchedsomething = True
-
- if fetchedsomething:
- importfeed.purge_related()
- except Exception, e:
- print "Failed to load %s: %s" % (importfeed, e)
+ help = 'Fetch RSS feeds'
+
+ def handle(self, *args, **options):
+ socket.setdefaulttimeout(20)
+
+ with transaction.atomic():
+ for importfeed in ImportedRSSFeed.objects.all():
+ try:
+ feed = feedparser.parse(importfeed.url)
+
+ if not hasattr(feed, 'status'):
+ # bozo_exception can seemingly be set when there is no error as well,
+ # so make sure we only check if we didn't get a status.
+ if hasattr(feed,'bozo_exception'):
+ raise Exception('Feed load error %s' % feed.bozo_exception)
+ raise Exception('Feed load error with no exception!')
+ if feed.status != 200:
+ raise Exception('Feed returned status %s' % feed.status)
+
+ fetchedsomething = False
+ for entry in feed.entries:
+ try:
+ item = ImportedRSSItem.objects.get(feed=importfeed, url=entry.link)
+ except ImportedRSSItem.DoesNotExist:
+ item = ImportedRSSItem(feed=importfeed,
+ title=entry.title[:100],
+ url=entry.link,
+ posttime=datetime(*(entry.published_parsed[0:6])),
+ )
+ item.save()
+ fetchedsomething = True
+
+ if fetchedsomething:
+ importfeed.purge_related()
+ except Exception, e:
+ print "Failed to load %s: %s" % (importfeed, e)
from pgweb.util.misc import send_template_mail
class Command(BaseCommand):
- help = 'Send moderation report'
+ help = 'Send moderation report'
- def handle(self, *args, **options):
- with transaction.atomic():
- counts = [{'name': unicode(x['name']), 'count': len(x['entries'])} for x in get_all_pending_moderations()]
- if len(counts):
- # Generate an email and send it off
- send_template_mail(settings.NOTIFICATION_FROM,
- settings.NOTIFICATION_EMAIL,
- "PostgreSQL moderation report: %s" % datetime.now(),
- "core/moderation_report.txt",
- {
- 'items': counts,
- })
+ def handle(self, *args, **options):
+ with transaction.atomic():
+ counts = [{'name': unicode(x['name']), 'count': len(x['entries'])} for x in get_all_pending_moderations()]
+ if len(counts):
+ # Generate an email and send it off
+ send_template_mail(settings.NOTIFICATION_FROM,
+ settings.NOTIFICATION_EMAIL,
+ "PostgreSQL moderation report: %s" % datetime.now(),
+ "core/moderation_report.txt",
+ {
+ 'items': counts,
+ })
from django.contrib.auth.models import User
class Command(BaseCommand):
- help = 'Dump interesting information about a session'
-
- def add_arguments(self, parser):
- parser.add_argument('sessionid')
-
- def handle(self, *args, **options):
- try:
- session = Session.objects.get(session_key=options['sessionid']).get_decoded()
- uid = session.get('_auth_user_id')
-
- print u"Session {0}".format(options['sessionid'])
-
- try:
- user = User.objects.get(pk=uid)
- print " -- Logged in user --"
- print u"Userid: {0}".format(uid)
- print u"Username: {0}".format(user.username)
- print u"Name: {0}".format(user.get_full_name())
- print u"Email: {0}".format(user.email)
- except User.DoesNotExist:
- print "** Associated user not found. Maybe not logged in?"
-
- # Remove known keys
- for k in ('_auth_user_id', '_auth_user_hash', '_auth_user_backend'):
- session.pop(k, None)
- if session:
- print " -- Other session values --"
- for k,v in session.items():
- print u"{0:20} {1}".format(k,v)
-
- except Session.DoesNotExist:
- raise CommandError('Session not found')
+ help = 'Dump interesting information about a session'
+
+ def add_arguments(self, parser):
+ parser.add_argument('sessionid')
+
+ def handle(self, *args, **options):
+ try:
+ session = Session.objects.get(session_key=options['sessionid']).get_decoded()
+ uid = session.get('_auth_user_id')
+
+ print u"Session {0}".format(options['sessionid'])
+
+ try:
+ user = User.objects.get(pk=uid)
+ print " -- Logged in user --"
+ print u"Userid: {0}".format(uid)
+ print u"Username: {0}".format(user.username)
+ print u"Name: {0}".format(user.get_full_name())
+ print u"Email: {0}".format(user.email)
+ except User.DoesNotExist:
+ print "** Associated user not found. Maybe not logged in?"
+
+ # Remove known keys
+ for k in ('_auth_user_id', '_auth_user_hash', '_auth_user_backend'):
+ session.pop(k, None)
+ if session:
+ print " -- Other session values --"
+ for k,v in session.items():
+ print u"{0:20} {1}".format(k,v)
+
+ except Session.DoesNotExist:
+ raise CommandError('Session not found')
import base64
TESTING_CHOICES = (
- (0, 'Release'),
- (1, 'Release candidate'),
- (2, 'Beta'),
- (3, 'Alpha'),
- )
+ (0, 'Release'),
+ (1, 'Release candidate'),
+ (2, 'Beta'),
+ (3, 'Alpha'),
+ )
+ # TESTING_SHORTSTRING is indexed by the numeric testing level above
+ # (see Version.buildversionstring).
TESTING_SHORTSTRING = ('', 'rc', 'beta', 'alpha')
class Version(models.Model):
- tree = models.DecimalField(max_digits=3, decimal_places=1, null=False, blank=False, unique=True)
- latestminor = models.IntegerField(null=False, blank=False, default=0, help_text="For testing versions, latestminor means latest beta/rc number. For other releases, it's the latest minor release number in the tree.")
- reldate = models.DateField(null=False, blank=False)
- relnotes = models.CharField(max_length=32, null=False, blank=False)
- current = models.BooleanField(null=False, blank=False, default=False)
- supported = models.BooleanField(null=False, blank=False, default=True)
- testing = models.IntegerField(null=False, blank=False, default=0, help_text="Testing level of this release. latestminor indicates beta/rc number", choices=TESTING_CHOICES)
- docsloaded = models.DateTimeField(null=True, blank=True, help_text="The timestamp of the latest docs load. Used to control indexing and info on developer docs.")
- firstreldate = models.DateField(null=False, blank=False, help_text="The date of the .0 release in this tree")
- eoldate = models.DateField(null=False, blank=False, help_text="The final release date for this tree")
-
- def __unicode__(self):
- return self.versionstring
-
- @property
- def versionstring(self):
- return self.buildversionstring(self.latestminor)
-
- @property
- def numtree(self):
- # Return the proper numeric tree version, taking into account that PostgreSQL 10
- # changed from x.y to x for major version.
- if self.tree >= 10:
- return int(self.tree)
- else:
- return self.tree
-
- def buildversionstring(self, minor):
- if not self.testing:
- return "%s.%s" % (self.numtree, minor)
- else:
- return "%s%s%s" % (self.numtree, TESTING_SHORTSTRING[self.testing], minor)
-
- @property
- def treestring(self):
- if not self.testing:
- return "%s" % self.numtree
- else:
- return "%s %s" % (self.numtree, TESTING_SHORTSTRING[self.testing])
-
- def save(self):
- # Make sure only one version at a time can be the current one.
- # (there may be some small race conditions here, but the likelyhood
- # that two admins are editing the version list at the same time...)
- if self.current:
- previous = Version.objects.filter(current=True)
- for p in previous:
- if not p == self:
- p.current = False
- p.save() # primary key check avoids recursion
-
- # Now that we've made any previously current ones non-current, we are
- # free to save this one.
- super(Version, self).save()
-
- class Meta:
- ordering = ('-tree', )
-
- def purge_urls(self):
- yield '/$'
- yield '/support/versioning'
- yield '/support/security'
- yield '/docs/$'
- yield '/docs/manuals'
- yield '/about/featurematrix/$'
- yield '/versions.rss'
+ tree = models.DecimalField(max_digits=3, decimal_places=1, null=False, blank=False, unique=True)
+ latestminor = models.IntegerField(null=False, blank=False, default=0, help_text="For testing versions, latestminor means latest beta/rc number. For other releases, it's the latest minor release number in the tree.")
+ reldate = models.DateField(null=False, blank=False)
+ relnotes = models.CharField(max_length=32, null=False, blank=False)
+ current = models.BooleanField(null=False, blank=False, default=False)
+ supported = models.BooleanField(null=False, blank=False, default=True)
+ testing = models.IntegerField(null=False, blank=False, default=0, help_text="Testing level of this release. latestminor indicates beta/rc number", choices=TESTING_CHOICES)
+ docsloaded = models.DateTimeField(null=True, blank=True, help_text="The timestamp of the latest docs load. Used to control indexing and info on developer docs.")
+ firstreldate = models.DateField(null=False, blank=False, help_text="The date of the .0 release in this tree")
+ eoldate = models.DateField(null=False, blank=False, help_text="The final release date for this tree")
+
+ # Display form built by buildversionstring, e.g. "9.6.3" or "10beta2".
+ def __unicode__(self):
+ return self.versionstring
+
+ @property
+ def versionstring(self):
+ return self.buildversionstring(self.latestminor)
+
+ @property
+ def numtree(self):
+ # Return the proper numeric tree version, taking into account that PostgreSQL 10
+ # changed from x.y to x for major version.
+ if self.tree >= 10:
+ return int(self.tree)
+ else:
+ return self.tree
+
+ def buildversionstring(self, minor):
+ if not self.testing:
+ return "%s.%s" % (self.numtree, minor)
+ else:
+ return "%s%s%s" % (self.numtree, TESTING_SHORTSTRING[self.testing], minor)
+
+ @property
+ def treestring(self):
+ if not self.testing:
+ return "%s" % self.numtree
+ else:
+ return "%s %s" % (self.numtree, TESTING_SHORTSTRING[self.testing])
+
+ def save(self):
+ # NOTE(review): this save() override takes no (*args, **kwargs), so any
+ # arguments Django passes to save() are dropped — verify callers.
+ # Make sure only one version at a time can be the current one.
+ # (there may be some small race conditions here, but the likelyhood
+ # that two admins are editing the version list at the same time...)
+ if self.current:
+ previous = Version.objects.filter(current=True)
+ for p in previous:
+ if not p == self:
+ p.current = False
+ p.save() # primary key check avoids recursion
+
+ # Now that we've made any previously current ones non-current, we are
+ # free to save this one.
+ super(Version, self).save()
+
+ class Meta:
+ ordering = ('-tree', )
+
+ def purge_urls(self):
+ yield '/$'
+ yield '/support/versioning'
+ yield '/support/security'
+ yield '/docs/$'
+ yield '/docs/manuals'
+ yield '/about/featurematrix/$'
+ yield '/versions.rss'
class Country(models.Model):
- name = models.CharField(max_length=100, null=False, blank=False)
- tld = models.CharField(max_length=3, null=False, blank=False)\r
+ name = models.CharField(max_length=100, null=False, blank=False)
+ # tld: country code suffix, at most 3 characters.
+ tld = models.CharField(max_length=3, null=False, blank=False)
- class Meta:
- db_table = 'countries'
- ordering = ('name',)
- verbose_name = 'Country'
- verbose_name_plural = 'Countries'
+ class Meta:
+ db_table = 'countries'
+ ordering = ('name',)
+ verbose_name = 'Country'
+ verbose_name_plural = 'Countries'
- def __unicode__(self):\r
- return self.name
+ def __unicode__(self):
+ return self.name
class Language(models.Model):
- # Import data from http://www.loc.gov/standards/iso639-2/ISO-639-2_utf-8.txt
- # (yes, there is a UTF16 BOM in the UTF8 file)
- # (and yes, there is a 7 length value in a field specified as 3 chars)
- alpha3 = models.CharField(max_length=7, null=False, blank=False, primary_key=True)
- alpha3term = models.CharField(max_length=3, null=False, blank=True)
- alpha2 = models.CharField(max_length=2, null=False, blank=True)
- name = models.CharField(max_length=100, null=False, blank=False)
- frenchname = models.CharField(max_length=100, null=False, blank=False)
+ # Import data from http://www.loc.gov/standards/iso639-2/ISO-639-2_utf-8.txt
+ # (yes, there is a UTF16 BOM in the UTF8 file)
+ # (and yes, there is a 7 length value in a field specified as 3 chars)
+ alpha3 = models.CharField(max_length=7, null=False, blank=False, primary_key=True)
+ alpha3term = models.CharField(max_length=3, null=False, blank=True)
+ alpha2 = models.CharField(max_length=2, null=False, blank=True)
+ name = models.CharField(max_length=100, null=False, blank=False)
+ frenchname = models.CharField(max_length=100, null=False, blank=False)
+ # The English name is also what Meta.ordering below sorts on.
- class Meta:
- ordering = ('name', )
+ class Meta:
+ ordering = ('name', )
- def __unicode__(self):
- return self.name
+ def __unicode__(self):
+ return self.name
class OrganisationType(models.Model):
- typename = models.CharField(max_length=32, null=False, blank=False)
+ # Simple lookup table holding the display name of an organisation type.
+ typename = models.CharField(max_length=32, null=False, blank=False)
- def __unicode__(self):
- return self.typename
+ def __unicode__(self):
+ return self.typename
class Organisation(models.Model):
- name = models.CharField(max_length=100, null=False, blank=False, unique=True)
- approved = models.BooleanField(null=False, default=False)
- address = models.TextField(null=False, blank=True)
- url = models.URLField(null=False, blank=False)
- email = models.EmailField(null=False, blank=True)
- phone = models.CharField(max_length=100, null=False, blank=True)
- orgtype = models.ForeignKey(OrganisationType, null=False, blank=False, verbose_name="Organisation type")
- managers = models.ManyToManyField(User, blank=False)
- lastconfirmed = models.DateTimeField(null=False, blank=False, auto_now_add=True)
+ name = models.CharField(max_length=100, null=False, blank=False, unique=True)
+ approved = models.BooleanField(null=False, default=False)
+ address = models.TextField(null=False, blank=True)
+ url = models.URLField(null=False, blank=False)
+ email = models.EmailField(null=False, blank=True)
+ phone = models.CharField(max_length=100, null=False, blank=True)
+ orgtype = models.ForeignKey(OrganisationType, null=False, blank=False, verbose_name="Organisation type")
+ managers = models.ManyToManyField(User, blank=False)
+ lastconfirmed = models.DateTimeField(null=False, blank=False, auto_now_add=True)
- send_notification = True
- send_m2m_notification = True
+ # NOTE(review): presumably read by moderation/notification machinery
+ # elsewhere in the project — confirm against the signal handlers.
+ send_notification = True
+ send_m2m_notification = True
- def __unicode__(self):
- return self.name
+ def __unicode__(self):
+ return self.name
- class Meta:
- ordering = ('name',)
+ class Meta:
+ ordering = ('name',)
# Basic classes for importing external RSS feeds, such as planet
class ImportedRSSFeed(models.Model):
- internalname = models.CharField(max_length=32, null=False, blank=False, unique=True)
- url = models.URLField(null=False, blank=False)
- purgepattern = models.CharField(max_length=512, null=False, blank=True, help_text="NOTE! Pattern will be automatically anchored with ^ at the beginning, but you must lead with a slash in most cases - and don't forget to include the trailing $ in most cases")
+ internalname = models.CharField(max_length=32, null=False, blank=False, unique=True)
+ url = models.URLField(null=False, blank=False)
+ purgepattern = models.CharField(max_length=512, null=False, blank=True, help_text="NOTE! Pattern will be automatically anchored with ^ at the beginning, but you must lead with a slash in most cases - and don't forget to include the trailing $ in most cases")
- def purge_related(self):
- if self.purgepattern:
- varnish_purge(self.purgepattern)
+ # Invalidate any cached pages matching purgepattern, when one is set.
+ def purge_related(self):
+ if self.purgepattern:
+ varnish_purge(self.purgepattern)
- def __unicode__(self):
- return self.internalname
+ def __unicode__(self):
+ return self.internalname
class ImportedRSSItem(models.Model):
- feed = models.ForeignKey(ImportedRSSFeed)
- title = models.CharField(max_length=100, null=False, blank=False)
- url = models.URLField(null=False, blank=False)
- posttime = models.DateTimeField(null=False, blank=False)
+ feed = models.ForeignKey(ImportedRSSFeed)
+ title = models.CharField(max_length=100, null=False, blank=False)
+ url = models.URLField(null=False, blank=False)
+ posttime = models.DateTimeField(null=False, blank=False)
- def __unicode__(self):
- return self.title
+ def __unicode__(self):
+ return self.title
- @property
- def date(self):
- return self.posttime.strftime("%Y-%m-%d")
+ # posttime formatted as a date-only string, YYYY-MM-DD.
+ @property
+ def date(self):
+ return self.posttime.strftime("%Y-%m-%d")
# From man sshd, except for ssh-dss
_valid_keytypes = ['ecdsa-sha2-nistp256', 'ecdsa-sha2-nistp384', 'ecdsa-sha2-nistp521', 'ssh-ed25519', 'ssh-rsa']
# Options, keytype, key, comment. But we don't support options.
def validate_sshkey(key):
- lines = key.splitlines()
- for k in lines:
- pieces = k.split()
- if len(pieces) == 0:
- raise ValidationError("Empty keys are not allowed")
- if len(pieces) > 3:
- raise ValidationError('Paste each ssh key without options, e.g. "ssh-rsa AAAAbbbcc mykey@machine"')
- if pieces[0] == 'ssh-dss':
- raise ValidationError("For security reasons, ssh-dss keys are not supported")
- if pieces[0] not in _valid_keytypes:
- raise ValidationError(u"Only keys of types {0} are supported, not {1}.".format(", ".join(_valid_keytypes), pieces[0]))
- try:
- base64.b64decode(pieces[1])
- except:
- raise ValidationError("Incorrect base64 encoded key!")
+ # Validate each line of the pasted key material independently.
+ lines = key.splitlines()
+ for k in lines:
+ pieces = k.split()
+ if len(pieces) == 0:
+ raise ValidationError("Empty keys are not allowed")
+ if len(pieces) > 3:
+ raise ValidationError('Paste each ssh key without options, e.g. "ssh-rsa AAAAbbbcc mykey@machine"')
+ if pieces[0] == 'ssh-dss':
+ raise ValidationError("For security reasons, ssh-dss keys are not supported")
+ if pieces[0] not in _valid_keytypes:
+ raise ValidationError(u"Only keys of types {0} are supported, not {1}.".format(", ".join(_valid_keytypes), pieces[0]))
+ try:
+ base64.b64decode(pieces[1])
+ except:
+ raise ValidationError("Incorrect base64 encoded key!")
+ # NOTE(review): the bare except above is pep8 E722; an "except TypeError"
+ # (the exception this codebase catches for b64decode elsewhere) would be
+ # the targeted form — confirm before changing.
# Extra attributes for users (if they have them)
class UserProfile(models.Model):
- user = models.OneToOneField(User, null=False, blank=False, primary_key=True)
- sshkey = models.TextField(null=False, blank=True, verbose_name="SSH key", help_text= "Paste one or more public keys in OpenSSH format, one per line.", validators=[validate_sshkey, ])
- lastmodified = models.DateTimeField(null=False, blank=False, auto_now=True)
+ user = models.OneToOneField(User, null=False, blank=False, primary_key=True)
+ # One key per line; each line is checked by validate_sshkey above.
+ sshkey = models.TextField(null=False, blank=True, verbose_name="SSH key", help_text= "Paste one or more public keys in OpenSSH format, one per line.", validators=[validate_sshkey, ])
+ lastmodified = models.DateTimeField(null=False, blank=False, auto_now=True)
# Notifications sent for any moderated content.
# Yes, we uglify it by storing the type of object as a string, so we don't
# end up with a bazillion fields being foreign keys. Ugly, but works.
class ModerationNotification(models.Model):
- objectid = models.IntegerField(null=False, blank=False, db_index=True)
- objecttype = models.CharField(null=False, blank=False, max_length=100)
- text = models.TextField(null=False, blank=False)
- author = models.CharField(null=False, blank=False, max_length=100)
- date = models.DateTimeField(null=False, blank=False, auto_now=True)
+ objectid = models.IntegerField(null=False, blank=False, db_index=True)
+ objecttype = models.CharField(null=False, blank=False, max_length=100)
+ text = models.TextField(null=False, blank=False)
+ author = models.CharField(null=False, blank=False, max_length=100)
+ date = models.DateTimeField(null=False, blank=False, auto_now=True)
- def __unicode__(self):
- return "%s id %s (%s): %s" % (self.objecttype, self.objectid, self.date, self.text[:50])
+ def __unicode__(self):
+ return "%s id %s (%s): %s" % (self.objecttype, self.objectid, self.date, self.text[:50])
- class Meta:
- ordering = ('-date', )
+ class Meta:
+ # newest first
+ ordering = ('-date', )
import os
def get_struct():
- yield ('', None)
- yield ('community/', None)
- yield ('support/versioning/', None)
+ # Each tuple yielded is (relative url, priority-or-None).
+ yield ('', None)
+ yield ('community/', None)
+ yield ('support/versioning/', None)
- # Enumerate all the templates that will generate pages
- pages_dir = os.path.abspath(os.path.join(os.path.dirname(__file__), '../../templates/pages/'))
- for root, dirs, files in os.walk(pages_dir):
- # Cut out the reference to the absolute root path
- r = '' if root == pages_dir else os.path.relpath(root, pages_dir)
- for f in files:
- if f.endswith('.html'):
- yield (os.path.join(r, f)[:-5] + "/",
- None)
+ # Enumerate all the templates that will generate pages
+ pages_dir = os.path.abspath(os.path.join(os.path.dirname(__file__), '../../templates/pages/'))
+ for root, dirs, files in os.walk(pages_dir):
+ # Cut out the reference to the absolute root path
+ r = '' if root == pages_dir else os.path.relpath(root, pages_dir)
+ for f in files:
+ if f.endswith('.html'):
+ # Strip the ".html" suffix and add a trailing slash.
+ yield (os.path.join(r, f)[:-5] + "/",
+ None)
@register.filter(name='class_name')
def class_name(ob):
- return ob.__class__.__name__
+ # Expose an object's Python class name to templates.
+ return ob.__class__.__name__
@register.filter(is_safe=True)
def field_class(value, arg):
- if 'class' in value.field.widget.attrs:
- c = arg + ' ' + value.field.widget.attrs['class']
- else:
- c = arg
- return value.as_widget(attrs={"class": c})
+ # Merge the extra CSS class with any classes already set on the widget.
+ if 'class' in value.field.widget.attrs:
+ c = arg + ' ' + value.field.widget.attrs['class']
+ else:
+ c = arg
+ return value.as_widget(attrs={"class": c})
@register.filter(name='hidemail')
@stringfilter
def hidemail(value):
- return value.replace('@', ' at ')
+ # Replace '@' with ' at ' in the rendered value.
+ return value.replace('@', ' at ')
@register.filter(is_safe=True)
def ischeckbox(obj):
- return obj.field.widget.__class__.__name__ in ["CheckboxInput", "CheckboxSelectMultiple"] and not getattr(obj.field, 'regular_field', False)
+ # True for checkbox-style widgets, unless the field opted out via regular_field.
+ return obj.field.widget.__class__.__name__ in ["CheckboxInput", "CheckboxSelectMultiple"] and not getattr(obj.field, 'regular_field', False)
@register.filter(is_safe=True)
def ismultiplecheckboxes(obj):
- return obj.field.widget.__class__.__name__ == "CheckboxSelectMultiple" and not getattr(obj.field, 'regular_field', False)
+ # True only for CheckboxSelectMultiple widgets (unless regular_field is set).
+ return obj.field.widget.__class__.__name__ == "CheckboxSelectMultiple" and not getattr(obj.field, 'regular_field', False)
@register.filter(is_safe=True)
def isrequired_error(obj):
- if obj.errors and obj.errors[0] == u"This field is required.":
- return True
- return False
+ # Compares against Django's default required-field message text.
+ if obj.errors and obj.errors[0] == u"This field is required.":
+ return True
+ return False
@register.filter(is_safe=True)
def label_class(value, arg):
- return value.label_tag(attrs={'class': arg})
+ # Render the field's label tag carrying the given CSS class.
+ return value.label_tag(attrs={'class': arg})
@register.filter()
def planet_author(obj):
- # takes a ImportedRSSItem object from a Planet feed and extracts the author
- # information from the title
- return obj.title.split(':')[0]
+ # takes a ImportedRSSItem object from a Planet feed and extracts the author
+ # information from the title
+ # (titles are formatted "author: post title")
+ return obj.title.split(':')[0]
@register.filter()
def planet_title(obj):
- # takes a ImportedRSSItem object from a Planet feed and extracts the info
- # specific to the title of the Planet entry
- return ":".join(obj.title.split(':')[1:])
+ # takes a ImportedRSSItem object from a Planet feed and extracts the info
+ # specific to the title of the Planet entry
+ # (re-joining keeps colons that appear inside the post title itself)
+ return ":".join(obj.title.split(':')[1:])
@register.filter(name='dictlookup')
def dictlookup(value, key):
@register.filter(name='json')
def tojson(value):
- return json.dumps(value)
+ # Serialize the value as JSON.
+ return json.dumps(value)
# Front page view
@cache(minutes=10)
def home(request):
- news = NewsArticle.objects.filter(approved=True)[:5]
- today = date.today()
- # get up to seven events to display on the homepage
- event_base_queryset = Event.objects.select_related('country').filter(
- approved=True,
- enddate__gte=today,
- )
- # first, see if there are up to two non-badged events within 90 days
- other_events = event_base_queryset.filter(
- badged=False,
- startdate__lte=today + timedelta(days=90),
- ).order_by('enddate', 'startdate')[:2]
- # based on that, get 7 - |other_events| community events to display
- community_event_queryset = event_base_queryset.filter(badged=True).order_by('enddate', 'startdate')[:(7 - other_events.count())]
- # now, return all the events in one unioned array!
- events = community_event_queryset.union(other_events).order_by('enddate', 'startdate').all()
- versions = Version.objects.filter(supported=True)
- planet = ImportedRSSItem.objects.filter(feed__internalname="planet").order_by("-posttime")[:9]
-
- return render(request, 'index.html', {
- 'title': 'The world\'s most advanced open source database',
- 'news': news,
- 'newstags': NewsTag.objects.all(),
- 'events': events,
- 'versions': versions,
- 'planet': planet,
- })
+ # Front page payload: news, events, supported versions and planet feed items.
+ news = NewsArticle.objects.filter(approved=True)[:5]
+ today = date.today()
+ # get up to seven events to display on the homepage
+ event_base_queryset = Event.objects.select_related('country').filter(
+ approved=True,
+ enddate__gte=today,
+ )
+ # first, see if there are up to two non-badged events within 90 days
+ other_events = event_base_queryset.filter(
+ badged=False,
+ startdate__lte=today + timedelta(days=90),
+ ).order_by('enddate', 'startdate')[:2]
+ # based on that, get 7 - |other_events| community events to display
+ community_event_queryset = event_base_queryset.filter(badged=True).order_by('enddate', 'startdate')[:(7 - other_events.count())]
+ # now, return all the events in one unioned array!
+ events = community_event_queryset.union(other_events).order_by('enddate', 'startdate').all()
+ versions = Version.objects.filter(supported=True)
+ planet = ImportedRSSItem.objects.filter(feed__internalname="planet").order_by("-posttime")[:9]
+
+ return render(request, 'index.html', {
+ 'title': 'The world\'s most advanced open source database',
+ 'news': news,
+ 'newstags': NewsTag.objects.all(),
+ 'events': events,
+ 'versions': versions,
+ 'planet': planet,
+ })
# About page view (contains information about PostgreSQL + random quotes)
@cache(minutes=10)
def about(request):
- # get 5 random quotes
- quotes = Quote.objects.filter(approved=True).order_by('?').all()[:5]
- return render_pgweb(request, 'about', 'core/about.html', {
- 'quotes': quotes,
- })
+ # get 5 random quotes
+ # order_by('?') asks the database for a random ordering.
+ quotes = Quote.objects.filter(approved=True).order_by('?').all()[:5]
+ return render_pgweb(request, 'about', 'core/about.html', {
+ 'quotes': quotes,
+ })
# Community main page (contains surveys and potentially more)
def community(request):
- s = Survey.objects.filter(current=True)
- try:
- s = s[0]
- except:
- s = None
- planet = ImportedRSSItem.objects.filter(feed__internalname="planet").order_by("-posttime")[:7]
- return render_pgweb(request, 'community', 'core/community.html', {
- 'survey': s,
- 'planet': planet,
- })
+ s = Survey.objects.filter(current=True)
+ try:
+ s = s[0]
+ except:
+ s = None
+ # NOTE(review): the bare except above also hides unrelated errors;
+ # IndexError (empty queryset) is the expected case here.
+ planet = ImportedRSSItem.objects.filter(feed__internalname="planet").order_by("-posttime")[:7]
+ return render_pgweb(request, 'community', 'core/community.html', {
+ 'survey': s,
+ 'planet': planet,
+ })
# List of supported versions
def versions(request):
- return render_pgweb(request, 'support', 'support/versioning.html', {
- 'versions': Version.objects.filter(tree__gt=0).filter(testing=0),
- })
+ # Only real trees (tree > 0) with testing == 0 are listed.
+ return render_pgweb(request, 'support', 'support/versioning.html', {
+ 'versions': Version.objects.filter(tree__gt=0).filter(testing=0),
+ })
re_staticfilenames = re.compile("^[0-9A-Z/_-]+$", re.IGNORECASE)
# Generic fallback view for static pages
def fallback(request, url):
- if url.find('..') > -1:
- raise Http404('Page not found.')
-
- if not re_staticfilenames.match(url):
- raise Http404('Page not found.')
-
- try:
- t = loader.get_template('pages/%s.html' % url)
- except TemplateDoesNotExist:
- try:
- t = loader.get_template('pages/%s/en.html' % url)
- except TemplateDoesNotExist:
- raise Http404('Page not found.')
-
- # Guestimate the nav section by looking at the URL and taking the first
- # piece of it.
- try:
- navsect = url.split('/',2)[0]
- except:
- navsect = ''
- c = PGWebContextProcessor(request)
- c.update({'navmenu': get_nav_menu(navsect)})
- return HttpResponse(t.render(c))
+ # Reject path traversal and anything outside the allowed character set.
+ if url.find('..') > -1:
+ raise Http404('Page not found.')
+
+ if not re_staticfilenames.match(url):
+ raise Http404('Page not found.')
+
+ # Try the exact page template first, then a language-suffixed variant.
+ try:
+ t = loader.get_template('pages/%s.html' % url)
+ except TemplateDoesNotExist:
+ try:
+ t = loader.get_template('pages/%s/en.html' % url)
+ except TemplateDoesNotExist:
+ raise Http404('Page not found.')
+
+ # Guestimate the nav section by looking at the URL and taking the first
+ # piece of it.
+ try:
+ navsect = url.split('/',2)[0]
+ except:
+ navsect = ''
+ # NOTE(review): str.split always returns at least one element, so the
+ # bare except above looks unreachable — confirm before removing.
+ c = PGWebContextProcessor(request)
+ c.update({'navmenu': get_nav_menu(navsect)})
+ return HttpResponse(t.render(c))
# Edit-forms for core objects
@login_required
def organisationform(request, itemid):
- if itemid != 'new':
- get_object_or_404(Organisation, pk=itemid, managers=request.user)
+ # Only a manager of an existing organisation may edit it; 404 otherwise.
+ if itemid != 'new':
+ get_object_or_404(Organisation, pk=itemid, managers=request.user)
- return simple_form(Organisation, itemid, request, OrganisationForm,
- redirect='/account/edit/organisations/')
+ return simple_form(Organisation, itemid, request, OrganisationForm,
+ redirect='/account/edit/organisations/')
# robots.txt
def robots(request):
- return HttpResponse("""User-agent: *
+ return HttpResponse("""User-agent: *
Disallow: /admin/
Disallow: /account/
Disallow: /docs/devel/
def _make_sitemap(pagelist):
- resp = HttpResponse(content_type='text/xml')
- x = PgXmlHelper(resp)
- x.startDocument()
- x.startElement('urlset', {'xmlns': 'http://www.sitemaps.org/schemas/sitemap/0.9'})
- pages = 0
- for p in pagelist:
- pages+=1
- x.startElement('url', {})
- x.add_xml_element('loc', 'https://www.postgresql.org/%s' % urllib.quote(p[0]))
- if len(p) > 1 and p[1]:
- x.add_xml_element('priority', unicode(p[1]))
- if len(p) > 2 and p[2]:
- x.add_xml_element('lastmod', p[2].isoformat() + "Z")
- x.endElement('url')
- x.endElement('urlset')
- x.endDocument()
- return resp
+ # Serialize (url, [priority, [lastmod]]) tuples as a sitemaps.org urlset.
+ resp = HttpResponse(content_type='text/xml')
+ x = PgXmlHelper(resp)
+ x.startDocument()
+ x.startElement('urlset', {'xmlns': 'http://www.sitemaps.org/schemas/sitemap/0.9'})
+ pages = 0
+ for p in pagelist:
+ pages+=1
+ # NOTE(review): 'pages' is counted but never used in this function.
+ x.startElement('url', {})
+ x.add_xml_element('loc', 'https://www.postgresql.org/%s' % urllib.quote(p[0]))
+ if len(p) > 1 and p[1]:
+ x.add_xml_element('priority', unicode(p[1]))
+ if len(p) > 2 and p[2]:
+ x.add_xml_element('lastmod', p[2].isoformat() + "Z")
+ x.endElement('url')
+ x.endElement('urlset')
+ x.endDocument()
+ return resp
# Sitemap (XML format)
@cache(hours=6)
def sitemap(request):
- return _make_sitemap(get_all_pages_struct())
+ # Cached for six hours via the decorator above.
+ return _make_sitemap(get_all_pages_struct())
# Internal sitemap (only for our own search engine)
# Note! Still served up to anybody who wants it, so don't
# put anything secret in it...
@cache(hours=6)
def sitemap_internal(request):
- return _make_sitemap(get_all_pages_struct(method='get_internal_struct'))
+ # Same output format as sitemap(), but built from the internal struct.
+ return _make_sitemap(get_all_pages_struct(method='get_internal_struct'))
# dynamic CSS serving, meaning we merge a number of different CSS into a
# single one, making sure it turns into a single http response. We do this
# dynamically, since the output will be cached.
_dynamic_cssmap = {
- 'base': ['media/css/main.css',
- 'media/css/normalize.css',],
- 'docs': ['media/css/global.css',
- 'media/css/table.css',
- 'media/css/text.css',
- 'media/css/docs.css'],
- }
+ # Maps a bundle name to the ordered list of files concatenated into it.
+ 'base': ['media/css/main.css',
+ 'media/css/normalize.css',],
+ 'docs': ['media/css/global.css',
+ 'media/css/table.css',
+ 'media/css/text.css',
+ 'media/css/docs.css'],
+ }
@cache(hours=6)
def dynamic_css(request, css):
- if not _dynamic_cssmap.has_key(css):
- raise Http404('CSS not found')
- files = _dynamic_cssmap[css]
- resp = HttpResponse(content_type='text/css')
-
- # We honor if-modified-since headers by looking at the most recently
- # touched CSS file.
- latestmod = 0
- for fn in files:
- try:
- stime = os.stat(fn).st_mtime
- if latestmod < stime:
- latestmod = stime
- except OSError:
- # If we somehow referred to a file that didn't exist, or
- # one that we couldn't access.
- raise Http404('CSS (sub) not found')
- if request.META.has_key('HTTP_IF_MODIFIED_SINCE'):
- # This code is mostly stolen from django :)
- matches = re.match(r"^([^;]+)(; length=([0-9]+))?$",
- request.META.get('HTTP_IF_MODIFIED_SINCE'),
- re.IGNORECASE)
- header_mtime = parse_http_date(matches.group(1))
- # We don't do length checking, just the date
- if int(latestmod) <= header_mtime:
- return HttpResponseNotModified(content_type='text/css')
- resp['Last-Modified'] = http_date(latestmod)
-
- for fn in files:
- with open(fn) as f:
- resp.write("/* %s */\n" % fn)
- resp.write(f.read())
- resp.write("\n")
-
- return resp
+ # Concatenate the mapped CSS files into one response, honoring If-Modified-Since.
+ # NOTE(review): dict.has_key is Python 2 only; "css not in _dynamic_cssmap"
+ # is the portable spelling.
+ if not _dynamic_cssmap.has_key(css):
+ raise Http404('CSS not found')
+ files = _dynamic_cssmap[css]
+ resp = HttpResponse(content_type='text/css')
+
+ # We honor if-modified-since headers by looking at the most recently
+ # touched CSS file.
+ latestmod = 0
+ for fn in files:
+ try:
+ stime = os.stat(fn).st_mtime
+ if latestmod < stime:
+ latestmod = stime
+ except OSError:
+ # If we somehow referred to a file that didn't exist, or
+ # one that we couldn't access.
+ raise Http404('CSS (sub) not found')
+ if request.META.has_key('HTTP_IF_MODIFIED_SINCE'):
+ # This code is mostly stolen from django :)
+ matches = re.match(r"^([^;]+)(; length=([0-9]+))?$",
+ request.META.get('HTTP_IF_MODIFIED_SINCE'),
+ re.IGNORECASE)
+ header_mtime = parse_http_date(matches.group(1))
+ # We don't do length checking, just the date
+ if int(latestmod) <= header_mtime:
+ return HttpResponseNotModified(content_type='text/css')
+ resp['Last-Modified'] = http_date(latestmod)
+
+ for fn in files:
+ with open(fn) as f:
+ resp.write("/* %s */\n" % fn)
+ resp.write(f.read())
+ resp.write("\n")
+
+ return resp
@nocache
def csrf_failure(request, reason=''):
- resp = render(request, 'errors/csrf_failure.html', {
- 'reason': reason,
- })
- resp.status_code = 403 # Forbidden
- return resp
+ # Render the friendly CSRF error page but keep the 403 status code.
+ resp = render(request, 'errors/csrf_failure.html', {
+ 'reason': reason,
+ })
+ resp.status_code = 403 # Forbidden
+ return resp
# Basic information about the connection
@cache(seconds=30)
def system_information(request):
- return render(request,'core/system_information.html', {
- 'server': os.uname()[1],
- 'cache_server': request.META['REMOTE_ADDR'] or None,
- 'client_ip': get_client_ip(request),
- 'django_version': django.get_version(),
- })
+ # NOTE(review): REMOTE_ADDR is labelled as the cache server here —
+ # presumably requests arrive via frontend caches; confirm deployment.
+ return render(request,'core/system_information.html', {
+ 'server': os.uname()[1],
+ 'cache_server': request.META['REMOTE_ADDR'] or None,
+ 'client_ip': get_client_ip(request),
+ 'django_version': django.get_version(),
+ })
# Sync timestamp for automirror. Keep it around for 30 seconds
# Basically just a check that we can access the backend still...
@cache(seconds=30)
def sync_timestamp(request):
- s = datetime.now().strftime("%Y-%m-%d %H:%M:%S\n")
- r = HttpResponse(s, content_type='text/plain')
- r['Content-Length'] = len(s)
- return r
+ # Plain-text timestamp with an explicit Content-Length.
+ s = datetime.now().strftime("%Y-%m-%d %H:%M:%S\n")
+ r = HttpResponse(s, content_type='text/plain')
+ r['Content-Length'] = len(s)
+ return r
# List of all unapproved objects, for the special admin page
@login_required
@user_passes_test(lambda u: u.is_staff)
@user_passes_test(lambda u: u.groups.filter(name='pgweb moderators').exists())
def admin_pending(request):
- return render(request, 'core/admin_pending.html', {
- 'app_list': get_all_pending_moderations(),
- })
+ # Access limited to staff in the 'pgweb moderators' group (decorators above).
+ return render(request, 'core/admin_pending.html', {
+ 'app_list': get_all_pending_moderations(),
+ })
# Purge objects from varnish, for the admin pages
@login_required
@user_passes_test(lambda u: u.is_staff)
@user_passes_test(lambda u: u.groups.filter(name='varnish purgers').exists())
def admin_purge(request):
- if request.method == 'POST':
- url = request.POST['url']
- expr = request.POST['expr']
- xkey = request.POST['xkey']
- l = len(filter(None, [url, expr, xkey]))
- if l == 0:
- # Nothing specified
- return HttpResponseRedirect('.')
- elif l > 1:
- messages.error(request, "Can only specify one of url, expression and xkey!")
- return HttpResponseRedirect('.')
-
- if url:
- varnish_purge(url)
- elif expr:
- varnish_purge_expr(expr)
- else:
- varnish_purge_xkey(xkey)
-
- messages.info(request, "Purge added.")
- return HttpResponseRedirect('.')
-
- # Fetch list of latest purges
- curs = connection.cursor()
- curs.execute("SELECT added, completed, consumer, CASE WHEN mode = 'K' THEN 'XKey' WHEN mode='P' THEN 'URL' ELSE 'Expression' END, expr FROM varnishqueue.queue q LEFT JOIN varnishqueue.consumers c ON c.consumerid=q.consumerid ORDER BY added DESC")
- latest = curs.fetchall()
-
- return render(request, 'core/admin_purge.html', {
- 'latest_purges': latest,
- })
+ # POST queues exactly one purge (url, expression or xkey);
+ # GET shows the most recent queue entries.
+ if request.method == 'POST':
+ url = request.POST['url']
+ expr = request.POST['expr']
+ xkey = request.POST['xkey']
+ l = len(filter(None, [url, expr, xkey]))
+ # NOTE(review): 'l' (count of non-empty fields) is pep8 E741 —
+ # an ambiguous single-letter name.
+ if l == 0:
+ # Nothing specified
+ return HttpResponseRedirect('.')
+ elif l > 1:
+ messages.error(request, "Can only specify one of url, expression and xkey!")
+ return HttpResponseRedirect('.')
+
+ if url:
+ varnish_purge(url)
+ elif expr:
+ varnish_purge_expr(expr)
+ else:
+ varnish_purge_xkey(xkey)
+
+ messages.info(request, "Purge added.")
+ return HttpResponseRedirect('.')
+
+ # Fetch list of latest purges
+ curs = connection.cursor()
+ curs.execute("SELECT added, completed, consumer, CASE WHEN mode = 'K' THEN 'XKey' WHEN mode='P' THEN 'URL' ELSE 'Expression' END, expr FROM varnishqueue.queue q LEFT JOIN varnishqueue.consumers c ON c.consumerid=q.consumerid ORDER BY added DESC")
+ latest = curs.fetchall()
+
+ return render(request, 'core/admin_purge.html', {
+ 'latest_purges': latest,
+ })
@csrf_exempt
def api_varnish_purge(request):
- if not request.META['REMOTE_ADDR'] in settings.VARNISH_PURGERS:
- return HttpServerError(request, "Invalid client address")
- if request.method != 'POST':
- return HttpServerError(request, "Can't use this way")
- n = int(request.POST['n'])
- curs = connection.cursor()
- for i in range(0, n):
- expr = request.POST['p%s' % i]
- curs.execute("SELECT varnish_purge_expr(%s)", (expr, ))
- return HttpResponse("Purged %s entries\n" % n)
+ if not request.META['REMOTE_ADDR'] in settings.VARNISH_PURGERS:
+ return HttpServerError(request, "Invalid client address")
+ if request.method != 'POST':
+ return HttpServerError(request, "Can't use this way")
+ n = int(request.POST['n'])
+ curs = connection.cursor()
+ for i in range(0, n):
+ expr = request.POST['p%s' % i]
+ curs.execute("SELECT varnish_purge_expr(%s)", (expr, ))
+ return HttpResponse("Purged %s entries\n" % n)
# Merge two organisations
@login_required
@user_passes_test(lambda u: u.is_superuser)
@transaction.atomic
def admin_mergeorg(request):
- if request.method == 'POST':
- form = MergeOrgsForm(data=request.POST)
- if form.is_valid():
- # Ok, try to actually merge organisations, by moving all objects
- # attached
- f = form.cleaned_data['merge_from']
- t = form.cleaned_data['merge_into']
- for e in f.event_set.all():
- e.org = t
- e.save()
- for n in f.newsarticle_set.all():
- n.org = t
- n.save()
- for p in f.product_set.all():
- p.org = t
- p.save()
- for p in f.professionalservice_set.all():
- p.organisation = t
- p.save()
- # Now that everything is moved, we can delete the organisation
- f.delete()
-
- return HttpResponseRedirect("/admin/core/organisation/")
- # Else fall through to re-render form with errors
- else:
- form = MergeOrgsForm()
-
- return render(request, 'core/admin_mergeorg.html', {
- 'form': form,
+ if request.method == 'POST':
+ form = MergeOrgsForm(data=request.POST)
+ if form.is_valid():
+ # Ok, try to actually merge organisations, by moving all objects
+ # attached
+ f = form.cleaned_data['merge_from']
+ t = form.cleaned_data['merge_into']
+ for e in f.event_set.all():
+ e.org = t
+ e.save()
+ for n in f.newsarticle_set.all():
+ n.org = t
+ n.save()
+ for p in f.product_set.all():
+ p.org = t
+ p.save()
+ for p in f.professionalservice_set.all():
+ p.organisation = t
+ p.save()
+ # Now that everything is moved, we can delete the organisation
+ f.delete()
+
+ return HttpResponseRedirect("/admin/core/organisation/")
+ # Else fall through to re-render form with errors
+ else:
+ form = MergeOrgsForm()
+
+ return render(request, 'core/admin_mergeorg.html', {
+ 'form': form,
})
from django import forms
class DocCommentForm(forms.Form):
- name = forms.CharField(max_length=100, required=True, label='Your Name')
- email = forms.EmailField(max_length=100, required=True, label='Your Email')
- shortdesc = forms.CharField(max_length=100, required=True, label="Subject")
- details = forms.CharField(required=True, widget=forms.Textarea,
- label="What is your comment?")
+ name = forms.CharField(max_length=100, required=True, label='Your Name')
+ email = forms.EmailField(max_length=100, required=True, label='Your Email')
+ shortdesc = forms.CharField(max_length=100, required=True, label="Subject")
+ details = forms.CharField(required=True, widget=forms.Textarea,
+ label="What is your comment?")
'verbose_name_plural': 'Doc page aliases',
},
),
- migrations.RunSQL("CREATE UNIQUE INDEX docsalias_unique ON docsalias (LEAST(file1, file2), GREATEST(file1, file2))"),
+ migrations.RunSQL("CREATE UNIQUE INDEX docsalias_unique ON docsalias (LEAST(file1, file2), GREATEST(file1, file2))"),
]
from pgweb.core.models import Version
class DocPage(models.Model):
- id = models.AutoField(null=False, primary_key=True)
- file = models.CharField(max_length=64, null=False, blank=False)
- version = models.ForeignKey(Version, null=False, blank=False, db_column='version', to_field='tree')
- title = models.CharField(max_length=256, null=True, blank=True)
- content = models.TextField(null=True, blank=True)
+ id = models.AutoField(null=False, primary_key=True)
+ file = models.CharField(max_length=64, null=False, blank=False)
+ version = models.ForeignKey(Version, null=False, blank=False, db_column='version', to_field='tree')
+ title = models.CharField(max_length=256, null=True, blank=True)
+ content = models.TextField(null=True, blank=True)
- def display_version(self):
- """Version as used for displaying and in URLs"""
- if self.version.tree == 0:
- return 'devel'
- else:
- return str(self.version.numtree)
+ def display_version(self):
+ """Version as used for displaying and in URLs"""
+ if self.version.tree == 0:
+ return 'devel'
+ else:
+ return str(self.version.numtree)
- class Meta:
- db_table = 'docs'
- # Index file first, because we want to list versions by file
- unique_together = [('file', 'version')]
+ class Meta:
+ db_table = 'docs'
+ # Index file first, because we want to list versions by file
+ unique_together = [('file', 'version')]
class DocPageAlias(models.Model):
- file1 = models.CharField(max_length=64, null=False, blank=False, unique=True)
- file2 = models.CharField(max_length=64, null=False, blank=False, unique=True)
+ file1 = models.CharField(max_length=64, null=False, blank=False, unique=True)
+ file2 = models.CharField(max_length=64, null=False, blank=False, unique=True)
- def __unicode__(self):
- return u"%s <-> %s" % (self.file1, self.file2)
+ def __unicode__(self):
+ return u"%s <-> %s" % (self.file1, self.file2)
- # XXX: needs a unique functional index as well, see the migration!
- class Meta:
- db_table = 'docsalias'
- verbose_name_plural='Doc page aliases'
+ # XXX: needs a unique functional index as well, see the migration!
+ class Meta:
+ db_table = 'docsalias'
+ verbose_name_plural='Doc page aliases'
from pgweb.core.models import Version
def get_struct():
- currentversion = Version.objects.get(current=True)
-
- # Can't use a model here, because we don't (for some reason) have a
- # hard link to the versions table here
- # Make sure we exclude the /devel/ docs because they are blocked by
- # robots.txt, and thus will cause tohusands of warnings in search
- # engines.
- curs = connection.cursor()
- curs.execute("SELECT d.version, d.file, v.docsloaded, v.testing FROM docs d INNER JOIN core_version v ON v.tree=d.version WHERE version > 0 ORDER BY d.version DESC")
-
- # Start priority is higher than average but lower than what we assign
- # to the current version of the docs.
- docprio = 0.8
- lastversion = None
-
- for version, filename, loaded, testing in curs.fetchall():
- # Decrease the priority with 0.1 for every version of the docs
- # we move back in time, until we reach 0.1. At 0.1 it's unlikely
- # to show up in a general search, but still possible to reach
- # through version specific searching for example.
- if lastversion != version:
- if docprio > 0.2:
- docprio -= 0.1
- lastversion = version
-
- if version >= 10:
- version = int(version)
-
- yield ('docs/%s/%s' % (version, filename),
- testing and 0.1 or docprio, # beta/rc versions always get 0.1 in prio
- loaded)
-
- # Also yield the current version urls, with the highest
- # possible priority
- if version == currentversion.tree:
- yield ('docs/current/%s' % filename,
- 1.0, loaded)
+ currentversion = Version.objects.get(current=True)
+
+ # Can't use a model here, because we don't (for some reason) have a
+ # hard link to the versions table here
+ # Make sure we exclude the /devel/ docs because they are blocked by
+    # robots.txt, and thus will cause thousands of warnings in search
+ # engines.
+ curs = connection.cursor()
+ curs.execute("SELECT d.version, d.file, v.docsloaded, v.testing FROM docs d INNER JOIN core_version v ON v.tree=d.version WHERE version > 0 ORDER BY d.version DESC")
+
+ # Start priority is higher than average but lower than what we assign
+ # to the current version of the docs.
+ docprio = 0.8
+ lastversion = None
+
+ for version, filename, loaded, testing in curs.fetchall():
+ # Decrease the priority with 0.1 for every version of the docs
+ # we move back in time, until we reach 0.1. At 0.1 it's unlikely
+ # to show up in a general search, but still possible to reach
+ # through version specific searching for example.
+ if lastversion != version:
+ if docprio > 0.2:
+ docprio -= 0.1
+ lastversion = version
+
+ if version >= 10:
+ version = int(version)
+
+ yield ('docs/%s/%s' % (version, filename),
+ testing and 0.1 or docprio, # beta/rc versions always get 0.1 in prio
+ loaded)
+
+ # Also yield the current version urls, with the highest
+ # possible priority
+ if version == currentversion.tree:
+ yield ('docs/current/%s' % filename,
+ 1.0, loaded)
# For our internal sitemap (used only by our own search engine),
# include the devel version of the docs (and only those, since the
# other versions are already included)
def get_internal_struct():
- curs = connection.cursor()
- curs.execute("SELECT d.file, v.docsloaded FROM docs d INNER JOIN core_version v ON v.tree=d.version WHERE version = 0")
+ curs = connection.cursor()
+ curs.execute("SELECT d.file, v.docsloaded FROM docs d INNER JOIN core_version v ON v.tree=d.version WHERE version = 0")
- for filename, loaded in curs.fetchall():
- yield ('docs/devel/%s' % (filename, ),
- 0.1,
- loaded)
+ for filename, loaded in curs.fetchall():
+ yield ('docs/devel/%s' % (filename, ),
+ 0.1,
+ loaded)
@allow_frames
@content_sources('style', "'unsafe-inline'")
def docpage(request, version, filename):
- loaddate = None
- # Get the current version both to map the /current/ url, and to later
- # determine if we allow comments on this page.
- currver = Version.objects.filter(current=True)[0].tree
- if version == 'current':
- ver = currver
- elif version == 'devel':
- ver = Decimal(0)
- loaddate = Version.objects.get(tree=Decimal(0)).docsloaded
- else:
- ver = Decimal(version)
- if ver == Decimal(0):
- raise Http404("Version not found")
-
- if ver < Decimal("7.1") and ver > Decimal(0):
- extension = "htm"
- else:
- extension = "html"
-
- if ver < Decimal("7.1") and ver > Decimal(0):
- indexname = "postgres.htm"
- elif ver == Decimal("7.1"):
- indexname = "postgres.html"
- else:
- indexname = "index.html"
-
- if ver >= 10 and version.find('.') > -1:
- # Version 10 and up, but specified as 10.0 / 11.0 etc, so redirect back without the
- # decimal.
- return HttpResponsePermanentRedirect("/docs/{0}/{1}.html".format(int(ver), filename))
-
- fullname = "%s.%s" % (filename, extension)
- page = get_object_or_404(DocPage, version=ver, file=fullname)
- versions = DocPage.objects.extra(
- where=["file=%s OR file IN (SELECT file2 FROM docsalias WHERE file1=%s) OR file IN (SELECT file1 FROM docsalias WHERE file2=%s)"],
- params=[fullname, fullname, fullname],
- select={
- 'supported':"COALESCE((SELECT supported FROM core_version v WHERE v.tree=version), 'f')",
- 'testing':"COALESCE((SELECT testing FROM core_version v WHERE v.tree=version),0)",
- }).order_by('-supported', 'version').only('version', 'file')
-
- return render(request, 'docs/docspage.html', {
- 'page': page,
- 'supported_versions': [v for v in versions if v.supported],
- 'devel_versions': [v for v in versions if not v.supported and v.testing],
- 'unsupported_versions': [v for v in versions if not v.supported and not v.testing],
- 'title': page.title,
- 'doc_index_filename': indexname,
- 'loaddate': loaddate,
- })
+ loaddate = None
+ # Get the current version both to map the /current/ url, and to later
+ # determine if we allow comments on this page.
+ currver = Version.objects.filter(current=True)[0].tree
+ if version == 'current':
+ ver = currver
+ elif version == 'devel':
+ ver = Decimal(0)
+ loaddate = Version.objects.get(tree=Decimal(0)).docsloaded
+ else:
+ ver = Decimal(version)
+ if ver == Decimal(0):
+ raise Http404("Version not found")
+
+ if ver < Decimal("7.1") and ver > Decimal(0):
+ extension = "htm"
+ else:
+ extension = "html"
+
+ if ver < Decimal("7.1") and ver > Decimal(0):
+ indexname = "postgres.htm"
+ elif ver == Decimal("7.1"):
+ indexname = "postgres.html"
+ else:
+ indexname = "index.html"
+
+ if ver >= 10 and version.find('.') > -1:
+ # Version 10 and up, but specified as 10.0 / 11.0 etc, so redirect back without the
+ # decimal.
+ return HttpResponsePermanentRedirect("/docs/{0}/{1}.html".format(int(ver), filename))
+
+ fullname = "%s.%s" % (filename, extension)
+ page = get_object_or_404(DocPage, version=ver, file=fullname)
+ versions = DocPage.objects.extra(
+ where=["file=%s OR file IN (SELECT file2 FROM docsalias WHERE file1=%s) OR file IN (SELECT file1 FROM docsalias WHERE file2=%s)"],
+ params=[fullname, fullname, fullname],
+ select={
+ 'supported':"COALESCE((SELECT supported FROM core_version v WHERE v.tree=version), 'f')",
+ 'testing':"COALESCE((SELECT testing FROM core_version v WHERE v.tree=version),0)",
+ }).order_by('-supported', 'version').only('version', 'file')
+
+ return render(request, 'docs/docspage.html', {
+ 'page': page,
+ 'supported_versions': [v for v in versions if v.supported],
+ 'devel_versions': [v for v in versions if not v.supported and v.testing],
+ 'unsupported_versions': [v for v in versions if not v.supported and not v.testing],
+ 'title': page.title,
+ 'doc_index_filename': indexname,
+ 'loaddate': loaddate,
+ })
def docspermanentredirect(request, version, typ, page, *args):
- """Provides a permanent redirect from the old static/interactive pages to
- the modern pages that do not have said keywords.
- """
- url = "/docs/%s/" % version
- if page:
- url += page
- return HttpResponsePermanentRedirect(url)
+ """Provides a permanent redirect from the old static/interactive pages to
+ the modern pages that do not have said keywords.
+ """
+ url = "/docs/%s/" % version
+ if page:
+ url += page
+ return HttpResponsePermanentRedirect(url)
def docsrootpage(request, version):
- return docpage(request, version, 'index')
+ return docpage(request, version, 'index')
def redirect_root(request, version):
- return HttpResponsePermanentRedirect("/docs/%s/" % version)
+ return HttpResponsePermanentRedirect("/docs/%s/" % version)
def root(request):
- versions = Version.objects.filter(Q(supported=True) | Q(testing__gt=0,tree__gt=0)).order_by('-tree')
- return render_pgweb(request, 'docs', 'docs/index.html', {
- 'versions': versions,
- })
+ versions = Version.objects.filter(Q(supported=True) | Q(testing__gt=0,tree__gt=0)).order_by('-tree')
+ return render_pgweb(request, 'docs', 'docs/index.html', {
+ 'versions': versions,
+ })
class _VersionPdfWrapper(object):
- """
- A wrapper around a version that knows to look for PDF files, and
- return their sizes.
- """
- def __init__(self, version):
- self.__version = version
- self.a4pdf = self._find_pdf('A4')
- self.uspdf = self._find_pdf('US')
- # Some versions have, ahem, strange index filenames
- if self.__version.tree < Decimal('6.4'):
- self.indexname = 'book01.htm'
- elif self.__version.tree < Decimal('7.0'):
- self.indexname = 'postgres.htm'
- elif self.__version.tree < Decimal('7.2'):
- self.indexname = 'postgres.html'
- else:
- self.indexname = 'index.html'
- def __getattr__(self, name):
- return getattr(self.__version, name)
- def _find_pdf(self, pagetype):
- try:
- return os.stat('%s/documentation/pdf/%s/postgresql-%s-%s.pdf' % (settings.STATIC_CHECKOUT, self.__version.numtree, self.__version.numtree, pagetype)).st_size
- except:
- return 0
+ """
+ A wrapper around a version that knows to look for PDF files, and
+ return their sizes.
+ """
+ def __init__(self, version):
+ self.__version = version
+ self.a4pdf = self._find_pdf('A4')
+ self.uspdf = self._find_pdf('US')
+ # Some versions have, ahem, strange index filenames
+ if self.__version.tree < Decimal('6.4'):
+ self.indexname = 'book01.htm'
+ elif self.__version.tree < Decimal('7.0'):
+ self.indexname = 'postgres.htm'
+ elif self.__version.tree < Decimal('7.2'):
+ self.indexname = 'postgres.html'
+ else:
+ self.indexname = 'index.html'
+ def __getattr__(self, name):
+ return getattr(self.__version, name)
+ def _find_pdf(self, pagetype):
+ try:
+ return os.stat('%s/documentation/pdf/%s/postgresql-%s-%s.pdf' % (settings.STATIC_CHECKOUT, self.__version.numtree, self.__version.numtree, pagetype)).st_size
+ except:
+ return 0
def manuals(request):
- versions = Version.objects.filter(Q(supported=True) | Q(testing__gt=0,tree__gt=0)).order_by('-tree')
- return render_pgweb(request, 'docs', 'docs/manuals.html', {
- 'versions': [_VersionPdfWrapper(v) for v in versions],
- })
+ versions = Version.objects.filter(Q(supported=True) | Q(testing__gt=0,tree__gt=0)).order_by('-tree')
+ return render_pgweb(request, 'docs', 'docs/manuals.html', {
+ 'versions': [_VersionPdfWrapper(v) for v in versions],
+ })
def manualarchive(request):
- versions = Version.objects.filter(testing=0,supported=False,tree__gt=0).order_by('-tree')
- return render_pgweb(request, 'docs', 'docs/archive.html', {
- 'versions': [_VersionPdfWrapper(v) for v in versions],
- })
+ versions = Version.objects.filter(testing=0,supported=False,tree__gt=0).order_by('-tree')
+ return render_pgweb(request, 'docs', 'docs/archive.html', {
+ 'versions': [_VersionPdfWrapper(v) for v in versions],
+ })
@login_required
def commentform(request, itemid, version, filename):
- v = get_object_or_404(Version, tree=version)
- if not v.supported:
- # No docs comments on unsupported versions
- return HttpResponseRedirect("/docs/{0}/{1}".format(version, filename))
-
- if request.method == 'POST':
- form = DocCommentForm(request.POST)
- if form.is_valid():
- if version == '0.0':
- version = 'devel'
-
- send_template_mail(
- settings.DOCSREPORT_NOREPLY_EMAIL,
- settings.DOCSREPORT_EMAIL,
- '%s' % form.cleaned_data['shortdesc'],
- 'docs/docsbugmail.txt', {
- 'version': version,
- 'filename': filename,
- 'details': form.cleaned_data['details'],
- },
- usergenerated=True,
- cc=form.cleaned_data['email'],
- replyto='%s, %s' % (form.cleaned_data['email'], settings.DOCSREPORT_EMAIL),
- sendername='PG Doc comments form'
- )
- return render_pgweb(request, 'docs', 'docs/docsbug_completed.html', {})
- else:
- form = DocCommentForm(initial={
- 'name': '%s %s' % (request.user.first_name, request.user.last_name),
- 'email': request.user.email,
- })
-
- return render_pgweb(request, 'docs', 'base/form.html', {
- 'form': form,
- 'formitemtype': 'documentation comment',
- 'operation': 'Submit',
- 'form_intro': template_to_string('docs/docsbug.html', {
- 'user': request.user,
- }),
- 'savebutton': 'Send Email',
- })
+ v = get_object_or_404(Version, tree=version)
+ if not v.supported:
+ # No docs comments on unsupported versions
+ return HttpResponseRedirect("/docs/{0}/{1}".format(version, filename))
+
+ if request.method == 'POST':
+ form = DocCommentForm(request.POST)
+ if form.is_valid():
+ if version == '0.0':
+ version = 'devel'
+
+ send_template_mail(
+ settings.DOCSREPORT_NOREPLY_EMAIL,
+ settings.DOCSREPORT_EMAIL,
+ '%s' % form.cleaned_data['shortdesc'],
+ 'docs/docsbugmail.txt', {
+ 'version': version,
+ 'filename': filename,
+ 'details': form.cleaned_data['details'],
+ },
+ usergenerated=True,
+ cc=form.cleaned_data['email'],
+ replyto='%s, %s' % (form.cleaned_data['email'], settings.DOCSREPORT_EMAIL),
+ sendername='PG Doc comments form'
+ )
+ return render_pgweb(request, 'docs', 'docs/docsbug_completed.html', {})
+ else:
+ form = DocCommentForm(initial={
+ 'name': '%s %s' % (request.user.first_name, request.user.last_name),
+ 'email': request.user.email,
+ })
+
+ return render_pgweb(request, 'docs', 'base/form.html', {
+ 'form': form,
+ 'formitemtype': 'documentation comment',
+ 'operation': 'Submit',
+ 'form_intro': template_to_string('docs/docsbug.html', {
+ 'user': request.user,
+ }),
+ 'savebutton': 'Send Email',
+ })
from models import StackBuilderApp, Category, Product, LicenceType
class ProductAdmin(PgwebAdmin):
- list_display = ('name', 'org', 'approved', 'lastconfirmed',)
- list_filter = ('approved',)
- search_fields = ('name', 'description', )
- ordering = ('name', )
+ list_display = ('name', 'org', 'approved', 'lastconfirmed',)
+ list_filter = ('approved',)
+ search_fields = ('name', 'description', )
+ ordering = ('name', )
def duplicate_stackbuilderapp(modeladmin, request, queryset):
- # Duplicate each individual selected object, but turn off
- # the active flag if it's on.
- for o in queryset:
- o.id = None # Triggers creation of a new object
- o.active = False
- o.textid = o.textid + "_new"
- o.save()
+ # Duplicate each individual selected object, but turn off
+ # the active flag if it's on.
+ for o in queryset:
+ o.id = None # Triggers creation of a new object
+ o.active = False
+ o.textid = o.textid + "_new"
+ o.save()
duplicate_stackbuilderapp.short_description = "Duplicate application"
class StackBuilderAppAdminForm(forms.ModelForm):
- class Meta:
- model = StackBuilderApp
- exclude = ()
+ class Meta:
+ model = StackBuilderApp
+ exclude = ()
- def clean_textid(self):
- if not re.match('^[a-z0-9_]*$', self.cleaned_data['textid']):
- raise ValidationError('Only lowerchase characters, numbers and underscore allowed!')
- return self.cleaned_data['textid']
+ def clean_textid(self):
+ if not re.match('^[a-z0-9_]*$', self.cleaned_data['textid']):
+ raise ValidationError('Only lowerchase characters, numbers and underscore allowed!')
+ return self.cleaned_data['textid']
- def clean_txtdependencies(self):
- if len(self.cleaned_data['txtdependencies']) == 0:
- return ''
+ def clean_txtdependencies(self):
+ if len(self.cleaned_data['txtdependencies']) == 0:
+ return ''
- deplist = self.cleaned_data['txtdependencies'].split(',')
- if len(deplist) != len(set(deplist)):
- raise ValidationError('Duplicate dependencies not allowed!')
+ deplist = self.cleaned_data['txtdependencies'].split(',')
+ if len(deplist) != len(set(deplist)):
+ raise ValidationError('Duplicate dependencies not allowed!')
- for d in deplist:
- if not StackBuilderApp.objects.filter(textid=d).exists():
- raise ValidationError("Dependency '%s' does not exist!" % d)
- return self.cleaned_data['txtdependencies']
+ for d in deplist:
+ if not StackBuilderApp.objects.filter(textid=d).exists():
+ raise ValidationError("Dependency '%s' does not exist!" % d)
+ return self.cleaned_data['txtdependencies']
class StackBuilderAppAdmin(admin.ModelAdmin):
- list_display = ('textid', 'active', 'name', 'platform', 'version', )
- actions = [duplicate_stackbuilderapp, ]
- form = StackBuilderAppAdminForm
+ list_display = ('textid', 'active', 'name', 'platform', 'version', )
+ actions = [duplicate_stackbuilderapp, ]
+ form = StackBuilderAppAdminForm
admin.site.register(Category)
admin.site.register(LicenceType)
from models import Product
class ProductForm(forms.ModelForm):
- form_intro = """Note that in order to register a new product, you must first register an organisation.
+ form_intro = """Note that in order to register a new product, you must first register an organisation.
If you have not done so, use <a href="/account/organisations/new/">this form</a>."""
- def __init__(self, *args, **kwargs):
- super(ProductForm, self).__init__(*args, **kwargs)
- def filter_by_user(self, user):
- self.fields['org'].queryset = Organisation.objects.filter(managers=user, approved=True)
- class Meta:
- model = Product
- exclude = ('lastconfirmed', 'approved', )
+ def __init__(self, *args, **kwargs):
+ super(ProductForm, self).__init__(*args, **kwargs)
+ def filter_by_user(self, user):
+ self.fields['org'].queryset = Organisation.objects.filter(managers=user, approved=True)
+ class Meta:
+ model = Product
+ exclude = ('lastconfirmed', 'approved', )
class Category(models.Model):
- catname = models.CharField(max_length=100, null=False, blank=False)
- blurb = models.TextField(null=False, blank=True)
+ catname = models.CharField(max_length=100, null=False, blank=False)
+ blurb = models.TextField(null=False, blank=True)
- def __unicode__(self):
- return self.catname
+ def __unicode__(self):
+ return self.catname
- class Meta:
- ordering = ('catname',)
+ class Meta:
+ ordering = ('catname',)
class LicenceType(models.Model):
- typename = models.CharField(max_length=100, null=False, blank=False)
+ typename = models.CharField(max_length=100, null=False, blank=False)
- def __unicode__(self):
- return self.typename
+ def __unicode__(self):
+ return self.typename
- class Meta:
- ordering = ('typename',)
+ class Meta:
+ ordering = ('typename',)
class Product(models.Model):
- name = models.CharField(max_length=100, null=False, blank=False, unique=True)
- approved = models.BooleanField(null=False, default=False)
- org = models.ForeignKey(Organisation, db_column="publisher_id", null=False, verbose_name="Organisation")
- url = models.URLField(null=False, blank=False)
- category = models.ForeignKey(Category, null=False)
- licencetype = models.ForeignKey(LicenceType, null=False, verbose_name="Licence type")
- description = models.TextField(null=False, blank=False)
- price = models.CharField(max_length=200, null=False, blank=True)
- lastconfirmed = models.DateTimeField(null=False, blank=False, auto_now_add=True)
+ name = models.CharField(max_length=100, null=False, blank=False, unique=True)
+ approved = models.BooleanField(null=False, default=False)
+ org = models.ForeignKey(Organisation, db_column="publisher_id", null=False, verbose_name="Organisation")
+ url = models.URLField(null=False, blank=False)
+ category = models.ForeignKey(Category, null=False)
+ licencetype = models.ForeignKey(LicenceType, null=False, verbose_name="Licence type")
+ description = models.TextField(null=False, blank=False)
+ price = models.CharField(max_length=200, null=False, blank=True)
+ lastconfirmed = models.DateTimeField(null=False, blank=False, auto_now_add=True)
- send_notification = True
- markdown_fields = ('description', )
+ send_notification = True
+ markdown_fields = ('description', )
- def __unicode__(self):
- return self.name
+ def __unicode__(self):
+ return self.name
- def verify_submitter(self, user):
- return (len(self.org.managers.filter(pk=user.pk)) == 1)
+ def verify_submitter(self, user):
+ return (len(self.org.managers.filter(pk=user.pk)) == 1)
- class Meta:
- ordering = ('name',)
+ class Meta:
+ ordering = ('name',)
class StackBuilderApp(models.Model):
- textid = models.CharField(max_length=100, null=False, blank=False)
- version = models.CharField(max_length=20, null=False, blank=False)
- platform = models.CharField(max_length=20, null=False, blank=False,
- choices= (('windows', 'Windows (32-bit)'), ('windows-x64', 'Windows (64-bit)'), ('osx', 'Mac OS X'),
- ('linux', 'Linux (32-bit)'), ('linux-x64', 'Linux (64-bit)'))
- )
- secondaryplatform = models.CharField(max_length=20, null=False, blank=True,
- choices= (('', 'None'), ('windows', 'Windows (32-bit)'), ('windows-x64', 'Windows (64-bit)'),
- ('osx', 'Mac OS X'), ('linux', 'Linux (32-bit)'), ('linux-x64', 'Linux (64-bit)'))
- )
- name = models.CharField(max_length=500, null=False, blank=False)
- active = models.BooleanField(null=False, blank=False, default=True)
- description = models.TextField(null=False, blank=False)
- category = models.CharField(max_length=100, null=False, blank=False)
- pgversion = models.CharField(max_length=5, null=False, blank=True)
- edbversion = models.CharField(max_length=5, null=False, blank=True)
- format = models.CharField(max_length=5, null=False, blank=False,
- choices = (('bin', 'Linux .bin'), ('app', 'Mac .app'),
- ('pkg', 'Mac .pkg'), ('mpkg', 'Mac .mpkg'),
- ('exe', 'Windows .exe'), ('msi', 'Windows .msi'))
- )
- installoptions = models.CharField(max_length=500, null=False, blank=True)
- upgradeoptions = models.CharField(max_length=500, null=False, blank=True)
- checksum = models.CharField(max_length=32, null=False, blank=False)
- mirrorpath = models.CharField(max_length=500, null=False, blank=True)
- alturl = models.URLField(max_length=500, null=False, blank=True)
- txtdependencies = models.CharField(max_length=1000, null=False, blank=True,
- verbose_name='Dependencies',
- help_text='Comma separated list of text dependencies, no spaces!')
- versionkey = models.CharField(max_length=500, null=False, blank=False)
- manifesturl = models.URLField(max_length=500, null=False, blank=True)
-
- purge_urls = ('/applications-v2.xml', )
-
- def __unicode__(self):
- return "%s %s %s" % (self.textid, self.version, self.platform)
-
- class Meta:
- unique_together = ('textid', 'version', 'platform', )
- ordering = ('textid', 'name', 'platform', )
+ textid = models.CharField(max_length=100, null=False, blank=False)
+ version = models.CharField(max_length=20, null=False, blank=False)
+ platform = models.CharField(max_length=20, null=False, blank=False,
+ choices= (('windows', 'Windows (32-bit)'), ('windows-x64', 'Windows (64-bit)'), ('osx', 'Mac OS X'),
+ ('linux', 'Linux (32-bit)'), ('linux-x64', 'Linux (64-bit)'))
+ )
+ secondaryplatform = models.CharField(max_length=20, null=False, blank=True,
+ choices= (('', 'None'), ('windows', 'Windows (32-bit)'), ('windows-x64', 'Windows (64-bit)'),
+ ('osx', 'Mac OS X'), ('linux', 'Linux (32-bit)'), ('linux-x64', 'Linux (64-bit)'))
+ )
+ name = models.CharField(max_length=500, null=False, blank=False)
+ active = models.BooleanField(null=False, blank=False, default=True)
+ description = models.TextField(null=False, blank=False)
+ category = models.CharField(max_length=100, null=False, blank=False)
+ pgversion = models.CharField(max_length=5, null=False, blank=True)
+ edbversion = models.CharField(max_length=5, null=False, blank=True)
+ format = models.CharField(max_length=5, null=False, blank=False,
+ choices = (('bin', 'Linux .bin'), ('app', 'Mac .app'),
+ ('pkg', 'Mac .pkg'), ('mpkg', 'Mac .mpkg'),
+ ('exe', 'Windows .exe'), ('msi', 'Windows .msi'))
+ )
+ installoptions = models.CharField(max_length=500, null=False, blank=True)
+ upgradeoptions = models.CharField(max_length=500, null=False, blank=True)
+ checksum = models.CharField(max_length=32, null=False, blank=False)
+ mirrorpath = models.CharField(max_length=500, null=False, blank=True)
+ alturl = models.URLField(max_length=500, null=False, blank=True)
+ txtdependencies = models.CharField(max_length=1000, null=False, blank=True,
+ verbose_name='Dependencies',
+ help_text='Comma separated list of text dependencies, no spaces!')
+ versionkey = models.CharField(max_length=500, null=False, blank=False)
+ manifesturl = models.URLField(max_length=500, null=False, blank=True)
+
+ purge_urls = ('/applications-v2.xml', )
+
+ def __unicode__(self):
+ return "%s %s %s" % (self.textid, self.version, self.platform)
+
+ class Meta:
+ unique_together = ('textid', 'version', 'platform', )
+ ordering = ('textid', 'name', 'platform', )
from models import Category
def get_struct():
- # Products
- for c in Category.objects.all():
- yield ('download/products/%s/' % c.id,
- 0.3)
+ # Products
+ for c in Category.objects.all():
+ yield ('download/products/%s/' % c.id,
+ 0.3)
- # Don't index the ftp browser for now - it doesn't really contain
- # anything useful to search
+ # Don't index the ftp browser for now - it doesn't really contain
+ # anything useful to search
# FTP browser
#######
def ftpbrowser(request, subpath):
- if subpath:
- # An actual path has been selected. Fancy!
-
- if subpath.find('..') > -1:
- # Just claim it doesn't exist if the user tries to do this
- # type of bad thing
- raise Http404
- subpath = subpath.strip('/')
- else:
- subpath=""
-
- # Pickle up the list of things we need
- try:
- f = open(settings.FTP_PICKLE, "rb")
- allnodes = pickle.load(f)
- f.close()
- except Exception, e:
- return HttpServerError(request, "Failed to load ftp site information: %s" % e)
-
- # An incoming subpath may either be canonical, or have one or more elements
- # present that are actually symlinks. For each element of the path, test to
- # see if it is present in the pickle. If not, look for a symlink entry with
- # and if present, replace the original entry with the symlink target.
- canonpath = ''
- if subpath != '':
- parent = ''
- for d in subpath.split('/'):
- # Check if allnodes contains a node matching the path
- if allnodes[parent].has_key(d):
- if allnodes[parent][d]['t'] == 'd':
- canonpath = os.path.join(canonpath, d)
- elif allnodes[parent][d]['t'] == 'l':
- canonpath = os.path.join(canonpath, allnodes[parent][d]['d']).strip('/')
- else:
- # There's a matching node, but it's not a link or a directory
- raise Http404
-
- parent = canonpath
- else:
- # There's no matching node
- raise Http404
-
- # If we wound up with a canonical path that doesn't match the original request,
- # redirect the user
- canonpath = canonpath.strip('/')
- if subpath != canonpath:
- return HttpResponseRedirect('/ftp/' + canonpath)
-
- node = allnodes[subpath]
- del allnodes
-
- # Add all directories
- directories = [{'link': k, 'url': k, 'type': 'd'} for k,v in node.items() if v['t'] == 'd']
- # Add all symlinks (only directories supported)
- directories.extend([{'link': k, 'url': v['d'], 'type': 'l'} for k,v in node.items() if v['t'] == 'l'])
-
- # A ittle early sorting wouldn't go amiss, so .. ends up at the top
- directories.sort(key = version_sort, reverse=True)
-
- # Add a link to the parent directory
- if subpath:
- directories.insert(0, {'link':'[Parent Directory]', 'url':'..'})
-
- # Fetch files
- files = [{'name': k, 'mtime': v['d'], 'size': v['s']} for k,v in node.items() if v['t'] == 'f']
-
- breadcrumbs = []
- if subpath:
- breadroot = ""
- for pathpiece in subpath.split('/'):
- if not pathpiece:
- # Trailing slash will give out an empty pathpiece
- continue
- if breadroot:
- breadroot = "%s/%s" % (breadroot, pathpiece)
- else:
- breadroot = pathpiece
- breadcrumbs.append({'name': pathpiece, 'path': breadroot});
-
- # Check if there are any "content files" we should render directly on the webpage
- file_readme = (node.has_key('README') and node['README']['t']=='f') and node['README']['c'] or None;
- file_message = (node.has_key('.message') and node['.message']['t']=='f') and node['.message']['c'] or None;
- file_maintainer = (node.has_key('CURRENT_MAINTAINER') and node['CURRENT_MAINTAINER']['t'] == 'f') and node['CURRENT_MAINTAINER']['c'] or None;
-
- del node
-
- return render_pgweb(request, 'download', 'downloads/ftpbrowser.html', {
- 'basepath': subpath.rstrip('/'),
- 'directories': directories,
- 'files': sorted(files),
- 'breadcrumbs': breadcrumbs,
- 'readme': file_readme,
- 'messagefile': file_message,
- 'maintainer': file_maintainer,
- })
+ # Render one level of the ftp site browser from the pregenerated pickle
+ # (settings.FTP_PICKLE, uploaded via uploadftp()). Symlinked path
+ # elements are resolved to their canonical location, redirecting the
+ # client if the requested path was not canonical.
+ if subpath:
+ # An actual path has been selected. Fancy!
+
+ if subpath.find('..') > -1:
+ # Just claim it doesn't exist if the user tries to do this
+ # type of bad thing
+ raise Http404
+ subpath = subpath.strip('/')
+ else:
+ subpath=""
+
+ # Pickle up the list of things we need
+ try:
+ f = open(settings.FTP_PICKLE, "rb")
+ allnodes = pickle.load(f)
+ f.close()
+ except Exception, e:
+ return HttpServerError(request, "Failed to load ftp site information: %s" % e)
+
+ # An incoming subpath may either be canonical, or have one or more elements
+ # present that are actually symlinks. For each element of the path, test to
+ # see if it is present in the pickle. If not, look for a symlink entry with
+ # and if present, replace the original entry with the symlink target.
+ canonpath = ''
+ if subpath != '':
+ parent = ''
+ for d in subpath.split('/'):
+ # Check if allnodes contains a node matching the path
+ if allnodes[parent].has_key(d):
+ if allnodes[parent][d]['t'] == 'd':
+ canonpath = os.path.join(canonpath, d)
+ elif allnodes[parent][d]['t'] == 'l':
+ canonpath = os.path.join(canonpath, allnodes[parent][d]['d']).strip('/')
+ else:
+ # There's a matching node, but it's not a link or a directory
+ raise Http404
+
+ parent = canonpath
+ else:
+ # There's no matching node
+ raise Http404
+
+ # If we wound up with a canonical path that doesn't match the original request,
+ # redirect the user
+ canonpath = canonpath.strip('/')
+ if subpath != canonpath:
+ return HttpResponseRedirect('/ftp/' + canonpath)
+
+ node = allnodes[subpath]
+ del allnodes
+
+ # Add all directories
+ directories = [{'link': k, 'url': k, 'type': 'd'} for k,v in node.items() if v['t'] == 'd']
+ # Add all symlinks (only directories supported)
+ directories.extend([{'link': k, 'url': v['d'], 'type': 'l'} for k,v in node.items() if v['t'] == 'l'])
+
+ # A little early sorting wouldn't go amiss, so .. ends up at the top
+ directories.sort(key = version_sort, reverse=True)
+
+ # Add a link to the parent directory
+ if subpath:
+ directories.insert(0, {'link':'[Parent Directory]', 'url':'..'})
+
+ # Fetch files
+ files = [{'name': k, 'mtime': v['d'], 'size': v['s']} for k,v in node.items() if v['t'] == 'f']
+
+ breadcrumbs = []
+ if subpath:
+ breadroot = ""
+ for pathpiece in subpath.split('/'):
+ if not pathpiece:
+ # Trailing slash will give out an empty pathpiece
+ continue
+ if breadroot:
+ breadroot = "%s/%s" % (breadroot, pathpiece)
+ else:
+ breadroot = pathpiece
+ breadcrumbs.append({'name': pathpiece, 'path': breadroot});
+
+ # Check if there are any "content files" we should render directly on the webpage
+ file_readme = (node.has_key('README') and node['README']['t']=='f') and node['README']['c'] or None;
+ file_message = (node.has_key('.message') and node['.message']['t']=='f') and node['.message']['c'] or None;
+ file_maintainer = (node.has_key('CURRENT_MAINTAINER') and node['CURRENT_MAINTAINER']['t'] == 'f') and node['CURRENT_MAINTAINER']['c'] or None;
+
+ del node
+
+ return render_pgweb(request, 'download', 'downloads/ftpbrowser.html', {
+ 'basepath': subpath.rstrip('/'),
+ 'directories': directories,
+ 'files': sorted(files),
+ 'breadcrumbs': breadcrumbs,
+ 'readme': file_readme,
+ 'messagefile': file_message,
+ 'maintainer': file_maintainer,
+ })
# Accept an upload of the ftpsite pickle. This is fairly resource consuming,
# file in parallel.
@csrf_exempt
def uploadftp(request):
- if request.method != 'PUT':
- return HttpServerError(request, "Invalid method")
- if not request.META['REMOTE_ADDR'] in settings.FTP_MASTERS:
- return HttpServerError(request, "Invalid client address")
- # We have the data in request.body. Attempt to load it as
- # a pickle to make sure it's properly formatted
- pickle.loads(request.body)
-
- # Next, check if it's the same as the current file
- f = open(settings.FTP_PICKLE, "rb")
- x = f.read()
- f.close()
- if x == request.body:
- # Don't rewrite the file or purge any data if nothing changed
- return HttpResponse("NOT CHANGED", content_type="text/plain")
-
- # File has changed - let's write it!
- f = open("%s.new" % settings.FTP_PICKLE, "wb")
- f.write(request.body)
- f.close()
- os.rename("%s.new" % settings.FTP_PICKLE, settings.FTP_PICKLE)
-
- # Purge it out of varnish so we start responding right away
- varnish_purge("/ftp")
-
- # Finally, indicate to the client that we're happy
- return HttpResponse("OK", content_type="text/plain")
+ # Receive the ftp site pickle via PUT from a trusted master host,
+ # write it atomically (tmp file + rename) and purge the cached /ftp
+ # pages so the new listing is served immediately.
+ if request.method != 'PUT':
+ return HttpServerError(request, "Invalid method")
+ if not request.META['REMOTE_ADDR'] in settings.FTP_MASTERS:
+ return HttpServerError(request, "Invalid client address")
+ # We have the data in request.body. Attempt to load it as
+ # a pickle to make sure it's properly formatted
+ # NOTE(review): pickle.loads executes attacker-controlled code if the
+ # FTP_MASTERS address check above is ever bypassed — keep it strict.
+ pickle.loads(request.body)
+
+ # Next, check if it's the same as the current file
+ f = open(settings.FTP_PICKLE, "rb")
+ x = f.read()
+ f.close()
+ if x == request.body:
+ # Don't rewrite the file or purge any data if nothing changed
+ return HttpResponse("NOT CHANGED", content_type="text/plain")
+
+ # File has changed - let's write it!
+ f = open("%s.new" % settings.FTP_PICKLE, "wb")
+ f.write(request.body)
+ f.close()
+ os.rename("%s.new" % settings.FTP_PICKLE, settings.FTP_PICKLE)
+
+ # Purge it out of varnish so we start responding right away
+ varnish_purge("/ftp")
+
+ # Finally, indicate to the client that we're happy
+ return HttpResponse("OK", content_type="text/plain")
@csrf_exempt
def uploadyum(request):
- if request.method != 'PUT':
- return HttpServerError(request, "Invalid method")
- if not request.META['REMOTE_ADDR'] in settings.FTP_MASTERS:
- return HttpServerError(request, "Invalid client address")
- # We have the data in request.body. Attempt to load it as
- # json to ensure correct format.
- json.loads(request.body)
+ # Receive the yum repo metadata (JSON) via PUT from a trusted master
+ # host; write atomically (tmp file + rename) and purge the cached JS
+ # endpoint rendered by yum_js().
+ if request.method != 'PUT':
+ return HttpServerError(request, "Invalid method")
+ if not request.META['REMOTE_ADDR'] in settings.FTP_MASTERS:
+ return HttpServerError(request, "Invalid client address")
+ # We have the data in request.body. Attempt to load it as
+ # json to ensure correct format.
+ json.loads(request.body)
- # Next, check if it's the same as the current file
- if os.path.isfile(settings.YUM_JSON):
- with open(settings.YUM_JSON, "r") as f:
- if f.read() == request.body:
- # Don't rewrite the file or purge any data if nothing changed
- return HttpResponse("NOT CHANGED", content_type="text/plain")
+ # Next, check if it's the same as the current file
+ if os.path.isfile(settings.YUM_JSON):
+ with open(settings.YUM_JSON, "r") as f:
+ if f.read() == request.body:
+ # Don't rewrite the file or purge any data if nothing changed
+ return HttpResponse("NOT CHANGED", content_type="text/plain")
- # File has changed - let's write it!
- with open("%s.new" % settings.YUM_JSON, "w") as f:
- f.write(request.body)
+ # File has changed - let's write it!
+ with open("%s.new" % settings.YUM_JSON, "w") as f:
+ f.write(request.body)
- os.rename("%s.new" % settings.YUM_JSON, settings.YUM_JSON)
+ os.rename("%s.new" % settings.YUM_JSON, settings.YUM_JSON)
- # Purge it out of varnish so we start responding right away
- varnish_purge("/download/js/yum.js")
+ # Purge it out of varnish so we start responding right away
+ varnish_purge("/download/js/yum.js")
- # Finally, indicate to the client that we're happy
- return HttpResponse("OK", content_type="text/plain")
+ # Finally, indicate to the client that we're happy
+ return HttpResponse("OK", content_type="text/plain")
@nocache
def mirrorselect(request, path):
- # Old access to mirrors will just redirect to the main ftp site.
- # We don't really need it anymore, but the cost of keeping it is
- # very low...
- return HttpResponseRedirect("https://ftp.postgresql.org/pub/%s" % path)
+ # Old access to mirrors will just redirect to the main ftp site.
+ # We don't really need it anymore, but the cost of keeping it is
+ # very low...
+ # NOTE: HttpResponseRedirect issues a temporary (302) redirect.
+ return HttpResponseRedirect("https://ftp.postgresql.org/pub/%s" % path)
# Render javascript for yum downloads
def yum_js(request):
- with open(settings.YUM_JSON) as f:
- jsonstr = f.read()
- return render(request, 'downloads/js/yum.js', {
- 'json': jsonstr,
- 'supported_versions': ','.join([str(v.numtree) for v in Version.objects.filter(supported=True)]),
- }, content_type='application/json')
+ # Serve the yum download JS: embeds the raw JSON uploaded via
+ # uploadyum() plus a comma-separated list of supported version trees.
+ with open(settings.YUM_JSON) as f:
+ jsonstr = f.read()
+ return render(request, 'downloads/js/yum.js', {
+ 'json': jsonstr,
+ 'supported_versions': ','.join([str(v.numtree) for v in Version.objects.filter(supported=True)]),
+ }, content_type='application/json')
#######
# Product catalogue
#######
def categorylist(request):
- categories = Category.objects.all()
- return render_pgweb(request, 'download', 'downloads/categorylist.html', {
- 'categories': categories,
- })
+ # List all product categories.
+ categories = Category.objects.all()
+ return render_pgweb(request, 'download', 'downloads/categorylist.html', {
+ 'categories': categories,
+ })
def productlist(request, catid, junk=None):
- category = get_object_or_404(Category, pk=catid)
- products = Product.objects.select_related('org','licencetype').filter(category=category, approved=True)
- return render_pgweb(request, 'download', 'downloads/productlist.html', {
- 'category': category,
- 'products': products,
- 'productcount': len(products),
- })
+ # Approved products in one category; select_related pulls in the org
+ # and licence type to avoid a query per row in the template.
+ category = get_object_or_404(Category, pk=catid)
+ products = Product.objects.select_related('org','licencetype').filter(category=category, approved=True)
+ return render_pgweb(request, 'download', 'downloads/productlist.html', {
+ 'category': category,
+ 'products': products,
+ 'productcount': len(products),
+ })
@login_required
def productform(request, itemid):
- return simple_form(Product, itemid, request, ProductForm,
- redirect='/account/edit/products/')
+ # Generic create/edit form for a Product, returning to the account page.
+ return simple_form(Product, itemid, request, ProductForm,
+ redirect='/account/edit/products/')
#######
# Stackbuilder
#######
def applications_v2_xml(request):
- all_apps = StackBuilderApp.objects.select_related().filter(active=True)
-
- resp = HttpResponse(content_type='text/xml')
- x = PgXmlHelper(resp, skipempty=True)
- x.startDocument()
- x.startElement('applications', {})
- for a in all_apps:
- x.startElement('application', {})
- x.add_xml_element('id', a.textid)
- x.add_xml_element('platform', a.platform)
- x.add_xml_element('secondaryplatform', a.secondaryplatform)
- x.add_xml_element('version', a.version)
- x.add_xml_element('name', a.name)
- x.add_xml_element('description', a.description)
- x.add_xml_element('category', a.category)
- x.add_xml_element('pgversion', a.pgversion)
- x.add_xml_element('edbversion', a.edbversion)
- x.add_xml_element('format', a.format)
- x.add_xml_element('installoptions', a.installoptions)
- x.add_xml_element('upgradeoptions', a.upgradeoptions)
- x.add_xml_element('checksum', a.checksum)
- x.add_xml_element('mirrorpath', a.mirrorpath)
- x.add_xml_element('alturl', a.alturl)
- x.add_xml_element('versionkey', a.versionkey)
- x.add_xml_element('manifesturl', a.manifesturl)
- for dep in a.txtdependencies.split(','):
- x.add_xml_element('dependency', dep)
- x.endElement('application')
- x.endElement('applications')
- x.endDocument()
- return resp
+ # Emit the Stackbuilder applications catalogue as XML: one
+ # <application> element per active StackBuilderApp (skipempty=True —
+ # presumably elements with empty values are omitted; TODO confirm in
+ # PgXmlHelper).
+ all_apps = StackBuilderApp.objects.select_related().filter(active=True)
+
+ resp = HttpResponse(content_type='text/xml')
+ x = PgXmlHelper(resp, skipempty=True)
+ x.startDocument()
+ x.startElement('applications', {})
+ for a in all_apps:
+ x.startElement('application', {})
+ x.add_xml_element('id', a.textid)
+ x.add_xml_element('platform', a.platform)
+ x.add_xml_element('secondaryplatform', a.secondaryplatform)
+ x.add_xml_element('version', a.version)
+ x.add_xml_element('name', a.name)
+ x.add_xml_element('description', a.description)
+ x.add_xml_element('category', a.category)
+ x.add_xml_element('pgversion', a.pgversion)
+ x.add_xml_element('edbversion', a.edbversion)
+ x.add_xml_element('format', a.format)
+ x.add_xml_element('installoptions', a.installoptions)
+ x.add_xml_element('upgradeoptions', a.upgradeoptions)
+ x.add_xml_element('checksum', a.checksum)
+ x.add_xml_element('mirrorpath', a.mirrorpath)
+ x.add_xml_element('alturl', a.alturl)
+ x.add_xml_element('versionkey', a.versionkey)
+ x.add_xml_element('manifesturl', a.manifesturl)
+ for dep in a.txtdependencies.split(','):
+ x.add_xml_element('dependency', dep)
+ x.endElement('application')
+ x.endElement('applications')
+ x.endDocument()
+ return resp
from models import Event
def approve_event(modeladmin, request, queryset):
- # We need to do this in a loop even though it's less efficient,
- # since using queryset.update() will not send the moderation messages.
- for e in queryset:
- e.approved = True
- e.save()
+ # Admin bulk action: approve each selected event individually.
+ # We need to do this in a loop even though it's less efficient,
+ # since using queryset.update() will not send the moderation messages.
+ for e in queryset:
+ e.approved = True
+ e.save()
approve_event.short_description = 'Approve event'
class EventAdminForm(forms.ModelForm):
- class Meta:
- model = Event
- exclude = ()
+ class Meta:
+ model = Event
+ exclude = ()
- def clean(self):
- cleaned_data = super(EventAdminForm, self).clean()
- if not cleaned_data.get('isonline'):
- if not cleaned_data.get('city'):
- self._errors['city'] = self.error_class(['City must be specified for non-online events'])
- del cleaned_data['city']
- if not cleaned_data.get('country'):
- self._errors['country'] = self.error_class(['Country must be specified for non-online events'])
- del cleaned_data['country']
- return cleaned_data
+ def clean(self):
+ # Cross-field validation: physical (non-online) events must carry
+ # both a city and a country.
+ cleaned_data = super(EventAdminForm, self).clean()
+ if not cleaned_data.get('isonline'):
+ if not cleaned_data.get('city'):
+ self._errors['city'] = self.error_class(['City must be specified for non-online events'])
+ del cleaned_data['city']
+ if not cleaned_data.get('country'):
+ self._errors['country'] = self.error_class(['Country must be specified for non-online events'])
+ del cleaned_data['country']
+ return cleaned_data
class EventAdmin(PgwebAdmin):
- list_display = ('title', 'org', 'startdate', 'enddate', 'approved',)
- list_filter = ('approved',)
- search_fields = ('summary', 'details', 'title', )
- actions = [approve_event, ]
- form = EventAdminForm
+ # Moderation-centric admin: filter on approval state and expose the
+ # bulk "Approve event" action.
+ list_display = ('title', 'org', 'startdate', 'enddate', 'approved',)
+ list_filter = ('approved',)
+ search_fields = ('summary', 'details', 'title', )
+ actions = [approve_event, ]
+ form = EventAdminForm
admin.site.register(Event, EventAdmin)
from datetime import datetime, time
class EventFeed(Feed):
- title = description = "PostgreSQL events"
- link = "https://www.postgresql.org/"
+ title = description = "PostgreSQL events"
+ link = "https://www.postgresql.org/"
- description_template = 'events/rss_description.html'
- title_template = 'events/rss_title.html'
+ description_template = 'events/rss_description.html'
+ title_template = 'events/rss_title.html'
- def items(self):
- return Event.objects.filter(approved=True)[:10]
+ def items(self):
+ # First ten approved events in the model's default ordering.
+ return Event.objects.filter(approved=True)[:10]
- def item_link(self, obj):
- return "https://www.postgresql.org/about/event/%s/" % obj.id
+ def item_link(self, obj):
+ return "https://www.postgresql.org/about/event/%s/" % obj.id
- def item_pubdate(self, obj):
- return datetime.combine(obj.startdate,time.min)
+ def item_pubdate(self, obj):
+ # Feed timestamp: midnight at the event's start date.
+ return datetime.combine(obj.startdate,time.min)
from models import Event
class EventForm(forms.ModelForm):
- toggle_fields = [
- {
- 'name': 'isonline',
- 'invert': True,
- 'fields': ['city', 'state', 'country',]
- },
- ]
- def __init__(self, *args, **kwargs):
- super(EventForm, self).__init__(*args, **kwargs)
- def filter_by_user(self, user):
- self.fields['org'].queryset = Organisation.objects.filter(managers=user, approved=True)
+ # toggle_fields presumably drives show/hide of the location fields when
+ # "isonline" is checked ('invert': True) — consumed by the form
+ # rendering machinery elsewhere; TODO confirm.
+ toggle_fields = [
+ {
+ 'name': 'isonline',
+ 'invert': True,
+ 'fields': ['city', 'state', 'country',]
+ },
+ ]
+ def __init__(self, *args, **kwargs):
+ super(EventForm, self).__init__(*args, **kwargs)
+ def filter_by_user(self, user):
+ # Limit the organisation choices to approved orgs this user manages.
+ self.fields['org'].queryset = Organisation.objects.filter(managers=user, approved=True)
- def clean(self):
- cleaned_data = super(EventForm, self).clean()
- if not cleaned_data.get('isonline'):
- # Non online events require city and country
- # (we don't require state, since many countries have no such thing)
- if not cleaned_data.get('city'):
- self._errors['city'] = self.error_class(['City must be specified for non-online events'])
- del cleaned_data['city']
- if not cleaned_data.get('country'):
- self._errors['country'] = self.error_class(['Country must be specified for non-online events'])
- del cleaned_data['country']
- return cleaned_data
+ def clean(self):
+ cleaned_data = super(EventForm, self).clean()
+ if not cleaned_data.get('isonline'):
+ # Non online events require city and country
+ # (we don't require state, since many countries have no such thing)
+ if not cleaned_data.get('city'):
+ self._errors['city'] = self.error_class(['City must be specified for non-online events'])
+ del cleaned_data['city']
+ if not cleaned_data.get('country'):
+ self._errors['country'] = self.error_class(['Country must be specified for non-online events'])
+ del cleaned_data['country']
+ return cleaned_data
- def clean_startdate(self):
- if self.instance.pk and self.instance.approved:
- if self.cleaned_data['startdate'] != self.instance.startdate:
- raise ValidationError("You cannot change the dates on events that have been approved")
- return self.cleaned_data['startdate']
+ def clean_startdate(self):
+ # Dates are locked once an event has been approved.
+ if self.instance.pk and self.instance.approved:
+ if self.cleaned_data['startdate'] != self.instance.startdate:
+ raise ValidationError("You cannot change the dates on events that have been approved")
+ return self.cleaned_data['startdate']
- def clean_enddate(self):
- if self.instance.pk and self.instance.approved:
- if self.cleaned_data['enddate'] != self.instance.enddate:
- raise ValidationError("You cannot change the dates on events that have been approved")
- if self.cleaned_data.has_key('startdate') and self.cleaned_data['enddate'] < self.cleaned_data['startdate']:
- raise ValidationError("End date cannot be before start date!")
- return self.cleaned_data['enddate']
+ def clean_enddate(self):
+ # Same approval lock as clean_startdate, plus ordering check against
+ # the (possibly already invalidated) start date.
+ if self.instance.pk and self.instance.approved:
+ if self.cleaned_data['enddate'] != self.instance.enddate:
+ raise ValidationError("You cannot change the dates on events that have been approved")
+ if self.cleaned_data.has_key('startdate') and self.cleaned_data['enddate'] < self.cleaned_data['startdate']:
+ raise ValidationError("End date cannot be before start date!")
+ return self.cleaned_data['enddate']
- class Meta:
- model = Event
- exclude = ('submitter', 'approved', 'description_for_badged')
+ class Meta:
+ model = Event
+ exclude = ('submitter', 'approved', 'description_for_badged')
from pgweb.core.models import Country, Language, Organisation
class Event(models.Model):
- approved = models.BooleanField(null=False, blank=False, default=False)
+ # Moderation flag: unapproved events are hidden from the item page,
+ # listings, feed and sitemap.
+ approved = models.BooleanField(null=False, blank=False, default=False)
- org = models.ForeignKey(Organisation, null=False, blank=False, verbose_name="Organisation", help_text="If no organisations are listed, please check the <a href=\"/account/orglist/\">organisation list</a> and contact the organisation manager or <a href=\"mailto:webmaster@postgresql.org\">webmaster@postgresql.org</a> if none are listed.")
- title = models.CharField(max_length=100, null=False, blank=False)
- isonline = models.BooleanField(null=False, default=False, verbose_name="Online event")
- city = models.CharField(max_length=50, null=False, blank=True)
- state = models.CharField(max_length=50, null=False, blank=True)
- country = models.ForeignKey(Country, null=True, blank=True)
- language = models.ForeignKey(Language, null=True, blank=True, default='eng', help_text="Primary language for event. When multiple languages, specify this in the event description")
+ org = models.ForeignKey(Organisation, null=False, blank=False, verbose_name="Organisation", help_text="If no organisations are listed, please check the <a href=\"/account/orglist/\">organisation list</a> and contact the organisation manager or <a href=\"mailto:webmaster@postgresql.org\">webmaster@postgresql.org</a> if none are listed.")
+ title = models.CharField(max_length=100, null=False, blank=False)
+ isonline = models.BooleanField(null=False, default=False, verbose_name="Online event")
+ # Location fields are blank-able at the model level; the forms enforce
+ # city/country for non-online events.
+ city = models.CharField(max_length=50, null=False, blank=True)
+ state = models.CharField(max_length=50, null=False, blank=True)
+ country = models.ForeignKey(Country, null=True, blank=True)
+ language = models.ForeignKey(Language, null=True, blank=True, default='eng', help_text="Primary language for event. When multiple languages, specify this in the event description")
- badged = models.BooleanField(null=False, blank=False, default=False, verbose_name='Community event', help_text='Choose "Community event" if this is a community recognized event following the <a href="/community/recognition/#conferences" target="_blank">community event guidelines</a>.')
- description_for_badged = models.TextField(blank=True, null=True, verbose_name='Description for community event', help_text='DEPRECRATED: This was used in the beginning of community events to collect additional information.')
- startdate = models.DateField(null=False, blank=False, verbose_name="Start date")
- enddate = models.DateField(null=False, blank=False, verbose_name="End date")
+ badged = models.BooleanField(null=False, blank=False, default=False, verbose_name='Community event', help_text='Choose "Community event" if this is a community recognized event following the <a href="/community/recognition/#conferences" target="_blank">community event guidelines</a>.')
+ description_for_badged = models.TextField(blank=True, null=True, verbose_name='Description for community event', help_text='DEPRECRATED: This was used in the beginning of community events to collect additional information.')
+ startdate = models.DateField(null=False, blank=False, verbose_name="Start date")
+ enddate = models.DateField(null=False, blank=False, verbose_name="End date")
- summary = models.TextField(blank=False, null=False, help_text="A short introduction (shown on the events listing page)")
- details = models.TextField(blank=False, null=False, help_text="Complete event description")
+ summary = models.TextField(blank=False, null=False, help_text="A short introduction (shown on the events listing page)")
+ details = models.TextField(blank=False, null=False, help_text="Complete event description")
- send_notification = True
- markdown_fields = ('details', 'summary', )
+ # Flags consumed by shared pgweb model machinery (moderation
+ # notifications, markdown rendering) — defined elsewhere; TODO confirm.
+ send_notification = True
+ markdown_fields = ('details', 'summary', )
- def purge_urls(self):
- yield '/about/event/%s/' % self.pk
- yield '/about/events/'
- yield '/events.rss'
- # FIXME: when to expire the front page?
- yield '/$'
+ def purge_urls(self):
+ # Cached URLs to purge whenever this event changes.
+ yield '/about/event/%s/' % self.pk
+ yield '/about/events/'
+ yield '/events.rss'
+ # FIXME: when to expire the front page?
+ yield '/$'
- def __unicode__(self):
- return "%s: %s" % (self.startdate, self.title)
+ def __unicode__(self):
+ return "%s: %s" % (self.startdate, self.title)
- def verify_submitter(self, user):
- return (len(self.org.managers.filter(pk=user.pk)) == 1)
+ def verify_submitter(self, user):
+ # True if the given user manages the owning organisation.
+ return (len(self.org.managers.filter(pk=user.pk)) == 1)
- @property
- def has_organisation(self):
- mgrs = self.org.managers.all()
- if len(mgrs) == 1:
- if mgrs[0].pk == 0:
- return False # Migration organisation
- else:
- return True # Has an actual organisation
- elif len(mgrs) > 1:
- # More than one manager means it must be new
- return True
- return False # Has no organisastion at all
+ @property
+ def has_organisation(self):
+ # Distinguish real organisations from the placeholder "migration"
+ # organisation (single manager with pk 0).
+ mgrs = self.org.managers.all()
+ if len(mgrs) == 1:
+ if mgrs[0].pk == 0:
+ return False # Migration organisation
+ else:
+ return True # Has an actual organisation
+ elif len(mgrs) > 1:
+ # More than one manager means it must be new
+ return True
+ return False # Has no organisation at all
- @property
- def displaydate(self):
- if self.startdate == self.enddate:
- return self.startdate
- else:
- return "%s – %s" % (self.startdate, self.enddate)
+ @property
+ def displaydate(self):
+ # Single-day events show one date, otherwise "start – end".
+ if self.startdate == self.enddate:
+ return self.startdate
+ else:
+ return "%s – %s" % (self.startdate, self.enddate)
- @property
- def locationstring(self):
- if self.isonline:
- return "online"
- elif self.state:
- return "%s, %s, %s" % (self.city, self.state, self.country)
- else:
- return "%s, %s" % (self.city, self.country)
+ @property
+ def locationstring(self):
+ # Human-readable location; state is included only when present.
+ if self.isonline:
+ return "online"
+ elif self.state:
+ return "%s, %s, %s" % (self.city, self.state, self.country)
+ else:
+ return "%s, %s" % (self.city, self.country)
- class Meta:
- ordering = ('-startdate','-enddate',)
+ class Meta:
+ # Newest events first by default.
+ ordering = ('-startdate','-enddate',)
from models import Event
def get_struct():
- now = date.today()
+ now = date.today()
- # We intentionally don't put /about/eventarchive/ in the sitemap,
- # since we don't care about getting it indexed.
- # We only show events in the future, so only index events in the
- # future...
+ # We intentionally don't put /about/eventarchive/ in the sitemap,
+ # since we don't care about getting it indexed.
+ # We only show events in the future, so only index events in the
+ # future...
- for n in Event.objects.filter(approved=True, enddate__gt=now):
- yearsold = (now - n.startdate).days / 365
- if yearsold > 4:
- yearsold = 4
- yield ('about/event/%s/' % n.id,
- 0.5-(yearsold/10.0))
+ for n in Event.objects.filter(approved=True, enddate__gt=now):
+ # Sitemap priority starts at 0.5 and drops by 0.1 per year of
+ # age, capped at 4 years (so never below 0.1).
+ yearsold = (now - n.startdate).days / 365
+ if yearsold > 4:
+ yearsold = 4
+ yield ('about/event/%s/' % n.id,
+ 0.5-(yearsold/10.0))
from forms import EventForm
def main(request):
- community_events = Event.objects.select_related('country').filter(approved=True, badged=True).filter(enddate__gt=date.today()).order_by('enddate', 'startdate',)
- other_events = Event.objects.select_related('country').filter(approved=True, badged=False).filter(enddate__gt=date.today()).order_by('enddate', 'startdate',)
- return render_pgweb(request, 'about', 'events/archive.html', {
- 'title': 'Upcoming Events',
- 'eventblocks': (
- { 'name': 'Community Events', 'events': community_events, 'link': '',},
- { 'name': 'Other Events', 'events': other_events, 'link': '',},
- ),
- })
+ # Upcoming-events listing: approved events that have not yet ended,
+ # split into community (badged) and other events.
+ community_events = Event.objects.select_related('country').filter(approved=True, badged=True).filter(enddate__gt=date.today()).order_by('enddate', 'startdate',)
+ other_events = Event.objects.select_related('country').filter(approved=True, badged=False).filter(enddate__gt=date.today()).order_by('enddate', 'startdate',)
+ return render_pgweb(request, 'about', 'events/archive.html', {
+ 'title': 'Upcoming Events',
+ 'eventblocks': (
+ { 'name': 'Community Events', 'events': community_events, 'link': '',},
+ { 'name': 'Other Events', 'events': other_events, 'link': '',},
+ ),
+ })
def _eventarchive(request, title):
- # Hardcode to the latest 100 events. Do we need paging too?
- events = Event.objects.select_related('country').filter(approved=True).filter(enddate__lte=date.today()).order_by('-enddate', '-startdate',)[:100]
- return render_pgweb(request, 'about', 'events/archive.html', {
- 'title': '%s Archive' % title,
- 'archive': True,
- 'eventblocks': (
- {'name': title, 'events': events, },
- ),
- })
+ # Shared renderer for archive pages: the 100 most recently ended
+ # approved events.
+ # Hardcode to the latest 100 events. Do we need paging too?
+ events = Event.objects.select_related('country').filter(approved=True).filter(enddate__lte=date.today()).order_by('-enddate', '-startdate',)[:100]
+ return render_pgweb(request, 'about', 'events/archive.html', {
+ 'title': '%s Archive' % title,
+ 'archive': True,
+ 'eventblocks': (
+ {'name': title, 'events': events, },
+ ),
+ })
def archive(request):
- return _eventarchive(request, 'Event')
+ # Public event archive; _eventarchive() titles it "Event Archive".
+ return _eventarchive(request, 'Event')
def item(request, itemid, throwaway=None):
- event = get_object_or_404(Event, pk=itemid)
- if not event.approved:
- raise Http404
- return render_pgweb(request, 'about', 'events/item.html', {
- 'obj': event,
- })
+ event = get_object_or_404(Event, pk=itemid)
+ if not event.approved:
+ # Unmoderated events are not public; pretend they don't exist
+ raise Http404
+ return render_pgweb(request, 'about', 'events/item.html', {
+ 'obj': event,
+ })
@login_required
def form(request, itemid):
- return simple_form(Event, itemid, request, EventForm,
- redirect='/account/edit/events/')
+ # Generic create/edit form for an Event, returning to the account page.
+ return simple_form(Event, itemid, request, EventForm,
+ redirect='/account/edit/events/')
from models import Feature, FeatureGroup
class FeatureInline(admin.TabularInline):
- model = Feature
+ # Edit a group's features inline on the FeatureGroup admin page.
+ model = Feature
class FeatureGroupAdmin(admin.ModelAdmin):
- inlines = [FeatureInline, ]
- list_display = ('groupname', 'groupsort')
- ordering = ['groupsort']
+ inlines = [FeatureInline, ]
+ list_display = ('groupname', 'groupsort')
+ ordering = ['groupsort']
class FeatureAdmin(admin.ModelAdmin):
- list_display = ('featurename', 'group')
- list_filter = ('group',)
- search_fields = ('featurename',)
+ list_display = ('featurename', 'group')
+ list_filter = ('group',)
+ search_fields = ('featurename',)
admin.site.register(FeatureGroup, FeatureGroupAdmin)
admin.site.register(Feature, FeatureAdmin)
name='v96',
field=models.IntegerField(default=0, verbose_name=b'9.6', choices=[(0, b'No'), (1, b'Yes'), (2, b'Obsolete'), (3, b'?')]),
),
- migrations.RunSQL("UPDATE featurematrix_feature SET v96=v95 WHERE NOT v96=v95"),
+ migrations.RunSQL("UPDATE featurematrix_feature SET v96=v95 WHERE NOT v96=v95"),
]
name='v10',
field=models.IntegerField(default=0, verbose_name=b'10', choices=[(0, b'No'), (1, b'Yes'), (2, b'Obsolete'), (3, b'?')]),
),
- migrations.RunSQL("UPDATE featurematrix_feature SET v10=v96 WHERE NOT v10=v96"),
+ migrations.RunSQL("UPDATE featurematrix_feature SET v10=v96 WHERE NOT v10=v96"),
]
choices = [(k, v['str']) for k,v in choices_map.items()]
class FeatureGroup(models.Model):
- groupname = models.CharField(max_length=100, null=False, blank=False)
- groupsort = models.IntegerField(null=False, blank=False)
+ groupname = models.CharField(max_length=100, null=False, blank=False)
+ groupsort = models.IntegerField(null=False, blank=False)
- purge_urls = ('/about/featurematrix/', )
+ purge_urls = ('/about/featurematrix/', )
- def __unicode__(self):
- return self.groupname
+ def __unicode__(self):
+ return self.groupname
- @property
- def columns(self):
- # Return a list of all the columns for the matrix
- return [b for a,b in versions]
+ @property
+ def columns(self):
+ # Return a list of all the columns for the matrix
+ return [b for a,b in versions]
class Feature(models.Model):
- group = models.ForeignKey(FeatureGroup, null=False, blank=False)
- featurename = models.CharField(max_length=100, null=False, blank=False)
- featuredescription = models.TextField(null=False, blank=True)
- #WARNING! All fields that start with "v" will be considered versions!
- v74 = models.IntegerField(verbose_name="7.4", null=False, blank=False, default=0, choices=choices)
- v74.visible_default = False
- v80 = models.IntegerField(verbose_name="8.0", null=False, blank=False, default=0, choices=choices)
- v80.visible_default = False
- v81 = models.IntegerField(verbose_name="8.1", null=False, blank=False, default=0, choices=choices)
- v82 = models.IntegerField(verbose_name="8.2", null=False, blank=False, default=0, choices=choices)
- v83 = models.IntegerField(verbose_name="8.3", null=False, blank=False, default=0, choices=choices)
- v84 = models.IntegerField(verbose_name="8.4", null=False, blank=False, default=0, choices=choices)
- v90 = models.IntegerField(verbose_name="9.0", null=False, blank=False, default=0, choices=choices)
- v91 = models.IntegerField(verbose_name="9.1", null=False, blank=False, default=0, choices=choices)
- v92 = models.IntegerField(verbose_name="9.2", null=False, blank=False, default=0, choices=choices)
- v93 = models.IntegerField(verbose_name="9.3", null=False, blank=False, default=0, choices=choices)
- v94 = models.IntegerField(verbose_name="9.4", null=False, blank=False, default=0, choices=choices)
- v95 = models.IntegerField(verbose_name="9.5", null=False, blank=False, default=0, choices=choices)
- v96 = models.IntegerField(verbose_name="9.6", null=False, blank=False, default=0, choices=choices)
- v10 = models.IntegerField(verbose_name="10", null=False, blank=False, default=0, choices=choices)
- v11 = models.IntegerField(verbose_name="11", null=False, blank=False, default=0, choices=choices)
+ group = models.ForeignKey(FeatureGroup, null=False, blank=False)
+ featurename = models.CharField(max_length=100, null=False, blank=False)
+ featuredescription = models.TextField(null=False, blank=True)
+ #WARNING! All fields that start with "v" will be considered versions!
+ v74 = models.IntegerField(verbose_name="7.4", null=False, blank=False, default=0, choices=choices)
+ v74.visible_default = False
+ v80 = models.IntegerField(verbose_name="8.0", null=False, blank=False, default=0, choices=choices)
+ v80.visible_default = False
+ v81 = models.IntegerField(verbose_name="8.1", null=False, blank=False, default=0, choices=choices)
+ v82 = models.IntegerField(verbose_name="8.2", null=False, blank=False, default=0, choices=choices)
+ v83 = models.IntegerField(verbose_name="8.3", null=False, blank=False, default=0, choices=choices)
+ v84 = models.IntegerField(verbose_name="8.4", null=False, blank=False, default=0, choices=choices)
+ v90 = models.IntegerField(verbose_name="9.0", null=False, blank=False, default=0, choices=choices)
+ v91 = models.IntegerField(verbose_name="9.1", null=False, blank=False, default=0, choices=choices)
+ v92 = models.IntegerField(verbose_name="9.2", null=False, blank=False, default=0, choices=choices)
+ v93 = models.IntegerField(verbose_name="9.3", null=False, blank=False, default=0, choices=choices)
+ v94 = models.IntegerField(verbose_name="9.4", null=False, blank=False, default=0, choices=choices)
+ v95 = models.IntegerField(verbose_name="9.5", null=False, blank=False, default=0, choices=choices)
+ v96 = models.IntegerField(verbose_name="9.6", null=False, blank=False, default=0, choices=choices)
+ v10 = models.IntegerField(verbose_name="10", null=False, blank=False, default=0, choices=choices)
+ v11 = models.IntegerField(verbose_name="11", null=False, blank=False, default=0, choices=choices)
- purge_urls = ('/about/featurematrix/.*', )
+ purge_urls = ('/about/featurematrix/.*', )
- def __unicode__(self):
- # To make it look good in the admin interface, just don't render it
- return ''
+ def __unicode__(self):
+ # To make it look good in the admin interface, just don't render it
+ return ''
- def columns(self):
- # Get a list of column based on all versions that are visible_default
- return [choices_map[getattr(self, a)] for a,b in versions]
+ def columns(self):
+ # Get a list of column based on all versions that are visible_default
+ return [choices_map[getattr(self, a)] for a,b in versions]
- @property
- def featurelink(self):
- if self.featuredescription.startswith('https://') or self.featuredescription.startswith('http://'):
- return self.featuredescription
- else:
- return 'detail/%s/' % self.id
+ @property
+ def featurelink(self):
+ if self.featuredescription.startswith('https://') or self.featuredescription.startswith('http://'):
+ return self.featuredescription
+ else:
+ return 'detail/%s/' % self.id
versions = [(f.name,f.verbose_name) for f in Feature()._meta.fields if f.name.startswith('v') and getattr(f, 'visible_default', True)]
versions = sorted(versions, key=lambda f: -float(f[1]))
def get_struct():
- yield ('about/featurematrix/', None)
+ yield ('about/featurematrix/', None)
from models import Feature
def root(request):
- features = Feature.objects.all().select_related().order_by('group__groupsort', 'group__groupname', 'featurename')
- groups = []
- lastgroup = -1
- currentgroup = None
- for f in features:
- if f.group.id != lastgroup:
- if currentgroup:
- groups.append(currentgroup)
- lastgroup = f.group.id
- currentgroup = {
- 'group': f.group,
- 'features': [],
- }
- currentgroup['features'].append(f)
- if currentgroup:
- groups.append(currentgroup)
+ features = Feature.objects.all().select_related().order_by('group__groupsort', 'group__groupname', 'featurename')
+ groups = []
+ lastgroup = -1
+ currentgroup = None
+ for f in features:
+ if f.group.id != lastgroup:
+ if currentgroup:
+ groups.append(currentgroup)
+ lastgroup = f.group.id
+ currentgroup = {
+ 'group': f.group,
+ 'features': [],
+ }
+ currentgroup['features'].append(f)
+ if currentgroup:
+ groups.append(currentgroup)
- versions = Version.objects.filter(tree__gte='8.1').order_by('-tree')
- return render_pgweb(request, 'about', 'featurematrix/featurematrix.html', {
- 'groups': groups,
- 'versions': versions,
- })
+ versions = Version.objects.filter(tree__gte='8.1').order_by('-tree')
+ return render_pgweb(request, 'about', 'featurematrix/featurematrix.html', {
+ 'groups': groups,
+ 'versions': versions,
+ })
def detail(request, featureid):
- feature = get_object_or_404(Feature, pk=featureid)
- return render_pgweb(request, 'about', 'featurematrix/featuredetail.html', {
- 'feature': feature,
- })
+ feature = get_object_or_404(Feature, pk=featureid)
+ return render_pgweb(request, 'about', 'featurematrix/featuredetail.html', {
+ 'feature': feature,
+ })
from django.http import HttpResponseRedirect
def mailpref(request, listname):
- # Just redirect to the homepage of pglister, don't try specific lists
- return HttpResponseRedirect("https://lists.postgresql.org/")
+ # Just redirect to the homepage of pglister, don't try specific lists
+ return HttpResponseRedirect("https://lists.postgresql.org/")
import requests
class Command(BaseCommand):
- help = 'Synchronize mailinglists'
-
- def add_arguments(self, parser):
- parser.add_argument('--dryrun', action='store_true', help="Don't commit changes")
-
- def handle(self, *args, **options):
- if settings.ARCHIVES_SEARCH_PLAINTEXT:
- proto="http"
- else:
- proto="https"
- r = requests.get('{0}://{1}/listinfo/'.format(proto, settings.ARCHIVES_SEARCH_SERVER))
- j = r.json()
- allgroups = list(set([l['group'] for l in j]))
- with transaction.atomic():
- curs = connection.cursor()
-
- # Add any groups necessary
- curs.execute("INSERT INTO lists_mailinglistgroup (groupname, sortkey) SELECT n,50 FROM UNNEST(%s) n(n) WHERE NOT EXISTS (SELECT 1 FROM lists_mailinglistgroup WHERE groupname=n) RETURNING groupname", (allgroups,))
- for n, in curs.fetchall():
- print "Added group %s" % n
-
- # Add and update lists
- for l in j:
- curs.execute("SELECT id FROM lists_mailinglist WHERE listname=%s", (l['name'],))
- if curs.rowcount == 0:
- curs.execute("INSERT INTO lists_mailinglist (listname, group_id, active, description, shortdesc) VALUES (%s, (SELECT id FROM lists_mailinglistgroup WHERE groupname=%s), %s, %s, %s)", (
- l['name'], l['group'], l['active'], l['description'], l['shortdesc']))
- print "Added list %s" % l['name']
- else:
- curs.execute("UPDATE lists_mailinglist SET group_id=(SELECT id FROM lists_mailinglistgroup WHERE groupname=%s), active=%s, description=%s, shortdesc=%s WHERE listname=%s AND NOT (group_id=(SELECT id FROM lists_mailinglistgroup WHERE groupname=%s) AND active=%s AND description=%s AND shortdesc=%s) RETURNING listname", (
- l['group'], l['active'], l['description'], l['shortdesc'],
- l['name'],
- l['group'], l['active'], l['description'], l['shortdesc'],
- ))
- for n, in curs.fetchall():
- print "Updated list %s" % n
-
- # Delete any lists that shouldn't exist anymore (this is safe because we don't keep any data about them,
- # so they are trivial to add back)
- curs.execute("DELETE FROM lists_mailinglist WHERE NOT listname=ANY(%s) RETURNING listname", ([l['name'] for l in j],))
- for n, in curs.fetchall():
- print "Deleted list %s" % n
- # Delete listgroups
- curs.execute("DELETE FROM lists_mailinglistgroup WHERE NOT groupname=ANY(%s) RETURNING groupname", (allgroups,))
- for n, in curs.fetchall():
- print "Deleted group %s" % n
-
- if options['dryrun']:
- raise CommandError("Dry run, rolling back")
+ help = 'Synchronize mailinglists'
+
+ def add_arguments(self, parser):
+ parser.add_argument('--dryrun', action='store_true', help="Don't commit changes")
+
+ def handle(self, *args, **options):
+ if settings.ARCHIVES_SEARCH_PLAINTEXT:
+ proto="http"
+ else:
+ proto="https"
+ r = requests.get('{0}://{1}/listinfo/'.format(proto, settings.ARCHIVES_SEARCH_SERVER))
+ j = r.json()
+ allgroups = list(set([l['group'] for l in j]))
+ with transaction.atomic():
+ curs = connection.cursor()
+
+ # Add any groups necessary
+ curs.execute("INSERT INTO lists_mailinglistgroup (groupname, sortkey) SELECT n,50 FROM UNNEST(%s) n(n) WHERE NOT EXISTS (SELECT 1 FROM lists_mailinglistgroup WHERE groupname=n) RETURNING groupname", (allgroups,))
+ for n, in curs.fetchall():
+ print "Added group %s" % n
+
+ # Add and update lists
+ for l in j:
+ curs.execute("SELECT id FROM lists_mailinglist WHERE listname=%s", (l['name'],))
+ if curs.rowcount == 0:
+ curs.execute("INSERT INTO lists_mailinglist (listname, group_id, active, description, shortdesc) VALUES (%s, (SELECT id FROM lists_mailinglistgroup WHERE groupname=%s), %s, %s, %s)", (
+ l['name'], l['group'], l['active'], l['description'], l['shortdesc']))
+ print "Added list %s" % l['name']
+ else:
+ curs.execute("UPDATE lists_mailinglist SET group_id=(SELECT id FROM lists_mailinglistgroup WHERE groupname=%s), active=%s, description=%s, shortdesc=%s WHERE listname=%s AND NOT (group_id=(SELECT id FROM lists_mailinglistgroup WHERE groupname=%s) AND active=%s AND description=%s AND shortdesc=%s) RETURNING listname", (
+ l['group'], l['active'], l['description'], l['shortdesc'],
+ l['name'],
+ l['group'], l['active'], l['description'], l['shortdesc'],
+ ))
+ for n, in curs.fetchall():
+ print "Updated list %s" % n
+
+ # Delete any lists that shouldn't exist anymore (this is safe because we don't keep any data about them,
+ # so they are trivial to add back)
+ curs.execute("DELETE FROM lists_mailinglist WHERE NOT listname=ANY(%s) RETURNING listname", ([l['name'] for l in j],))
+ for n, in curs.fetchall():
+ print "Deleted list %s" % n
+ # Delete listgroups
+ curs.execute("DELETE FROM lists_mailinglistgroup WHERE NOT groupname=ANY(%s) RETURNING groupname", (allgroups,))
+ for n, in curs.fetchall():
+ print "Deleted group %s" % n
+
+ if options['dryrun']:
+ raise CommandError("Dry run, rolling back")
from django.db import models
class MailingListGroup(models.Model):
- groupname = models.CharField(max_length=64, null=False, blank=False)
- sortkey = models.IntegerField(null=False, default=10)
+ groupname = models.CharField(max_length=64, null=False, blank=False)
+ sortkey = models.IntegerField(null=False, default=10)
- purge_urls = ('/community/lists/', )
+ purge_urls = ('/community/lists/', )
- @property
- def negid(self):
- return -self.id
+ @property
+ def negid(self):
+ return -self.id
- def __unicode__(self):
- return self.groupname
+ def __unicode__(self):
+ return self.groupname
- class Meta:
- ordering = ('sortkey', )
+ class Meta:
+ ordering = ('sortkey', )
class MailingList(models.Model):
- group = models.ForeignKey(MailingListGroup, null=False)
- listname = models.CharField(max_length=64, null=False, blank=False, unique=True)
- active = models.BooleanField(null=False, default=False)
- description = models.TextField(null=False, blank=True)
- shortdesc = models.TextField(null=False, blank=True)
+ group = models.ForeignKey(MailingListGroup, null=False)
+ listname = models.CharField(max_length=64, null=False, blank=False, unique=True)
+ active = models.BooleanField(null=False, default=False)
+ description = models.TextField(null=False, blank=True)
+ shortdesc = models.TextField(null=False, blank=True)
- purge_urls = ('/community/lists/', )
+ purge_urls = ('/community/lists/', )
- @property
- def maybe_shortdesc(self):
- if self.shortdesc:
- return self.shortdesc
- return self.listname
+ @property
+ def maybe_shortdesc(self):
+ if self.shortdesc:
+ return self.shortdesc
+ return self.listname
- def __unicode__(self):
- return self.listname
+ def __unicode__(self):
+ return self.listname
- class Meta:
- ordering = ('listname', )
+ class Meta:
+ ordering = ('listname', )
def get_struct():
- yield ('community/lists/', None)
+ yield ('community/lists/', None)
from models import MailingList, MailingListGroup
def listinfo(request):
- resp = HttpResponse(content_type='application/json')
- groupdata = [ {
- 'id': g.id,
- 'name': g.groupname,
- 'sort': g.sortkey,
- } for g in MailingListGroup.objects.all()]
- listdata = [ {
- 'id': l.id,
- 'name': l.listname,
- 'groupid': l.group_id,
- 'active': l.active,
- 'shortdesc': l.shortdesc,
- 'description': l.description,
- } for l in MailingList.objects.all()]
- json.dump({'groups': groupdata, 'lists': listdata}, resp)
- return resp
+ resp = HttpResponse(content_type='application/json')
+ groupdata = [ {
+ 'id': g.id,
+ 'name': g.groupname,
+ 'sort': g.sortkey,
+ } for g in MailingListGroup.objects.all()]
+ listdata = [ {
+ 'id': l.id,
+ 'name': l.listname,
+ 'groupid': l.group_id,
+ 'active': l.active,
+ 'shortdesc': l.shortdesc,
+ 'description': l.description,
+ } for l in MailingList.objects.all()]
+ json.dump({'groups': groupdata, 'lists': listdata}, resp)
+ return resp
from models import QueuedMail
class QueuedMailAdmin(admin.ModelAdmin):
- model = QueuedMail
- readonly_fields = ('parsed_content', )
+ model = QueuedMail
+ readonly_fields = ('parsed_content', )
- def parsed_content(self, obj):
- # We only try to parse the *first* piece, because we assume
- # all our emails are trivial.
- try:
- parser = Parser()
- msg = parser.parsestr(obj.fullmsg)
- b = msg.get_payload(decode=True)
- if b: return b
+ def parsed_content(self, obj):
+ # We only try to parse the *first* piece, because we assume
+ # all our emails are trivial.
+ try:
+ parser = Parser()
+ msg = parser.parsestr(obj.fullmsg)
+ b = msg.get_payload(decode=True)
+ if b: return b
- pl = msg.get_payload()
- for p in pl:
- b = p.get_payload(decode=True)
- if b: return b
- return "Could not find body"
- except Exception, e:
- return "Failed to get body: %s" % e
+ pl = msg.get_payload()
+ for p in pl:
+ b = p.get_payload(decode=True)
+ if b: return b
+ return "Could not find body"
+ except Exception, e:
+ return "Failed to get body: %s" % e
- parsed_content.short_description = 'Parsed mail'
+ parsed_content.short_description = 'Parsed mail'
admin.site.register(QueuedMail, QueuedMailAdmin)
from pgweb.mailqueue.models import QueuedMail
class Command(BaseCommand):
- help = 'Send queued mail'
+ help = 'Send queued mail'
- def handle(self, *args, **options):
- # Grab advisory lock, if available. Lock id is just a random number
- # since we only need to interlock against ourselves. The lock is
- # automatically released when we're done.
- curs = connection.cursor()
- curs.execute("SELECT pg_try_advisory_lock(72181372)")
- if not curs.fetchall()[0][0]:
- raise CommandError("Failed to get advisory lock, existing send_queued_mail process stuck?")
+ def handle(self, *args, **options):
+ # Grab advisory lock, if available. Lock id is just a random number
+ # since we only need to interlock against ourselves. The lock is
+ # automatically released when we're done.
+ curs = connection.cursor()
+ curs.execute("SELECT pg_try_advisory_lock(72181372)")
+ if not curs.fetchall()[0][0]:
+ raise CommandError("Failed to get advisory lock, existing send_queued_mail process stuck?")
- for m in QueuedMail.objects.all():
- # Yes, we do a new connection for each run. Just because we can.
- # If it fails we'll throw an exception and just come back on the
- # next cron job. And local delivery should never fail...
- if m.usergenerated:
- # User generated email gets relayed directly over a frontend
- smtphost = settings.FRONTEND_SMTP_RELAY
- else:
- smtphost = 'localhost'
- smtp = smtplib.SMTP(smtphost)
- try:
- smtp.sendmail(m.sender, m.receiver, m.fullmsg.encode('utf-8'))
- except (smtplib.SMTPSenderRefused, smtplib.SMTPRecipientsRefused, smtplib.SMTPDataError):
- # If this was user generated, this indicates the antispam
- # kicking in, so we just ignore it. If it's anything else,
- # we want to let the exception through.
- if not m.usergenerated:
- raise
+ for m in QueuedMail.objects.all():
+ # Yes, we do a new connection for each run. Just because we can.
+ # If it fails we'll throw an exception and just come back on the
+ # next cron job. And local delivery should never fail...
+ if m.usergenerated:
+ # User generated email gets relayed directly over a frontend
+ smtphost = settings.FRONTEND_SMTP_RELAY
+ else:
+ smtphost = 'localhost'
+ smtp = smtplib.SMTP(smtphost)
+ try:
+ smtp.sendmail(m.sender, m.receiver, m.fullmsg.encode('utf-8'))
+ except (smtplib.SMTPSenderRefused, smtplib.SMTPRecipientsRefused, smtplib.SMTPDataError):
+ # If this was user generated, this indicates the antispam
+ # kicking in, so we just ignore it. If it's anything else,
+ # we want to let the exception through.
+ if not m.usergenerated:
+ raise
- smtp.close()
- m.delete()
+ smtp.close()
+ m.delete()
from django.db import models
class QueuedMail(models.Model):
- sender = models.EmailField(max_length=100, null=False, blank=False)
- receiver = models.EmailField(max_length=100, null=False, blank=False)
- # We store the raw MIME message, so if there are any attachments or
- # anything, we just push them right in there!
- fullmsg = models.TextField(null=False, blank=False)
- # Flag if the message is "user generated", so we can treat those
- # separately from an antispam and delivery perspective.
- usergenerated = models.BooleanField(null=False, blank=False, default=False)
+ sender = models.EmailField(max_length=100, null=False, blank=False)
+ receiver = models.EmailField(max_length=100, null=False, blank=False)
+ # We store the raw MIME message, so if there are any attachments or
+ # anything, we just push them right in there!
+ fullmsg = models.TextField(null=False, blank=False)
+ # Flag if the message is "user generated", so we can treat those
+ # separately from an antispam and delivery perspective.
+ usergenerated = models.BooleanField(null=False, blank=False, default=False)
- def __unicode__(self):
- return "%s: %s -> %s" % (self.pk, self.sender, self.receiver)
+ def __unicode__(self):
+ return "%s: %s -> %s" % (self.pk, self.sender, self.receiver)
from models import QueuedMail
def _encoded_email_header(name, email):
- if name:
- return formataddr((str(Header(name, 'utf-8')), email))
- return email
+ if name:
+ return formataddr((str(Header(name, 'utf-8')), email))
+ return email
def send_simple_mail(sender, receiver, subject, msgtxt, attachments=None, usergenerated=False, cc=None, replyto=None, sendername=None, receivername=None, messageid=None):
- # attachment format, each is a tuple of (name, mimetype,contents)
- # content should be *binary* and not base64 encoded, since we need to
- # use the base64 routines from the email library to get a properly
- # formatted output message
- msg = MIMEMultipart()
- msg['Subject'] = subject
- msg['To'] = _encoded_email_header(receivername, receiver)
- msg['From'] = _encoded_email_header(sendername, sender)
- if cc:
- msg['Cc'] = cc
- if replyto:
- msg['Reply-To'] = replyto
- msg['Date'] = formatdate(localtime=True)
- if messageid:
- msg['Message-ID'] = messageid
- else:
- msg['Message-ID'] = make_msgid()
-
- msg.attach(MIMEText(msgtxt, _charset='utf-8'))
-
- if attachments:
- for filename, contenttype, content in attachments:
- main,sub = contenttype.split('/')
- part = MIMENonMultipart(main,sub)
- part.set_payload(content)
- part.add_header('Content-Disposition', 'attachment; filename="%s"' % filename)
- encoders.encode_base64(part)
- msg.attach(part)
-
-
- # Just write it to the queue, so it will be transactionally rolled back
- QueuedMail(sender=sender, receiver=receiver, fullmsg=msg.as_string(), usergenerated=usergenerated).save()
- if cc:
- # Write a second copy for the cc, wihch will be delivered
- # directly to the recipient. (The sender doesn't parse the
- # message content to extract cc fields).
- QueuedMail(sender=sender, receiver=cc, fullmsg=msg.as_string(), usergenerated=usergenerated).save()
+ # attachment format, each is a tuple of (name, mimetype,contents)
+ # content should be *binary* and not base64 encoded, since we need to
+ # use the base64 routines from the email library to get a properly
+ # formatted output message
+ msg = MIMEMultipart()
+ msg['Subject'] = subject
+ msg['To'] = _encoded_email_header(receivername, receiver)
+ msg['From'] = _encoded_email_header(sendername, sender)
+ if cc:
+ msg['Cc'] = cc
+ if replyto:
+ msg['Reply-To'] = replyto
+ msg['Date'] = formatdate(localtime=True)
+ if messageid:
+ msg['Message-ID'] = messageid
+ else:
+ msg['Message-ID'] = make_msgid()
+
+ msg.attach(MIMEText(msgtxt, _charset='utf-8'))
+
+ if attachments:
+ for filename, contenttype, content in attachments:
+ main,sub = contenttype.split('/')
+ part = MIMENonMultipart(main,sub)
+ part.set_payload(content)
+ part.add_header('Content-Disposition', 'attachment; filename="%s"' % filename)
+ encoders.encode_base64(part)
+ msg.attach(part)
+
+
+ # Just write it to the queue, so it will be transactionally rolled back
+ QueuedMail(sender=sender, receiver=receiver, fullmsg=msg.as_string(), usergenerated=usergenerated).save()
+ if cc:
+ # Write a second copy for the cc, which will be delivered
+ # directly to the recipient. (The sender doesn't parse the
+ # message content to extract cc fields).
+ QueuedMail(sender=sender, receiver=cc, fullmsg=msg.as_string(), usergenerated=usergenerated).save()
def send_mail(sender, receiver, fullmsg, usergenerated=False):
- # Send an email, prepared as the full MIME encoded mail already
- QueuedMail(sender=sender, receiver=receiver, fullmsg=fullmsg, usergenerated=False).save()
+ # Send an email, prepared as the full MIME encoded mail already
+ QueuedMail(sender=sender, receiver=receiver, fullmsg=fullmsg, usergenerated=False).save()
from pgweb.core.models import Version
class _version_choices():
- def __iter__(self):
- yield ('-1', '** Select version')
- q = Q(supported=True) | Q(testing__gt=0)
- for v in Version.objects.filter(q):
- for minor in range(v.latestminor,-1,-1):
- if not v.testing or minor>0:
- # For beta/rc versions, there is no beta0, so exclude it
- s = v.buildversionstring(minor)
- yield (s,s)
- yield ('Unsupported/Unknown', 'Unsupported/Unknown')
+ def __iter__(self):
+ yield ('-1', '** Select version')
+ q = Q(supported=True) | Q(testing__gt=0)
+ for v in Version.objects.filter(q):
+ for minor in range(v.latestminor,-1,-1):
+ if not v.testing or minor>0:
+ # For beta/rc versions, there is no beta0, so exclude it
+ s = v.buildversionstring(minor)
+ yield (s,s)
+ yield ('Unsupported/Unknown', 'Unsupported/Unknown')
class SubmitBugForm(forms.Form):
- name = forms.CharField(max_length=100, required=True)
- email = forms.EmailField(max_length=100, required=True)
- pgversion = forms.CharField(max_length=20, required=True,
- label="PostgreSQL version",
- widget=forms.Select(choices=_version_choices()))
- os = forms.CharField(max_length=50, required=True,
- label="Operating system")
- shortdesc = forms.CharField(max_length=100, required=True,
- label="Short description")
- details = forms.CharField(required=True, widget=forms.Textarea)
+ name = forms.CharField(max_length=100, required=True)
+ email = forms.EmailField(max_length=100, required=True)
+ pgversion = forms.CharField(max_length=20, required=True,
+ label="PostgreSQL version",
+ widget=forms.Select(choices=_version_choices()))
+ os = forms.CharField(max_length=50, required=True,
+ label="Operating system")
+ shortdesc = forms.CharField(max_length=100, required=True,
+ label="Short description")
+ details = forms.CharField(required=True, widget=forms.Textarea)
- def clean_pgversion(self):
- if self.cleaned_data.get('pgversion') == '-1':
- raise forms.ValidationError('You must select a version')
- return self.cleaned_data.get('pgversion')
+ def clean_pgversion(self):
+ if self.cleaned_data.get('pgversion') == '-1':
+ raise forms.ValidationError('You must select a version')
+ return self.cleaned_data.get('pgversion')
from django.db import models
class BugIdMap(models.Model):
- # Explicit id field because we don't want a SERIAL here, since we generate
- # the actual bug IDs externally.
- id = models.IntegerField(null=False, blank=False, primary_key=True)
- messageid = models.CharField(max_length=500, null=False, blank=False)
+ # Explicit id field because we don't want a SERIAL here, since we generate
+ # the actual bug IDs externally.
+ id = models.IntegerField(null=False, blank=False, primary_key=True)
+ messageid = models.CharField(max_length=500, null=False, blank=False)
from forms import SubmitBugForm
def _make_bugs_messageid(bugid):
- return "<{0}-{1}@postgresql.org>".format(
- bugid,
- hashlib.md5("{0}-{1}".format(os.getpid(), time.time())).hexdigest()[:16],
- )
+ return "<{0}-{1}@postgresql.org>".format(
+ bugid,
+ hashlib.md5("{0}-{1}".format(os.getpid(), time.time())).hexdigest()[:16],
+ )
@login_required
def submitbug(request):
- if request.method == 'POST':
- form = SubmitBugForm(request.POST)
- if form.is_valid():
- with transaction.atomic():
- c = connection.cursor()
- c.execute("SELECT nextval('bug_id_seq')")
- bugid = c.fetchall()[0][0]
-
- messageid = _make_bugs_messageid(bugid)
-
- BugIdMap(id=bugid, messageid=messageid.strip('<>')).save()
-
- send_template_mail(
- settings.BUGREPORT_NOREPLY_EMAIL,
- settings.BUGREPORT_EMAIL,
- 'BUG #%s: %s' % (bugid, form.cleaned_data['shortdesc']),
- 'misc/bugmail.txt',
- {
- 'bugid': bugid,
- 'bug': form.cleaned_data,
- },
- usergenerated=True,
- cc=form.cleaned_data['email'],
- replyto='%s, %s' % (form.cleaned_data['email'], settings.BUGREPORT_EMAIL),
- sendername="PG Bug reporting form",
- messageid=messageid,
- )
-
- return HttpResponseRedirect("/account/submitbug/{0}/".format(bugid))
- else:
- form = SubmitBugForm(initial={
- 'name': '%s %s' % (request.user.first_name, request.user.last_name),
- 'email': request.user.email,
- })
-
- versions = Version.objects.filter(supported=True)
-
- return render_pgweb(request, 'support', 'base/form.html', {
- 'form': form,
- 'formitemtype': 'bug report',
- 'formtitle': 'Submit Bug Report <i class="fas fa-bug"></i>',
- 'operation': 'Submit',
- 'form_intro': template_to_string('misc/bug_header.html', {
- 'supportedversions': versions,
- }),
- 'savebutton': 'Submit and Send Email',
- })
+ if request.method == 'POST':
+ form = SubmitBugForm(request.POST)
+ if form.is_valid():
+ with transaction.atomic():
+ c = connection.cursor()
+ c.execute("SELECT nextval('bug_id_seq')")
+ bugid = c.fetchall()[0][0]
+
+ messageid = _make_bugs_messageid(bugid)
+
+ BugIdMap(id=bugid, messageid=messageid.strip('<>')).save()
+
+ send_template_mail(
+ settings.BUGREPORT_NOREPLY_EMAIL,
+ settings.BUGREPORT_EMAIL,
+ 'BUG #%s: %s' % (bugid, form.cleaned_data['shortdesc']),
+ 'misc/bugmail.txt',
+ {
+ 'bugid': bugid,
+ 'bug': form.cleaned_data,
+ },
+ usergenerated=True,
+ cc=form.cleaned_data['email'],
+ replyto='%s, %s' % (form.cleaned_data['email'], settings.BUGREPORT_EMAIL),
+ sendername="PG Bug reporting form",
+ messageid=messageid,
+ )
+
+ return HttpResponseRedirect("/account/submitbug/{0}/".format(bugid))
+ else:
+ form = SubmitBugForm(initial={
+ 'name': '%s %s' % (request.user.first_name, request.user.last_name),
+ 'email': request.user.email,
+ })
+
+ versions = Version.objects.filter(supported=True)
+
+ return render_pgweb(request, 'support', 'base/form.html', {
+ 'form': form,
+ 'formitemtype': 'bug report',
+ 'formtitle': 'Submit Bug Report <i class="fas fa-bug"></i>',
+ 'operation': 'Submit',
+ 'form_intro': template_to_string('misc/bug_header.html', {
+ 'supportedversions': versions,
+ }),
+ 'savebutton': 'Submit and Send Email',
+ })
@login_required
def submitbug_done(request, bugid):
- return render_pgweb(request, 'support', 'misc/bug_completed.html', {
- 'bugid': bugid,
- })
+ return render_pgweb(request, 'support', 'misc/bug_completed.html', {
+ 'bugid': bugid,
+ })
def bugs_redir(request, bugid):
- r = get_object_or_404(BugIdMap, id=bugid)
+ r = get_object_or_404(BugIdMap, id=bugid)
- return HttpResponseRedirect("{0}/message-id/{1}".format(settings.SITE_ROOT, r.messageid))
+ return HttpResponseRedirect("{0}/message-id/{1}".format(settings.SITE_ROOT, r.messageid))
# A crash testing URL. If the file /tmp/crashtest exists, raise a http 500
# error. Otherwise, just return a fixed text response
def crashtest(request):
- if os.path.exists('/tmp/crashtest'):
- raise Exception('This is a manual test of a crash!')
- else:
- return HttpResponse('Crash testing disabled', content_type='text/plain')
+ if os.path.exists('/tmp/crashtest'):
+ raise Exception('This is a manual test of a crash!')
+ else:
+ return HttpResponse('Crash testing disabled', content_type='text/plain')
from models import NewsArticle, NewsTag
class NewsArticleAdmin(PgwebAdmin):
- list_display = ('title', 'org', 'date', 'approved', )
- list_filter = ('approved', )
- filter_horizontal = ('tags', )
- search_fields = ('content', 'title', )
- change_form_template = 'admin/news/newsarticle/change_form.html'
+ list_display = ('title', 'org', 'date', 'approved', )
+ list_filter = ('approved', )
+ filter_horizontal = ('tags', )
+ search_fields = ('content', 'title', )
+ change_form_template = 'admin/news/newsarticle/change_form.html'
- def change_view(self, request, object_id, extra_context=None):
- newsarticle = NewsArticle.objects.get(pk=object_id)
- my_context = {
- 'latest': NewsArticle.objects.filter(org=newsarticle.org)[:10]
- }
- return super(NewsArticleAdmin, self).change_view(request, object_id, extra_context=my_context)
+ def change_view(self, request, object_id, extra_context=None):
+ newsarticle = NewsArticle.objects.get(pk=object_id)
+ my_context = {
+ 'latest': NewsArticle.objects.filter(org=newsarticle.org)[:10]
+ }
+ return super(NewsArticleAdmin, self).change_view(request, object_id, extra_context=my_context)
class NewsTagAdmin(PgwebAdmin):
- list_display = ('urlname', 'name', 'description')
+ list_display = ('urlname', 'name', 'description')
admin.site.register(NewsArticle, NewsArticleAdmin)
admin.site.register(NewsTag, NewsTagAdmin)
from datetime import datetime, time
class NewsFeed(Feed):
- title = description = "PostgreSQL news"
- link = "https://www.postgresql.org/"
+ title = description = "PostgreSQL news"
+ link = "https://www.postgresql.org/"
- description_template = 'news/rss_description.html'
- title_template = 'news/rss_title.html'
+ description_template = 'news/rss_description.html'
+ title_template = 'news/rss_title.html'
- def get_object(self, request, tagurl=None):
- return tagurl
+ def get_object(self, request, tagurl=None):
+ return tagurl
- def items(self, obj):
- if obj:
- return NewsArticle.objects.filter(approved=True, tags__urlname=obj)[:10]
- else:
- return NewsArticle.objects.filter(approved=True)[:10]
+ def items(self, obj):
+ if obj:
+ return NewsArticle.objects.filter(approved=True, tags__urlname=obj)[:10]
+ else:
+ return NewsArticle.objects.filter(approved=True)[:10]
- def item_link(self, obj):
- return "https://www.postgresql.org/about/news/%s/" % obj.id
+ def item_link(self, obj):
+ return "https://www.postgresql.org/about/news/%s/" % obj.id
- def item_pubdate(self, obj):
- return datetime.combine(obj.date,time.min)
+ def item_pubdate(self, obj):
+ return datetime.combine(obj.date,time.min)
from models import NewsArticle, NewsTag
class NewsArticleForm(forms.ModelForm):
- def __init__(self, *args, **kwargs):
- super(NewsArticleForm, self).__init__(*args, **kwargs)
- def filter_by_user(self, user):
- self.fields['org'].queryset = Organisation.objects.filter(managers=user, approved=True)
- def clean_date(self):
- if self.instance.pk and self.instance.approved:
- if self.cleaned_data['date'] != self.instance.date:
- raise ValidationError("You cannot change the date on an article that has been approved")
- return self.cleaned_data['date']
+ def __init__(self, *args, **kwargs):
+ super(NewsArticleForm, self).__init__(*args, **kwargs)
+ def filter_by_user(self, user):
+ self.fields['org'].queryset = Organisation.objects.filter(managers=user, approved=True)
+ def clean_date(self):
+ if self.instance.pk and self.instance.approved:
+ if self.cleaned_data['date'] != self.instance.date:
+ raise ValidationError("You cannot change the date on an article that has been approved")
+ return self.cleaned_data['date']
- @property
- def described_checkboxes(self):
- return {
- 'tags': {t.id: t.description for t in NewsTag.objects.all()}
- }
+ @property
+ def described_checkboxes(self):
+ return {
+ 'tags': {t.id: t.description for t in NewsTag.objects.all()}
+ }
- class Meta:
- model = NewsArticle
- exclude = ('submitter', 'approved', 'tweeted')
- widgets = {
- 'tags': forms.CheckboxSelectMultiple,
- }
+ class Meta:
+ model = NewsArticle
+ exclude = ('submitter', 'approved', 'tweeted')
+ widgets = {
+ 'tags': forms.CheckboxSelectMultiple,
+ }
import requests_oauthlib
class Command(BaseCommand):
- help = 'Post to twitter'
-
- def handle(self, *args, **options):
- curs = connection.cursor()
- curs.execute("SELECT pg_try_advisory_lock(62387372)")
- if not curs.fetchall()[0][0]:
- raise CommandError("Failed to get advisory lock, existing twitter_post process stuck?")
-
- articles = list(NewsArticle.objects.filter(tweeted=False, approved=True, date__gt=datetime.now()-timedelta(days=7)).order_by('date'))
- if not len(articles):
- return
-
- tw = requests_oauthlib.OAuth1Session(settings.TWITTER_CLIENT,
- settings.TWITTER_CLIENTSECRET,
- settings.TWITTER_TOKEN,
- settings.TWITTER_TOKENSECRET)
-
- for a in articles:
- # We hardcode 30 chars for the URL shortener. And then 10 to cover the intro and spacing.
- statusstr = u"News: {0} {1}/about/news/{2}/".format(a.title[:140-40], settings.SITE_ROOT, a.id)
- r = tw.post('https://api.twitter.com/1.1/statuses/update.json', data={
- 'status': statusstr,
- })
- if r.status_code != 200:
- print("Failed to post to twitter: %s " % r)
- else:
- a.tweeted = True
- a.save()
- # Don't post more often than once / 30 seconds, to not trigger flooding.
- time.sleep(30)
+ help = 'Post to twitter'
+
+ def handle(self, *args, **options):
+ curs = connection.cursor()
+ curs.execute("SELECT pg_try_advisory_lock(62387372)")
+ if not curs.fetchall()[0][0]:
+ raise CommandError("Failed to get advisory lock, existing twitter_post process stuck?")
+
+ articles = list(NewsArticle.objects.filter(tweeted=False, approved=True, date__gt=datetime.now()-timedelta(days=7)).order_by('date'))
+ if not len(articles):
+ return
+
+ tw = requests_oauthlib.OAuth1Session(settings.TWITTER_CLIENT,
+ settings.TWITTER_CLIENTSECRET,
+ settings.TWITTER_TOKEN,
+ settings.TWITTER_TOKENSECRET)
+
+ for a in articles:
+ # We hardcode 30 chars for the URL shortener. And then 10 to cover the intro and spacing.
+ statusstr = u"News: {0} {1}/about/news/{2}/".format(a.title[:140-40], settings.SITE_ROOT, a.id)
+ r = tw.post('https://api.twitter.com/1.1/statuses/update.json', data={
+ 'status': statusstr,
+ })
+ if r.status_code != 200:
+ print("Failed to post to twitter: %s " % r)
+ else:
+ a.tweeted = True
+ a.save()
+ # Don't post more often than once / 30 seconds, to not trigger flooding.
+ time.sleep(30)
import requests_oauthlib
class Command(BaseCommand):
- help = 'Register with twitter oauth'
-
- def handle(self, *args, **options):
- if not hasattr(settings, 'TWITTER_CLIENT'):
- raise CommandError("TWITTER_CLIENT must be set in settings_local.py")
- if not hasattr(settings, 'TWITTER_CLIENTSECRET'):
- raise CommandError("TWITTER_CLIENTSECRET must be set in settings_local.py")
- if hasattr(settings, 'TWITTER_TOKEN'):
- raise CommandError("TWITTER_TOKEN is already set in settings_local.py")
- if hasattr(settings, 'TWITTER_TOKENSECRET'):
- raise CommandError("TWITTER_TOKENSECRET is already set in settings_local.py")
-
- # OK, now we're good to go :)
- oauth = requests_oauthlib.OAuth1Session(settings.TWITTER_CLIENT, settings.TWITTER_CLIENTSECRET)
- fetch_response = oauth.fetch_request_token('https://api.twitter.com/oauth/request_token')
-
- authorization_url = oauth.authorization_url('https://api.twitter.com/oauth/authorize')
- print 'Please go here and authorize: %s' % authorization_url
-
- pin = raw_input('Paste the PIN here: ')
-
- oauth = requests_oauthlib.OAuth1Session(settings.TWITTER_CLIENT,
- settings.TWITTER_CLIENTSECRET,
- resource_owner_key=fetch_response.get('oauth_token'),
- resource_owner_secret=fetch_response.get('oauth_token_secret'),
- verifier=pin)
- oauth_tokens = oauth.fetch_access_token('https://api.twitter.com/oauth/access_token')
-
- print("Authorized. Please configure:")
- print("TWITTER_TOKEN='%s'" % oauth_tokens.get('oauth_token'))
- print("TWITTER_TOKENSECRET='%s'" % oauth_tokens.get('oauth_token_secret'))
+ help = 'Register with twitter oauth'
+
+ def handle(self, *args, **options):
+ if not hasattr(settings, 'TWITTER_CLIENT'):
+ raise CommandError("TWITTER_CLIENT must be set in settings_local.py")
+ if not hasattr(settings, 'TWITTER_CLIENTSECRET'):
+ raise CommandError("TWITTER_CLIENTSECRET must be set in settings_local.py")
+ if hasattr(settings, 'TWITTER_TOKEN'):
+ raise CommandError("TWITTER_TOKEN is already set in settings_local.py")
+ if hasattr(settings, 'TWITTER_TOKENSECRET'):
+ raise CommandError("TWITTER_TOKENSECRET is already set in settings_local.py")
+
+ # OK, now we're good to go :)
+ oauth = requests_oauthlib.OAuth1Session(settings.TWITTER_CLIENT, settings.TWITTER_CLIENTSECRET)
+ fetch_response = oauth.fetch_request_token('https://api.twitter.com/oauth/request_token')
+
+ authorization_url = oauth.authorization_url('https://api.twitter.com/oauth/authorize')
+ print 'Please go here and authorize: %s' % authorization_url
+
+ pin = raw_input('Paste the PIN here: ')
+
+ oauth = requests_oauthlib.OAuth1Session(settings.TWITTER_CLIENT,
+ settings.TWITTER_CLIENTSECRET,
+ resource_owner_key=fetch_response.get('oauth_token'),
+ resource_owner_secret=fetch_response.get('oauth_token_secret'),
+ verifier=pin)
+ oauth_tokens = oauth.fetch_access_token('https://api.twitter.com/oauth/access_token')
+
+ print("Authorized. Please configure:")
+ print("TWITTER_TOKEN='%s'" % oauth_tokens.get('oauth_token'))
+ print("TWITTER_TOKENSECRET='%s'" % oauth_tokens.get('oauth_token_secret'))
from pgweb.core.models import Organisation
class NewsTag(models.Model):
- urlname = models.CharField(max_length=20, null=False, blank=False, unique=True)
- name = models.CharField(max_length=32, null=False, blank=False)
- description = models.CharField(max_length=200, null=False, blank=False)
+ urlname = models.CharField(max_length=20, null=False, blank=False, unique=True)
+ name = models.CharField(max_length=32, null=False, blank=False)
+ description = models.CharField(max_length=200, null=False, blank=False)
- def __unicode__(self):
- return self.name
+ def __unicode__(self):
+ return self.name
- class Meta:
- ordering = ('urlname', )
+ class Meta:
+ ordering = ('urlname', )
class NewsArticle(models.Model):
- org = models.ForeignKey(Organisation, null=False, blank=False, verbose_name="Organisation", help_text="If no organisations are listed, please check the <a href=\"/account/orglist/\">organisation list</a> and contact the organisation manager or <a href=\"mailto:webmaster@postgresql.org\">webmaster@postgresql.org</a> if none are listed.")
- approved = models.BooleanField(null=False, blank=False, default=False)
- date = models.DateField(null=False, blank=False, default=date.today)
- title = models.CharField(max_length=200, null=False, blank=False)
- content = models.TextField(null=False, blank=False)
- tweeted = models.BooleanField(null=False, blank=False, default=False)
- tags = models.ManyToManyField(NewsTag, blank=False, help_text="Hover mouse over tags to view full description")
-
- send_notification = True
- send_m2m_notification = True
- markdown_fields = ('content',)
-
- def purge_urls(self):
- yield '/about/news/%s/' % self.pk
- yield '/about/newsarchive/'
- yield '/news.rss'
- yield '/news/.*.rss'
- # FIXME: when to expire the front page?
- yield '/$'
-
- def __unicode__(self):
- return "%s: %s" % (self.date, self.title)
-
- def verify_submitter(self, user):
- return (len(self.org.managers.filter(pk=user.pk)) == 1)
-
- def is_migrated(self):
- if self.org.pk == 0:
- return True
- return False
-
- @property
- def displaydate(self):
- return self.date.strftime("%Y-%m-%d")
-
- class Meta:
- ordering = ('-date',)
+ org = models.ForeignKey(Organisation, null=False, blank=False, verbose_name="Organisation", help_text="If no organisations are listed, please check the <a href=\"/account/orglist/\">organisation list</a> and contact the organisation manager or <a href=\"mailto:webmaster@postgresql.org\">webmaster@postgresql.org</a> if none are listed.")
+ approved = models.BooleanField(null=False, blank=False, default=False)
+ date = models.DateField(null=False, blank=False, default=date.today)
+ title = models.CharField(max_length=200, null=False, blank=False)
+ content = models.TextField(null=False, blank=False)
+ tweeted = models.BooleanField(null=False, blank=False, default=False)
+ tags = models.ManyToManyField(NewsTag, blank=False, help_text="Hover mouse over tags to view full description")
+
+ send_notification = True
+ send_m2m_notification = True
+ markdown_fields = ('content',)
+
+ def purge_urls(self):
+ yield '/about/news/%s/' % self.pk
+ yield '/about/newsarchive/'
+ yield '/news.rss'
+ yield '/news/.*.rss'
+ # FIXME: when to expire the front page?
+ yield '/$'
+
+ def __unicode__(self):
+ return "%s: %s" % (self.date, self.title)
+
+ def verify_submitter(self, user):
+ return (len(self.org.managers.filter(pk=user.pk)) == 1)
+
+ def is_migrated(self):
+ if self.org.pk == 0:
+ return True
+ return False
+
+ @property
+ def displaydate(self):
+ return self.date.strftime("%Y-%m-%d")
+
+ class Meta:
+ ordering = ('-date',)
from models import NewsArticle
def get_struct():
- now = date.today()
- fouryearsago = date.today() - timedelta(4*365, 0, 0)
+ now = date.today()
+ fouryearsago = date.today() - timedelta(4*365, 0, 0)
- # We intentionally don't put /about/newsarchive/ in the sitemap,
- # since we don't care about getting it indexed.
- # Also, don't bother indexing anything > 4 years old
+ # We intentionally don't put /about/newsarchive/ in the sitemap,
+ # since we don't care about getting it indexed.
+ # Also, don't bother indexing anything > 4 years old
- for n in NewsArticle.objects.filter(approved=True, date__gt=fouryearsago):
- yearsold = (now - n.date).days / 365
- if yearsold > 4:
- yearsold = 4
- yield ('about/news/%s/' % n.id,
- 0.5-(yearsold/10.0))
+ for n in NewsArticle.objects.filter(approved=True, date__gt=fouryearsago):
+ yearsold = (now - n.date).days / 365
+ if yearsold > 4:
+ yearsold = 4
+ yield ('about/news/%s/' % n.id,
+ 0.5-(yearsold/10.0))
import json
def archive(request, tag=None, paging=None):
- if tag:
- tag = get_object_or_404(NewsTag,urlname=tag.strip('/'))
- news = NewsArticle.objects.filter(approved=True, tags=tag)
- else:
- tag = None
- news = NewsArticle.objects.filter(approved=True)
- return render_pgweb(request, 'about', 'news/newsarchive.html', {
- 'news': news,
- 'tag': tag,
- 'newstags': NewsTag.objects.all(),
- })
+ if tag:
+ tag = get_object_or_404(NewsTag,urlname=tag.strip('/'))
+ news = NewsArticle.objects.filter(approved=True, tags=tag)
+ else:
+ tag = None
+ news = NewsArticle.objects.filter(approved=True)
+ return render_pgweb(request, 'about', 'news/newsarchive.html', {
+ 'news': news,
+ 'tag': tag,
+ 'newstags': NewsTag.objects.all(),
+ })
def item(request, itemid, throwaway=None):
- news = get_object_or_404(NewsArticle, pk=itemid)
- if not news.approved:
- raise Http404
- return render_pgweb(request, 'about', 'news/item.html', {
- 'obj': news,
- 'newstags': NewsTag.objects.all(),
- })
+ news = get_object_or_404(NewsArticle, pk=itemid)
+ if not news.approved:
+ raise Http404
+ return render_pgweb(request, 'about', 'news/item.html', {
+ 'obj': news,
+ 'newstags': NewsTag.objects.all(),
+ })
def taglist_json(request):
- return HttpResponse(json.dumps({
- 'tags': [{'name': t.urlname, 'description': t.description} for t in NewsTag.objects.distinct('urlname')],
- }), content_type='application/json')
+ return HttpResponse(json.dumps({
+ 'tags': [{'name': t.urlname, 'description': t.description} for t in NewsTag.objects.distinct('urlname')],
+ }), content_type='application/json')
@login_required
def form(request, itemid):
- return simple_form(NewsArticle, itemid, request, NewsArticleForm,
- redirect='/account/edit/news/')
+ return simple_form(NewsArticle, itemid, request, NewsArticleForm,
+ redirect='/account/edit/news/')
from models import ProfessionalService
class ProfessionalServiceAdmin(PgwebAdmin):
- list_display = ('__unicode__', 'approved',)
- list_filter = ('approved',)
- search_fields = ('org__name',)
+ list_display = ('__unicode__', 'approved',)
+ list_filter = ('approved',)
+ search_fields = ('org__name',)
admin.site.register(ProfessionalService, ProfessionalServiceAdmin)
from models import ProfessionalService
class ProfessionalServiceForm(forms.ModelForm):
- form_intro = """Note that in order to register a new professional service, you must first register an organisation.
+ form_intro = """Note that in order to register a new professional service, you must first register an organisation.
If you have not done so, use <a href="/account/organisations/new/">this form</a>."""
- def __init__(self, *args, **kwargs):
- super(ProfessionalServiceForm, self).__init__(*args, **kwargs)
- def filter_by_user(self, user):
- self.fields['org'].queryset = Organisation.objects.filter(managers=user, approved=True)
- class Meta:
- model = ProfessionalService
- exclude = ('submitter', 'approved', )
+ def __init__(self, *args, **kwargs):
+ super(ProfessionalServiceForm, self).__init__(*args, **kwargs)
+ def filter_by_user(self, user):
+ self.fields['org'].queryset = Organisation.objects.filter(managers=user, approved=True)
+ class Meta:
+ model = ProfessionalService
+ exclude = ('submitter', 'approved', )
from pgweb.core.models import Organisation
class ProfessionalService(models.Model):
- approved = models.BooleanField(null=False, blank=False, default=False)
-
- org = models.OneToOneField(Organisation, null=False, blank=False,
- db_column="organisation_id",
- verbose_name="organisation",
- help_text="If no organisations are listed, please check the <a href=\"/account/orglist/\">organisation list</a> and contact the organisation manager or <a href=\"mailto:webmaster@postgresql.org\">webmaster@postgresql.org</a> if none are listed.")
- description = models.TextField(null=False,blank=False)
- employees = models.CharField(max_length=32, null=True, blank=True)
- locations = models.CharField(max_length=128, null=True, blank=True)
- region_africa = models.BooleanField(null=False, default=False, verbose_name="Africa")
- region_asia = models.BooleanField(null=False, default=False, verbose_name="Asia")
- region_europe = models.BooleanField(null=False, default=False, verbose_name="Europe")
- region_northamerica = models.BooleanField(null=False, default=False, verbose_name="North America")
- region_oceania = models.BooleanField(null=False, default=False, verbose_name="Oceania")
- region_southamerica = models.BooleanField(null=False, default=False, verbose_name="South America")
- hours = models.CharField(max_length=128, null=True, blank=True)
- languages = models.CharField(max_length=128, null=True, blank=True)
- customerexample = models.TextField(blank=True, null=True, verbose_name="Customer Example")
- experience = models.TextField(blank=True, null=True)
- contact = models.TextField(null=True, blank=True)
- url = models.URLField(max_length=128, null=True, blank=True, verbose_name="URL")
- provides_support = models.BooleanField(null=False, default=False)
- provides_hosting = models.BooleanField(null=False, default=False)
- interfaces = models.CharField(max_length=512, null=True, blank=True, verbose_name="Interfaces (for hosting)")
-
- purge_urls = ('/support/professional_', )
-
- send_notification = True
-
- def verify_submitter(self, user):
- return (len(self.org.managers.filter(pk=user.pk)) == 1)
-
- def __unicode__(self):
- return self.org.name
-
- class Meta:
- ordering = ('org__name',)
+ approved = models.BooleanField(null=False, blank=False, default=False)
+
+ org = models.OneToOneField(Organisation, null=False, blank=False,
+ db_column="organisation_id",
+ verbose_name="organisation",
+ help_text="If no organisations are listed, please check the <a href=\"/account/orglist/\">organisation list</a> and contact the organisation manager or <a href=\"mailto:webmaster@postgresql.org\">webmaster@postgresql.org</a> if none are listed.")
+ description = models.TextField(null=False,blank=False)
+ employees = models.CharField(max_length=32, null=True, blank=True)
+ locations = models.CharField(max_length=128, null=True, blank=True)
+ region_africa = models.BooleanField(null=False, default=False, verbose_name="Africa")
+ region_asia = models.BooleanField(null=False, default=False, verbose_name="Asia")
+ region_europe = models.BooleanField(null=False, default=False, verbose_name="Europe")
+ region_northamerica = models.BooleanField(null=False, default=False, verbose_name="North America")
+ region_oceania = models.BooleanField(null=False, default=False, verbose_name="Oceania")
+ region_southamerica = models.BooleanField(null=False, default=False, verbose_name="South America")
+ hours = models.CharField(max_length=128, null=True, blank=True)
+ languages = models.CharField(max_length=128, null=True, blank=True)
+ customerexample = models.TextField(blank=True, null=True, verbose_name="Customer Example")
+ experience = models.TextField(blank=True, null=True)
+ contact = models.TextField(null=True, blank=True)
+ url = models.URLField(max_length=128, null=True, blank=True, verbose_name="URL")
+ provides_support = models.BooleanField(null=False, default=False)
+ provides_hosting = models.BooleanField(null=False, default=False)
+ interfaces = models.CharField(max_length=512, null=True, blank=True, verbose_name="Interfaces (for hosting)")
+
+ purge_urls = ('/support/professional_', )
+
+ send_notification = True
+
+ def verify_submitter(self, user):
+ return (len(self.org.managers.filter(pk=user.pk)) == 1)
+
+ def __unicode__(self):
+ return self.org.name
+
+ class Meta:
+ ordering = ('org__name',)
from views import regions
def get_struct():
- for key, name in regions:
- yield ('support/professional_support/%s/' % key, None)
- yield ('support/professional_hosting/%s/' % key, None)
+ for key, name in regions:
+ yield ('support/professional_support/%s/' % key, None)
+ yield ('support/professional_hosting/%s/' % key, None)
)
def root(request, servtype):
- title = servtype=='support' and 'Professional Services' or 'Hosting Providers'
- what = servtype=='support' and 'support' or 'hosting'
- support = servtype=='support'
- return render_pgweb(request, 'support', 'profserv/root.html', {
- 'title': title,
- 'support': support,
- 'regions': regions,
- 'what': what,
- })
+ title = servtype=='support' and 'Professional Services' or 'Hosting Providers'
+ what = servtype=='support' and 'support' or 'hosting'
+ support = servtype=='support'
+ return render_pgweb(request, 'support', 'profserv/root.html', {
+ 'title': title,
+ 'support': support,
+ 'regions': regions,
+ 'what': what,
+ })
def region(request, servtype, regionname):
- regname = [n for r,n in regions if r==regionname]
- if not regname:
- raise Http404
- regname = regname[0]
-
- what = servtype=='support' and 'support' or 'hosting'
- whatname = servtype=='support' and 'Professional Services' or 'Hosting Providers'
- title = "%s - %s" % (whatname, regname)
- support = servtype=='support'
-
- # DB model is a bit funky here, so use the extra-where functionality to filter properly.
- # Field names are cleaned up earlier, so it's safe against injections.
- services = ProfessionalService.objects.select_related('org').filter(approved=True).extra(where=["region_%s AND provides_%s" % (regionname, what),])
-
- return render_pgweb(request, 'support', 'profserv/list.html', {
- 'title': title,
- 'support': support,
- 'what': what,
- 'whatname': whatname,
- 'regionname': regname,
- 'services': services,
- })
+ regname = [n for r,n in regions if r==regionname]
+ if not regname:
+ raise Http404
+ regname = regname[0]
+
+ what = servtype=='support' and 'support' or 'hosting'
+ whatname = servtype=='support' and 'Professional Services' or 'Hosting Providers'
+ title = "%s - %s" % (whatname, regname)
+ support = servtype=='support'
+
+ # DB model is a bit funky here, so use the extra-where functionality to filter properly.
+ # Field names are cleaned up earlier, so it's safe against injections.
+ services = ProfessionalService.objects.select_related('org').filter(approved=True).extra(where=["region_%s AND provides_%s" % (regionname, what),])
+
+ return render_pgweb(request, 'support', 'profserv/list.html', {
+ 'title': title,
+ 'support': support,
+ 'what': what,
+ 'whatname': whatname,
+ 'regionname': regname,
+ 'services': services,
+ })
# Forms to edit
@login_required
def profservform(request, itemid):
- return simple_form(ProfessionalService, itemid, request, ProfessionalServiceForm,
- redirect='/account/edit/services/')
+ return simple_form(ProfessionalService, itemid, request, ProfessionalServiceForm,
+ redirect='/account/edit/services/')
from models import PUG
class PUGAdmin(PgwebAdmin):
- list_display = ('title', 'approved', )
- list_filter = ('approved', )
- search_fields = ('title', )
+ list_display = ('title', 'approved', )
+ list_filter = ('approved', )
+ search_fields = ('title', )
admin.site.register(PUG, PUGAdmin)
from django.db import models
class PUG(models.Model):
- """
- contains information about a local PostgreSQL user group
- """
- country = models.ForeignKey('core.Country')
- org = models.ForeignKey('core.Organisation', null=True, blank=True, help_text='Organisation that manages the PUG and its contents')
- approved = models.BooleanField(null=False, blank=False, default=False)
- locale = models.CharField(max_length=255, help_text="Locale where the PUG meets, e.g. 'New York City'")
- title = models.CharField(max_length=255, help_text="Title/Name of the PUG, e.g. 'NYC PostgreSQL User Group'")
- website_url = models.TextField(null=True, blank=True)
- mailing_list_url = models.TextField(null=True, blank=True)
+ """
+ contains information about a local PostgreSQL user group
+ """
+ country = models.ForeignKey('core.Country')
+ org = models.ForeignKey('core.Organisation', null=True, blank=True, help_text='Organisation that manages the PUG and its contents')
+ approved = models.BooleanField(null=False, blank=False, default=False)
+ locale = models.CharField(max_length=255, help_text="Locale where the PUG meets, e.g. 'New York City'")
+ title = models.CharField(max_length=255, help_text="Title/Name of the PUG, e.g. 'NYC PostgreSQL User Group'")
+ website_url = models.TextField(null=True, blank=True)
+ mailing_list_url = models.TextField(null=True, blank=True)
- purge_urls = ('/community/user-groups/', )
- send_notification = True
+ purge_urls = ('/community/user-groups/', )
+ send_notification = True
- def __unicode__(self):
- return self.title
+ def __unicode__(self):
+ return self.title
from models import PUG
def index(request):
- """
- contains list of PUGs, in country/locale alphabetical order
- """
- pug_list = []
- for pug in PUG.objects.filter(approved=True).order_by('country__name', 'locale').all():
- if pug_list and pug_list[-1].get('country') == pug.country.name:
- pug_list[-1]['pugs'].append(pug)
- else:
- pug_list.append({
- 'country': pug.country.name,
- 'pugs': [pug]
- })
- return render_pgweb(request, 'community', 'pugs/index.html', {
- 'pug_list': pug_list,
- })
+ """
+ contains list of PUGs, in country/locale alphabetical order
+ """
+ pug_list = []
+ for pug in PUG.objects.filter(approved=True).order_by('country__name', 'locale').all():
+ if pug_list and pug_list[-1].get('country') == pug.country.name:
+ pug_list[-1]['pugs'].append(pug)
+ else:
+ pug_list.append({
+ 'country': pug.country.name,
+ 'pugs': [pug]
+ })
+ return render_pgweb(request, 'community', 'pugs/index.html', {
+ 'pug_list': pug_list,
+ })
from models import Quote
class QuoteAdmin(admin.ModelAdmin):
- list_display = ('quote', 'who', 'org', )
+ list_display = ('quote', 'who', 'org', )
admin.site.register(Quote, QuoteAdmin)
from django.db import models
class Quote(models.Model):
- approved = models.BooleanField(null=False, default=False)
- quote = models.TextField(null=False, blank=False)
- who = models.CharField(max_length=100, null=False, blank=False)
- org = models.CharField(max_length=100, null=False, blank=False)
- link = models.URLField(null=False, blank=False)
+ approved = models.BooleanField(null=False, default=False)
+ quote = models.TextField(null=False, blank=False)
+ who = models.CharField(max_length=100, null=False, blank=False)
+ org = models.CharField(max_length=100, null=False, blank=False)
+ link = models.URLField(null=False, blank=False)
- send_notification = True
+ send_notification = True
- purge_urls = ('/about/quotesarchive/', '/$', )
+ purge_urls = ('/about/quotesarchive/', '/$', )
- def __unicode__(self):
- if len(self.quote) > 75:
- return "%s..." % self.quote[:75]
- else:
- return self.quote
+ def __unicode__(self):
+ if len(self.quote) > 75:
+ return "%s..." % self.quote[:75]
+ else:
+ return self.quote
# Conditionally import memcached library. Everything will work without
# it, so we allow development installs to run without it...
try:
- import pylibmc
- has_memcached=True
+ import pylibmc
+ has_memcached=True
except:
- has_memcached=False
+ has_memcached=False
def generate_pagelinks(pagenum, totalpages, querystring):
- # Generate a list of links to page through a search result
- # We generate these in HTML from the python code because it's
- # simply too ugly to try to do it in the template.
- if totalpages < 2:
- return
+ # Generate a list of links to page through a search result
+ # We generate these in HTML from the python code because it's
+ # simply too ugly to try to do it in the template.
+ if totalpages < 2:
+ return
- if pagenum > 1:
- # Prev link
- yield '<a href="%s&p=%s">Prev</a>' % (querystring, pagenum-1)
+ if pagenum > 1:
+ # Prev link
+ yield '<a href="%s&p=%s">Prev</a>' % (querystring, pagenum-1)
- if pagenum > 10:
- start = pagenum - 10
- else:
- start = 1
+ if pagenum > 10:
+ start = pagenum - 10
+ else:
+ start = 1
- for i in range(start, min(start+20, totalpages + 1)):
- if i == pagenum:
- yield "%s" % i
- else:
- yield '<a href="%s&p=%s">%s</a>' % (querystring, i, i)
+ for i in range(start, min(start+20, totalpages + 1)):
+ if i == pagenum:
+ yield "%s" % i
+ else:
+ yield '<a href="%s&p=%s">%s</a>' % (querystring, i, i)
- if pagenum != min(start+20, totalpages):
- yield '<a href="%s&p=%s">Next</a>' % (querystring, pagenum+1)
+ if pagenum != min(start+20, totalpages):
+ yield '<a href="%s&p=%s">Next</a>' % (querystring, pagenum+1)
@csrf_exempt
@cache(minutes=15)
def search(request):
- # Perform a general web search
- # Since this lives in a different database, we open a direct
- # connection with psycopg, thus bypassing everything that has to do
- # with django.
-
- # constants that we might eventually want to make configurable
- hitsperpage = 20
-
- if request.GET.has_key('m') and request.GET['m'] == '1':
- searchlists = True
-
- if request.GET.has_key('l'):
- if request.GET['l'] != '':
- try:
- listid = int(request.GET['l'])
- except:
- listid = None
- else:
- listid = None
- else:
- # Listid not specified. But do we have the name?
- if request.GET.has_key('ln'):
- try:
- ll = MailingList.objects.get(listname=request.GET['ln'])
- listid = ll.id
- except MailingList.DoesNotExist:
- # Invalid list name just resets the default of the form,
- # no need to throw an error.
- listid = None
- else:
- listid = None
-
- if request.GET.has_key('d'):
- try:
- dateval = int(request.GET['d'])
- except:
- dateval = None
- else:
- dateval = None
-
- if request.GET.has_key('s'):
- listsort = request.GET['s']
- if not listsort in ('r', 'd', 'i'):
- listsort = 'r'
- else:
- listsort = 'r'
-
- if not dateval:
- dateval = 365
-
- sortoptions = (
- {'val':'r', 'text': 'Rank', 'selected': not (request.GET.has_key('s') and request.GET['s'] == 'd')},
- {'val':'d', 'text': 'Date', 'selected': request.GET.has_key('s') and request.GET['s'] == 'd'},
- {'val':'i', 'text': 'Reverse date', 'selected': request.GET.has_key('s') and request.GET['s'] == 'i'},
- )
- dateoptions = (
- {'val': -1, 'text': 'anytime'},
- {'val': 1, 'text': 'within last day'},
- {'val': 7, 'text': 'within last week'},
- {'val': 31, 'text': 'within last month'},
- {'val': 186, 'text': 'within last 6 months'},
- {'val': 365, 'text': 'within last year'},
- )
- else:
- searchlists = False
- if request.GET.has_key('u'):
- suburl = request.GET['u']
- else:
- suburl = None
-
- if request.GET.has_key('a'):
- allsites = (request.GET['a'] == "1")
- else:
- allsites = False
-
- # Check that we actually have something to search for
- if not request.GET.has_key('q') or request.GET['q'] == '':
- if searchlists:
- return render(request, 'search/listsearch.html', {
- 'search_error': "No search term specified.",
- 'sortoptions': sortoptions,
- 'lists': MailingList.objects.all().order_by("group__sortkey"),
- 'listid': listid,
- 'dates': dateoptions,
- 'dateval': dateval,
- })
- else:
- return render(request, 'search/sitesearch.html', {
- 'search_error': "No search term specified.",
- })
- query = request.GET['q'].strip()
-
- # Anti-stefan prevention
- if len(query) > 1000:
- return render(request, 'search/sitesearch.html', {
- 'search_error': "Search term too long.",
- })
-
- # Is the request being paged?
- if request.GET.has_key('p'):
- try:
- pagenum = int(request.GET['p'])
- except:
- pagenum = 1
- else:
- pagenum = 1
-
- firsthit = (pagenum - 1) * hitsperpage + 1
-
- if searchlists:
- # Lists are searched by passing the work down using a http
- # API. In the future, we probably want to do everything
- # through a http API and merge hits, but that's for later
- p = {
- 'q': query.encode('utf-8'),
- 's': listsort,
- }
- if listid:
- if listid < 0:
- # This is a list group, we expand that on the web server
- p['ln'] = ','.join([x.listname for x in MailingList.objects.filter(group=-listid)])
- else:
- p['ln'] = MailingList.objects.get(pk=listid).listname
- if dateval:
- p['d'] = dateval
- urlstr = urllib.urlencode(p)
- # If memcached is available, let's try it
- hits = None
- if has_memcached:
- memc = pylibmc.Client(['127.0.0.1',], binary=True)
- # behavior not supported on pylibmc in squeeze:: behaviors={'tcp_nodelay':True})
- try:
- hits = memc.get(urlstr)
- except Exception:
- # If we had an exception, don't try to store either
- memc = None
- if not hits:
- # No hits found - so try to get them from the search server
- if settings.ARCHIVES_SEARCH_PLAINTEXT:
- c = httplib.HTTPConnection(settings.ARCHIVES_SEARCH_SERVER, strict=True, timeout=5)
- else:
- c = httplib.HTTPSConnection(settings.ARCHIVES_SEARCH_SERVER, strict=True, timeout=5)
- c.request('POST', '/archives-search/', urlstr, {'Content-type': 'application/x-www-form-urlencoded; charset=utf-8'})
- c.sock.settimeout(20) # Set a 20 second timeout
- try:
- r = c.getresponse()
- except (socket.timeout, ssl.SSLError):
- return render(request, 'search/listsearch.html', {
- 'search_error': 'Timeout when talking to search server. Please try your search again later, or with a more restrictive search terms.',
- })
- if r.status != 200:
- memc = None
- return render(request, 'search/listsearch.html', {
- 'search_error': 'Error talking to search server: %s' % r.reason,
- })
- hits = json.loads(r.read())
- if has_memcached and memc:
- # Store them in memcached too! But only for 10 minutes...
- # And always compress it, just because we can
- memc.set(urlstr, hits, 60*10, 1)
- memc = None
-
- if isinstance(hits, dict):
- # This is not just a list of hits.
- # Right now the only supported dict result is a messageid
- # match, but make sure that's what it is.
- if hits['messageidmatch'] == 1:
- return HttpResponseRedirect("/message-id/%s" % query)
-
- totalhits = len(hits)
- querystr = "?m=1&q=%s&l=%s&d=%s&s=%s" % (
- urllib.quote_plus(query.encode('utf-8')),
- listid or '',
- dateval,
- listsort
- )
-
- return render(request, 'search/listsearch.html', {
- 'hitcount': totalhits,
- 'firsthit': firsthit,
- 'lasthit': min(totalhits, firsthit+hitsperpage-1),
- 'query': request.GET['q'],
- 'pagelinks': " ".join(
- generate_pagelinks(pagenum,
- totalhits / hitsperpage + 1,
- querystr)),
- 'hits': [{
- 'date': h['d'],
- 'subject': h['s'],
- 'author': h['f'],
- 'messageid': h['m'],
- 'abstract': h['a'],
- 'rank': h['r'],
- } for h in hits[firsthit-1:firsthit+hitsperpage-1]],
- 'sortoptions': sortoptions,
- 'lists': MailingList.objects.all().order_by("group__sortkey"),
- 'listid': listid,
- 'dates': dateoptions,
- 'dateval': dateval,
- })
-
- else:
- # Website search is still done by making a regular pgsql connection
- # to the search server.
- try:
- conn = psycopg2.connect(settings.SEARCH_DSN)
- curs = conn.cursor()
- except:
- return render(request, 'search/sitesearch.html', {
- 'search_error': 'Could not connect to search database.'
- })
-
- # This is kind of a hack, but... Some URLs are flagged as internal
- # and should as such only be included in searches that explicitly
- # reference the suburl that they are in.
- if suburl and suburl.startswith('/docs/devel'):
- include_internal = True
- else:
- include_internal = False
-
- # perform the query for general web search
- try:
- curs.execute("SELECT * FROM site_search(%(query)s, %(firsthit)s, %(hitsperpage)s, %(allsites)s, %(suburl)s, %(internal)s)", {
- 'query': query,
- 'firsthit': firsthit - 1,
- 'hitsperpage': hitsperpage,
- 'allsites': allsites,
- 'suburl': suburl,
- 'internal': include_internal,
- })
- except psycopg2.ProgrammingError:
- return render(request, 'search/sitesearch.html', {
- 'search_error': 'Error executing search query.'
- })
-
- hits = curs.fetchall()
- conn.close()
- totalhits = int(hits[-1][5])
- try:
- if suburl:
- quoted_suburl = urllib.quote_plus(suburl)
- else:
- quoted_suburl = ''
- except:
- quoted_suburl = ''
- querystr = "?q=%s&a=%s&u=%s" % (
- urllib.quote_plus(query.encode('utf-8')),
- allsites and "1" or "0",
- quoted_suburl,
- )
-
- return render(request, 'search/sitesearch.html', {
- 'suburl': suburl,
- 'allsites': allsites,
- 'hitcount': totalhits,
- 'firsthit': firsthit,
- 'lasthit': min(totalhits, firsthit+hitsperpage-1),
- 'query': request.GET['q'],
- 'pagelinks': " ".join(
- generate_pagelinks(pagenum,
- totalhits / hitsperpage + 1,
- querystr)),
- 'hits': [{
- 'title': h[3],
- 'url': "%s%s" % (h[1], h[2]),
- 'abstract': h[4].replace("[[[[[[", "<strong>").replace("]]]]]]","</strong>"),
- 'rank': h[5]} for h in hits[:-1]],
- })
+ # Perform a general web search
+ # Since this lives in a different database, we open a direct
+ # connection with psycopg, thus bypassing everything that has to do
+ # with django.
+
+ # constants that we might eventually want to make configurable
+ hitsperpage = 20
+
+ if request.GET.has_key('m') and request.GET['m'] == '1':
+ searchlists = True
+
+ if request.GET.has_key('l'):
+ if request.GET['l'] != '':
+ try:
+ listid = int(request.GET['l'])
+ except:
+ listid = None
+ else:
+ listid = None
+ else:
+ # Listid not specified. But do we have the name?
+ if request.GET.has_key('ln'):
+ try:
+ ll = MailingList.objects.get(listname=request.GET['ln'])
+ listid = ll.id
+ except MailingList.DoesNotExist:
+ # Invalid list name just resets the default of the form,
+ # no need to throw an error.
+ listid = None
+ else:
+ listid = None
+
+ if request.GET.has_key('d'):
+ try:
+ dateval = int(request.GET['d'])
+ except:
+ dateval = None
+ else:
+ dateval = None
+
+ if request.GET.has_key('s'):
+ listsort = request.GET['s']
+ if not listsort in ('r', 'd', 'i'):
+ listsort = 'r'
+ else:
+ listsort = 'r'
+
+ if not dateval:
+ dateval = 365
+
+ sortoptions = (
+ {'val':'r', 'text': 'Rank', 'selected': not (request.GET.has_key('s') and request.GET['s'] == 'd')},
+ {'val':'d', 'text': 'Date', 'selected': request.GET.has_key('s') and request.GET['s'] == 'd'},
+ {'val':'i', 'text': 'Reverse date', 'selected': request.GET.has_key('s') and request.GET['s'] == 'i'},
+ )
+ dateoptions = (
+ {'val': -1, 'text': 'anytime'},
+ {'val': 1, 'text': 'within last day'},
+ {'val': 7, 'text': 'within last week'},
+ {'val': 31, 'text': 'within last month'},
+ {'val': 186, 'text': 'within last 6 months'},
+ {'val': 365, 'text': 'within last year'},
+ )
+ else:
+ searchlists = False
+ if request.GET.has_key('u'):
+ suburl = request.GET['u']
+ else:
+ suburl = None
+
+ if request.GET.has_key('a'):
+ allsites = (request.GET['a'] == "1")
+ else:
+ allsites = False
+
+ # Check that we actually have something to search for
+ if not request.GET.has_key('q') or request.GET['q'] == '':
+ if searchlists:
+ return render(request, 'search/listsearch.html', {
+ 'search_error': "No search term specified.",
+ 'sortoptions': sortoptions,
+ 'lists': MailingList.objects.all().order_by("group__sortkey"),
+ 'listid': listid,
+ 'dates': dateoptions,
+ 'dateval': dateval,
+ })
+ else:
+ return render(request, 'search/sitesearch.html', {
+ 'search_error': "No search term specified.",
+ })
+ query = request.GET['q'].strip()
+
+ # Anti-stefan prevention
+ if len(query) > 1000:
+ return render(request, 'search/sitesearch.html', {
+ 'search_error': "Search term too long.",
+ })
+
+ # Is the request being paged?
+ if request.GET.has_key('p'):
+ try:
+ pagenum = int(request.GET['p'])
+ except:
+ pagenum = 1
+ else:
+ pagenum = 1
+
+ firsthit = (pagenum - 1) * hitsperpage + 1
+
+ if searchlists:
+ # Lists are searched by passing the work down using a http
+ # API. In the future, we probably want to do everything
+ # through a http API and merge hits, but that's for later
+ p = {
+ 'q': query.encode('utf-8'),
+ 's': listsort,
+ }
+ if listid:
+ if listid < 0:
+ # This is a list group, we expand that on the web server
+ p['ln'] = ','.join([x.listname for x in MailingList.objects.filter(group=-listid)])
+ else:
+ p['ln'] = MailingList.objects.get(pk=listid).listname
+ if dateval:
+ p['d'] = dateval
+ urlstr = urllib.urlencode(p)
+ # If memcached is available, let's try it
+ hits = None
+ if has_memcached:
+ memc = pylibmc.Client(['127.0.0.1',], binary=True)
+ # behavior not supported on pylibmc in squeeze:: behaviors={'tcp_nodelay':True})
+ try:
+ hits = memc.get(urlstr)
+ except Exception:
+ # If we had an exception, don't try to store either
+ memc = None
+ if not hits:
+ # No hits found - so try to get them from the search server
+ if settings.ARCHIVES_SEARCH_PLAINTEXT:
+ c = httplib.HTTPConnection(settings.ARCHIVES_SEARCH_SERVER, strict=True, timeout=5)
+ else:
+ c = httplib.HTTPSConnection(settings.ARCHIVES_SEARCH_SERVER, strict=True, timeout=5)
+ c.request('POST', '/archives-search/', urlstr, {'Content-type': 'application/x-www-form-urlencoded; charset=utf-8'})
+ c.sock.settimeout(20) # Set a 20 second timeout
+ try:
+ r = c.getresponse()
+ except (socket.timeout, ssl.SSLError):
+ return render(request, 'search/listsearch.html', {
+ 'search_error': 'Timeout when talking to search server. Please try your search again later, or with a more restrictive search terms.',
+ })
+ if r.status != 200:
+ memc = None
+ return render(request, 'search/listsearch.html', {
+ 'search_error': 'Error talking to search server: %s' % r.reason,
+ })
+ hits = json.loads(r.read())
+ if has_memcached and memc:
+ # Store them in memcached too! But only for 10 minutes...
+ # And always compress it, just because we can
+ memc.set(urlstr, hits, 60*10, 1)
+ memc = None
+
+ if isinstance(hits, dict):
+ # This is not just a list of hits.
+ # Right now the only supported dict result is a messageid
+ # match, but make sure that's what it is.
+ if hits['messageidmatch'] == 1:
+ return HttpResponseRedirect("/message-id/%s" % query)
+
+ totalhits = len(hits)
+ querystr = "?m=1&q=%s&l=%s&d=%s&s=%s" % (
+ urllib.quote_plus(query.encode('utf-8')),
+ listid or '',
+ dateval,
+ listsort
+ )
+
+ return render(request, 'search/listsearch.html', {
+ 'hitcount': totalhits,
+ 'firsthit': firsthit,
+ 'lasthit': min(totalhits, firsthit+hitsperpage-1),
+ 'query': request.GET['q'],
+ 'pagelinks': " ".join(
+ generate_pagelinks(pagenum,
+ totalhits / hitsperpage + 1,
+ querystr)),
+ 'hits': [{
+ 'date': h['d'],
+ 'subject': h['s'],
+ 'author': h['f'],
+ 'messageid': h['m'],
+ 'abstract': h['a'],
+ 'rank': h['r'],
+ } for h in hits[firsthit-1:firsthit+hitsperpage-1]],
+ 'sortoptions': sortoptions,
+ 'lists': MailingList.objects.all().order_by("group__sortkey"),
+ 'listid': listid,
+ 'dates': dateoptions,
+ 'dateval': dateval,
+ })
+
+ else:
+ # Website search is still done by making a regular pgsql connection
+ # to the search server.
+ try:
+ conn = psycopg2.connect(settings.SEARCH_DSN)
+ curs = conn.cursor()
+ except:
+ return render(request, 'search/sitesearch.html', {
+ 'search_error': 'Could not connect to search database.'
+ })
+
+ # This is kind of a hack, but... Some URLs are flagged as internal
+ # and should as such only be included in searches that explicitly
+ # reference the suburl that they are in.
+ if suburl and suburl.startswith('/docs/devel'):
+ include_internal = True
+ else:
+ include_internal = False
+
+ # perform the query for general web search
+ try:
+ curs.execute("SELECT * FROM site_search(%(query)s, %(firsthit)s, %(hitsperpage)s, %(allsites)s, %(suburl)s, %(internal)s)", {
+ 'query': query,
+ 'firsthit': firsthit - 1,
+ 'hitsperpage': hitsperpage,
+ 'allsites': allsites,
+ 'suburl': suburl,
+ 'internal': include_internal,
+ })
+ except psycopg2.ProgrammingError:
+ return render(request, 'search/sitesearch.html', {
+ 'search_error': 'Error executing search query.'
+ })
+
+ hits = curs.fetchall()
+ conn.close()
+ totalhits = int(hits[-1][5])
+ try:
+ if suburl:
+ quoted_suburl = urllib.quote_plus(suburl)
+ else:
+ quoted_suburl = ''
+ except:
+ quoted_suburl = ''
+ querystr = "?q=%s&a=%s&u=%s" % (
+ urllib.quote_plus(query.encode('utf-8')),
+ allsites and "1" or "0",
+ quoted_suburl,
+ )
+
+ return render(request, 'search/sitesearch.html', {
+ 'suburl': suburl,
+ 'allsites': allsites,
+ 'hitcount': totalhits,
+ 'firsthit': firsthit,
+ 'lasthit': min(totalhits, firsthit+hitsperpage-1),
+ 'query': request.GET['q'],
+ 'pagelinks': " ".join(
+ generate_pagelinks(pagenum,
+ totalhits / hitsperpage + 1,
+ querystr)),
+ 'hits': [{
+ 'title': h[3],
+ 'url': "%s%s" % (h[1], h[2]),
+ 'abstract': h[4].replace("[[[[[[", "<strong>").replace("]]]]]]","</strong>"),
+ 'rank': h[5]} for h in hits[:-1]],
+ })
from models import SecurityPatch, SecurityPatchVersion
class VersionChoiceField(forms.ModelChoiceField):
- def label_from_instance(self, obj):
- return obj.numtree
+ def label_from_instance(self, obj):
+ return obj.numtree
class SecurityPatchVersionAdminForm(forms.ModelForm):
- model = SecurityPatchVersion
- version = VersionChoiceField(queryset=Version.objects.filter(tree__gt=0), required=True)
+ model = SecurityPatchVersion
+ version = VersionChoiceField(queryset=Version.objects.filter(tree__gt=0), required=True)
class SecurityPatchVersionAdmin(admin.TabularInline):
- model = SecurityPatchVersion
- extra = 2
- form = SecurityPatchVersionAdminForm
+ model = SecurityPatchVersion
+ extra = 2
+ form = SecurityPatchVersionAdminForm
class SecurityPatchForm(forms.ModelForm):
- model = SecurityPatch
- newspost = forms.ModelChoiceField(queryset=NewsArticle.objects.filter(org=settings.PGDG_ORG_ID), required=False)
+ model = SecurityPatch
+ newspost = forms.ModelChoiceField(queryset=NewsArticle.objects.filter(org=settings.PGDG_ORG_ID), required=False)
- def clean(self):
- d = super(SecurityPatchForm, self).clean()
- vecs = [v for k,v in d.items() if k.startswith('vector_')]
- empty = [v for v in vecs if v == '']
- if len(empty) != len(vecs) and len(empty) != 0:
- for k in d.keys():
- if k.startswith('vector_'):
- self.add_error(k, 'Either specify all vector values or none')
- return d
+ def clean(self):
+ d = super(SecurityPatchForm, self).clean()
+ vecs = [v for k,v in d.items() if k.startswith('vector_')]
+ empty = [v for v in vecs if v == '']
+ if len(empty) != len(vecs) and len(empty) != 0:
+ for k in d.keys():
+ if k.startswith('vector_'):
+ self.add_error(k, 'Either specify all vector values or none')
+ return d
class SecurityPatchAdmin(admin.ModelAdmin):
- form = SecurityPatchForm
- exclude = ['cvenumber', ]
- inlines = (SecurityPatchVersionAdmin, )
- list_display = ('cve', 'public', 'cvssscore', 'legacyscore', 'cvssvector', 'description')
- actions = ['make_public', 'make_unpublic']
+ form = SecurityPatchForm
+ exclude = ['cvenumber', ]
+ inlines = (SecurityPatchVersionAdmin, )
+ list_display = ('cve', 'public', 'cvssscore', 'legacyscore', 'cvssvector', 'description')
+ actions = ['make_public', 'make_unpublic']
- def cvssvector(self, obj):
- if not obj.cvssvector:
- return ''
- return '<a href="https://nvd.nist.gov/vuln-metrics/cvss/v3-calculator?vector={0}">{0}</a>'.format(
- obj.cvssvector)
- cvssvector.allow_tags = True
- cvssvector.short_description = "CVSS vector link"
+ def cvssvector(self, obj):
+ if not obj.cvssvector:
+ return ''
+ return '<a href="https://nvd.nist.gov/vuln-metrics/cvss/v3-calculator?vector={0}">{0}</a>'.format(
+ obj.cvssvector)
+ cvssvector.allow_tags = True
+ cvssvector.short_description = "CVSS vector link"
- def cvssscore(self, obj):
- return obj.cvssscore
- cvssscore.short_description = "CVSS score"
+ def cvssscore(self, obj):
+ return obj.cvssscore
+ cvssscore.short_description = "CVSS score"
- def make_public(self, request, queryset):
- self.do_public(queryset, True)
- def make_unpublic(self, request, queryset):
- self.do_public(queryset, False)
- def do_public(self, queryset, val):
- # Intentionally loop and do manually, so we generate change notices
- for p in queryset.all():
- p.public=val
- p.save()
+ def make_public(self, request, queryset):
+ self.do_public(queryset, True)
+ def make_unpublic(self, request, queryset):
+ self.do_public(queryset, False)
+ def do_public(self, queryset, val):
+ # Intentionally loop and do manually, so we generate change notices
+ for p in queryset.all():
+ p.public=val
+ p.save()
admin.site.register(SecurityPatch, SecurityPatchAdmin)
import requests
class Command(BaseCommand):
- help = 'Update CVE links'
+ help = 'Update CVE links'
- def handle(self, *args, **options):
- with transaction.atomic():
- newly_visible = []
- for s in SecurityPatch.objects.filter(cve_visible=False):
- r = requests.get(s.cvelink, timeout=10)
- if r.status_code == 200:
- newly_visible.append(s.cve)
- s.cve_visible = True
- s.save()
- if newly_visible:
- send_simple_mail(settings.NOTIFICATION_FROM,
- settings.NOTIFICATION_EMAIL,
- "CVE entries made public",
- """The following CVE entries are now public upstream,
+ def handle(self, *args, **options):
+ with transaction.atomic():
+ newly_visible = []
+ for s in SecurityPatch.objects.filter(cve_visible=False):
+ r = requests.get(s.cvelink, timeout=10)
+ if r.status_code == 200:
+ newly_visible.append(s.cve)
+ s.cve_visible = True
+ s.save()
+ if newly_visible:
+ send_simple_mail(settings.NOTIFICATION_FROM,
+ settings.NOTIFICATION_EMAIL,
+ "CVE entries made public",
+ """The following CVE entries are now public upstream,
and have been made visible on the website.
{0}
""".format("\n".join(newly_visible)))
- map(varnish_purge, SecurityPatch.purge_urls)
+ map(varnish_purge, SecurityPatch.purge_urls)
name='cve_visible',
field=models.BooleanField(default=True),
),
- migrations.AlterField(
+ migrations.AlterField(
model_name='securitypatch',
name='cve_visible',
field=models.BooleanField(default=False),
vector_choices = {k:list(v.items()) for k,v in cvss.constants3.METRICS_VALUE_NAMES.items()}
component_choices = (
- ('core server', 'Core server product'),
- ('client', 'Client library or application only'),
- ('contrib module', 'Contrib module only'),
- ('client contrib module', 'Client contrib module only'),
- ('packaging', 'Packaging, e.g. installers or RPM'),
- ('other', 'Other'),
+ ('core server', 'Core server product'),
+ ('client', 'Client library or application only'),
+ ('contrib module', 'Contrib module only'),
+ ('client contrib module', 'Client contrib module only'),
+ ('packaging', 'Packaging, e.g. installers or RPM'),
+ ('other', 'Other'),
)
re_cve = re.compile('^(\d{4})-(\d{4,5})$')
def cve_validator(val):
- if not re_cve.match(val):
- raise ValidationError("Enter CVE in format 0000-0000 without the CVE text")
+ if not re_cve.match(val):
+ raise ValidationError("Enter CVE in format 0000-0000 without the CVE text")
def other_vectors_validator(val):
- if val != val.upper():
- raise ValidationError("Vector must be uppercase")
-
- try:
- for vector in val.split('/'):
- k,v = vector.split(':')
- if not cvss.constants3.METRICS_VALUES.has_key(k):
- raise ValidationError("Metric {0} is unknown".format(k))
- if k in ('AV', 'AC', 'PR', 'UI', 'S', 'C', 'I', 'A'):
- raise ValidationError("Metric {0} must be specified in the dropdowns".format(k))
- if not cvss.constants3.METRICS_VALUES[k].has_key(v):
- raise ValidationError("Metric {0} has unknown value&nb