from django.utils.safestring import mark_safe
from django.conf import settings
-import httplib
-import urllib
-import json
+import requests
import logging
log = logging.getLogger(__name__)
super(ReCaptchaField, self).clean(value)
# Validate the recaptcha
- c = httplib.HTTPSConnection('www.google.com', strict=True, timeout=5)
param = {
'secret': settings.RECAPTCHA_SECRET_KEY,
'response': value,
}
-
- # Temporarily don't include remoteip, because it only shows our ssl terminating
- # frontends.
-# if self.remoteip:
-# param['remoteip'] = self.remoteip
-
try:
- c.request('POST', '/recaptcha/api/siteverify', urllib.urlencode(param), {
- 'Content-type': 'application/x-www-form-urlencoded',
- })
- c.sock.settimeout(10)
- except Exception as e:
- # Error to connect at TCP level
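+ # POST the shared secret and the user's response token to the siteverify
+ # endpoint; requests form-encodes the dict payload for us.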
+ r = requests.post(
+ "https://www.google.com/recaptcha/api/siteverify", param,
+ headers={
+ 'Content-type': 'application/x-www-form-urlencoded',
+ },
+ timeout=(5, 10),  # 5s connect, 10s read, matching the old settimeout(10)
+ )
+ except requests.exceptions.RequestException as e:
log.error('Failed to connect to google recaptcha API: %s' % e)
raise ValidationError('Failed in API call to google recaptcha')
- try:
- r = c.getresponse()
- except:
- log.error('Failed in API call to google recaptcha')
- raise ValidationError('Failed in API call to google recaptcha')
- if r.status != 200:
+ if r.status_code != 200:
log.error('Invalid response code from google recaptcha')
raise ValidationError('Invalid response code from google recaptcha')
try:
- j = json.loads(r.read())
+ j = r.json()
- except:
+ except ValueError:
log.error('Invalid response structure from google recaptcha')
raise ValidationError('Invalid response structure from google recaptcha')
from pgweb.util.decorators import cache
-import httplib
-import urllib
+import urllib.parse
+import requests
import psycopg2
-import json
-import socket
-import ssl
from pgweb.lists.models import MailingList
memc = None
if not hits:
# No hits found - so try to get them from the search server
- if settings.ARCHIVES_SEARCH_PLAINTEXT:
- c = httplib.HTTPConnection(settings.ARCHIVES_SEARCH_SERVER, strict=True, timeout=5)
- else:
- c = httplib.HTTPSConnection(settings.ARCHIVES_SEARCH_SERVER, strict=True, timeout=5)
- c.request('POST', '/archives-search/', urlstr, {'Content-type': 'application/x-www-form-urlencoded; charset=utf-8'})
- c.sock.settimeout(20) # Set a 20 second timeout
try:
- r = c.getresponse()
- except (socket.timeout, ssl.SSLError):
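+ # POST the urlencoded search parameters to the search server, picking
+ # http or https depending on ARCHIVES_SEARCH_PLAINTEXT.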
+ r = requests.post(
+ "{}://{}/archives-search/".format(settings.ARCHIVES_SEARCH_PLAINTEXT and 'http' or 'https', settings.ARCHIVES_SEARCH_SERVER),
+ urlstr,
+ headers={
+ 'Content-type': 'application/x-www-form-urlencoded; charset=utf-8',
+ },
+ timeout=(5, 20),  # 5s connect, 20s read, matching the old settimeout(20)
+ )
+ except requests.exceptions.Timeout:
return render(request, 'search/listsearch.html', {
- 'search_error': 'Timeout when talking to search server. Please try your search again later, or with a more restrictive search terms.',
+ 'search_error': 'Timeout when talking to search server. Please try your search again later, or with more restrictive search terms.',
})
- if r.status != 200:
+ except requests.exceptions.RequestException:
+ return render(request, 'search/listsearch.html', {
+ 'search_error': 'General error when talking to search server.',
+ })
+ if r.status_code != 200:
memc = None
return render(request, 'search/listsearch.html', {
'search_error': 'Error talking to search server: %s' % r.reason,
})
- hits = json.loads(r.read())
+ hits = r.json()
if has_memcached and memc:
# Store them in memcached too! But only for 10 minutes...
# And always compress it, just because we can
from datetime import datetime
-import pickle as pickle
+import pickle
import codecs
-import urllib2
+import requests
# Directories, specified from the root of the ftp tree and down, that
# will be recursively excluded from the pickle.
parse_directory(sys.argv[1], len(sys.argv[1]))
if sys.argv[2].startswith("http://") or sys.argv[2].startswith("https://"):
- o = urllib2.build_opener(urllib2.HTTPHandler)
- r = urllib2.Request(sys.argv[2], data=pickle.dumps(allnodes))
- r.add_header('Content-type', 'application/octet-stream')
- r.add_header('Host', 'www.postgresql.org')
- r.get_method = lambda: 'PUT'
- u = o.open(r)
- x = u.read()
- if x != "NOT CHANGED" and x != "OK":
+ r = requests.put(
+ sys.argv[2],
+ data=pickle.dumps(allnodes),
+ headers={
+ 'Content-type': 'application/octet-stream',
+ },
+ )
+ if r.status_code != 200:
+ print("Failed to upload, code: %s" % r.status_code)
+ sys.exit(1)
+ elif r.text != "NOT CHANGED" and r.text != "OK":
print("Failed to upload: %s" % x)
sys.exit(1)
else:
import os
import re
import json
-import urllib2
+import requests
from decimal import Decimal
from tempfile import NamedTemporaryFile
j = json.dumps({'platforms': platforms, 'reporpms': reporpms})
if args.target.startswith('http://') or args.target.startswith('https://'):
- o = urllib.request.build_opener(urllib.request.HTTPHandler)
- r = urllib.request.Request(sys.argv[2], data=j)
- r.add_header('Content-type', 'application/json')
- r.add_header('Host', 'www.postgresql.org')
- r.get_method = lambda: 'PUT'
- u = o.open(r)
- x = u.read()
- if x != "NOT CHANGED" and x != "OK":
+ r = requests.put(
+ args.target,
+ data=j,
+ headers={
+ 'Content-type': 'application/json',
+ },
+ )
+ if r.status_code != 200:
+ print("Failed to upload, code: %s" % r.status_code)
+ sys.exit(1)
+
+ if r.text != "NOT CHANGED" and r.text != "OK":
print("Failed to upload: %s" % x)
sys.exit(1)
else:
import time
import sys
import select
-import httplib
+import requests
import multiprocessing
import logging
import psycopg2
def do_purge(consumername, headers):
try:
- conn = httplib.HTTPSConnection('%s.postgresql.org' % consumername)
- conn.request("GET", "/varnish-purge-url", '', headers)
- resp = conn.getresponse()
- conn.close()
- if resp.status == 200:
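+ # Request the purge URL on the consumer's frontend over https, with a
+ # 10 second timeout so a stuck frontend can't block the worker.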
+ r = requests.get("https://{}.postgresql.org/varnish-purge-url".format(consumername),
+ headers=headers,
+ timeout=10)
+ if r.status_code == 200:
return True
- logging.warning("Varnish purge on %s returned status %s (%s)" % (consumername, resp.status, resp.reason))
+ logging.warning("Varnish purge on %s returned status %s (%s)" % (consumername, r.status_code, r.reason))
return False
except Exception as ex:
logging.error("Exception purging on %s: %s" % (consumername, ex))