Python 3 Start

This commit is contained in:
Luke Rogers 2014-03-06 11:45:00 +13:00
parent 9f029c8ceb
commit 141fe8d80c
67 changed files with 264 additions and 274 deletions

View file

@@ -1,18 +1,15 @@
# convenience wrapper for urllib2 & friends
import cookielib
import http.cookiejar
import json
import urllib
import urllib2
import urlparse
from urllib import quote, quote_plus as _quote_plus
import urllib.request, urllib.parse, urllib.error
import urllib.request, urllib.error, urllib.parse
import urllib.parse
from urllib.parse import quote, quote_plus as _quote_plus
from lxml import etree, html
from bs4 import BeautifulSoup
# used in plugins that import this
from urllib2 import URLError, HTTPError
from urllib.error import URLError, HTTPError
ua_cloudbot = 'Cloudbot/DEV http://github.com/CloudDev/CloudBot'
@@ -24,7 +21,7 @@ ua_internetexplorer = 'Mozilla/4.0 (compatible; MSIE 7.0; Windows NT 5.1)'
ua_chrome = 'Mozilla/5.0 (X11; Linux i686) AppleWebKit/537.4 (KHTML, ' \
'like Gecko) Chrome/22.0.1229.79 Safari/537.4'
jar = cookielib.CookieJar()
jar = http.cookiejar.CookieJar()
def get(*args, **kwargs):
@@ -63,13 +60,13 @@ def open(url, query_params=None, user_agent=None, post_data=None,
url = prepare_url(url, query_params)
request = urllib2.Request(url, post_data)
request = urllib.request.Request(url, post_data)
if get_method is not None:
request.get_method = lambda: get_method
if headers is not None:
for header_key, header_value in headers.iteritems():
for header_key, header_value in headers.items():
request.add_header(header_key, header_value)
request.add_header('User-Agent', user_agent)
@@ -78,9 +75,9 @@ def open(url, query_params=None, user_agent=None, post_data=None,
request.add_header('Referer', referer)
if cookies:
opener = urllib2.build_opener(urllib2.HTTPCookieProcessor(jar))
opener = urllib.request.build_opener(urllib.request.HTTPCookieProcessor(jar))
else:
opener = urllib2.build_opener()
opener = urllib.request.build_opener()
if timeout:
return opener.open(request, timeout=timeout)
@@ -90,20 +87,20 @@ def open(url, query_params=None, user_agent=None, post_data=None,
def prepare_url(url, queries):
    """Merge *queries* into *url*'s query string and return the new URL.

    Parameters in the URL's existing query string are preserved; any key
    that also appears in *queries* is overridden by the *queries* value.
    Keys and values are run through to_utf8() before being re-encoded.
    If *queries* is falsy, *url* is returned unchanged.
    """
    if queries:
        scheme, netloc, path, query, fragment = urllib.parse.urlsplit(url)
        # Existing params go in first so entries from `queries` win on clash.
        query = dict(urllib.parse.parse_qsl(query))
        query.update(queries)
        query = urllib.parse.urlencode(dict((to_utf8(key), to_utf8(value))
                                            for key, value in query.items()))
        url = urllib.parse.urlunsplit((scheme, netloc, path, query, fragment))
    return url
def to_utf8(s):
    """Return *s* as a UTF-8 encoded byte string.

    Text is encoded directly (undecodable characters are dropped via
    the 'ignore' error handler); any other object is first converted
    with str().  Always returns bytes, matching the Python 2 original
    where str() produced a byte string.
    """
    if isinstance(s, str):
        return s.encode('utf8', 'ignore')
    else:
        # Encode the repr too, so callers get bytes for every input type.
        return str(s).encode('utf8', 'ignore')