diff --git a/lib/httplib2/__init__.py b/lib/httplib2/__init__.py
new file mode 100644
index 0000000..abd0b2d
--- /dev/null
+++ b/lib/httplib2/__init__.py
@@ -0,0 +1,1671 @@
+from __future__ import generators
+"""
+httplib2
+
+A caching http interface that supports ETags and gzip
+to conserve bandwidth.
+
+Requires Python 2.3 or later
+
+Changelog:
+2007-08-18, Rick: Modified so it's able to use a socks proxy if needed.
+
+"""
+
+__author__ = "Joe Gregorio (joe@bitworking.org)"
+__copyright__ = "Copyright 2006, Joe Gregorio"
+__contributors__ = ["Thomas Broyer (t.broyer@ltgt.net)",
+    "James Antill",
+    "Xavier Verges Farrero",
+    "Jonathan Feinberg",
+    "Blair Zajac",
+    "Sam Ruby",
+    "Louis Nyffenegger"]
+__license__ = "MIT"
+__version__ = "0.7.5"
+
+import re
+import sys
+import email
+import email.Utils
+import email.Message
+import email.FeedParser
+import StringIO
+import gzip
+import zlib
+import httplib
+import urlparse
+import urllib
+import base64
+import os
+import copy
+import calendar
+import time
+import random
+import errno
+try:
+    from hashlib import sha1 as _sha, md5 as _md5
+except ImportError:
+    # prior to Python 2.5, these were separate modules
+    import sha
+    import md5
+    _sha = sha.new
+    _md5 = md5.new
+import hmac
+from gettext import gettext as _
+import socket
+
+try:
+    from httplib2 import socks
+except ImportError:
+    try:
+        import socks
+    except ImportError:
+        socks = None
+
+# Build the appropriate socket wrapper for ssl
+try:
+    import ssl # python 2.6
+    ssl_SSLError = ssl.SSLError
+    def _ssl_wrap_socket(sock, key_file, cert_file,
+                         disable_validation, ca_certs):
+        if disable_validation:
+            cert_reqs = ssl.CERT_NONE
+        else:
+            cert_reqs = ssl.CERT_REQUIRED
+        # We should be specifying SSL version 3 or TLS v1, but the ssl module
+        # doesn't expose the necessary knobs. So we need to go with the default
+        # of SSLv23.
+        return ssl.wrap_socket(sock, keyfile=key_file, certfile=cert_file,
+                               cert_reqs=cert_reqs, ca_certs=ca_certs)
+except (AttributeError, ImportError):
+    ssl_SSLError = None
+    def _ssl_wrap_socket(sock, key_file, cert_file,
+                         disable_validation, ca_certs):
+        if not disable_validation:
+            raise CertificateValidationUnsupported(
+                    "SSL certificate validation is not supported without "
+                    "the ssl module installed. To avoid this error, install "
+                    "the ssl module, or explicity disable validation.")
+        ssl_sock = socket.ssl(sock, key_file, cert_file)
+        return httplib.FakeSocket(sock, ssl_sock)
+
+
+if sys.version_info >= (2,3):
+    from iri2uri import iri2uri
+else:
+    def iri2uri(uri):
+        return uri
+
+def has_timeout(timeout): # python 2.6
+    if hasattr(socket, '_GLOBAL_DEFAULT_TIMEOUT'):
+        return (timeout is not None and timeout is not socket._GLOBAL_DEFAULT_TIMEOUT)
+    return (timeout is not None)
+
+__all__ = ['Http', 'Response', 'ProxyInfo', 'HttpLib2Error',
+  'RedirectMissingLocation', 'RedirectLimit', 'FailedToDecompressContent',
+  'UnimplementedDigestAuthOptionError', 'UnimplementedHmacDigestAuthOptionError',
+  'debuglevel', 'ProxiesUnavailableError']
+
+
+# The httplib debug level, set to a non-zero value to get debug output
+debuglevel = 0
+
+# A request will be tried 'RETRIES' times if it fails at the socket/connection level.
+RETRIES = 2
+
+# Python 2.3 support
+if sys.version_info < (2,4):
+    def sorted(seq):
+        seq.sort()
+        return seq
+
+# Python 2.3 support
+def HTTPResponse__getheaders(self):
+    """Return list of (header, value) tuples."""
+    if self.msg is None:
+        raise httplib.ResponseNotReady()
+    return self.msg.items()
+
+if not hasattr(httplib.HTTPResponse, 'getheaders'):
+    httplib.HTTPResponse.getheaders = HTTPResponse__getheaders
+
+# All exceptions raised here derive from HttpLib2Error
+class HttpLib2Error(Exception): pass
+
+# Some exceptions can be caught and optionally
+# be turned back into responses.
+class HttpLib2ErrorWithResponse(HttpLib2Error):
+    def __init__(self, desc, response, content):
+        self.response = response
+        self.content = content
+        HttpLib2Error.__init__(self, desc)
+
+class RedirectMissingLocation(HttpLib2ErrorWithResponse): pass
+class RedirectLimit(HttpLib2ErrorWithResponse): pass
+class FailedToDecompressContent(HttpLib2ErrorWithResponse): pass
+class UnimplementedDigestAuthOptionError(HttpLib2ErrorWithResponse): pass
+class UnimplementedHmacDigestAuthOptionError(HttpLib2ErrorWithResponse): pass
+
+class MalformedHeader(HttpLib2Error): pass
+class RelativeURIError(HttpLib2Error): pass
+class ServerNotFoundError(HttpLib2Error): pass
+class ProxiesUnavailableError(HttpLib2Error): pass
+class CertificateValidationUnsupported(HttpLib2Error): pass
+class SSLHandshakeError(HttpLib2Error): pass
+class NotSupportedOnThisPlatform(HttpLib2Error): pass
+class CertificateHostnameMismatch(SSLHandshakeError):
+    def __init__(self, desc, host, cert):
+        HttpLib2Error.__init__(self, desc)
+        self.host = host
+        self.cert = cert
+
+# Open Items:
+# -----------
+# Proxy support
+
+# Are we removing the cached content too soon on PUT (only delete on 200 Maybe?)
+
+# Pluggable cache storage (supports storing the cache in
+# flat files by default. We need a plug-in architecture
+# that can support Berkeley DB and Squid)
+
+# == Known Issues ==
+# Does not handle a resource that uses conneg and Last-Modified but no ETag as a cache validator.
+# Does not handle Cache-Control: max-stale
+# Does not use Age: headers when calculating cache freshness.
+
+
+# The number of redirections to follow before giving up.
+# Note that only GET redirects are automatically followed.
+# Will also honor 301 requests by saving that info and never
+# requesting that URI again.
+DEFAULT_MAX_REDIRECTS = 5
+
+# Default CA certificates file bundled with httplib2.
+CA_CERTS = os.path.join(
+    os.path.dirname(os.path.abspath(__file__ )), "cacerts.txt")
+
+# Which headers are hop-by-hop headers by default
+HOP_BY_HOP = ['connection', 'keep-alive', 'proxy-authenticate', 'proxy-authorization', 'te', 'trailers', 'transfer-encoding', 'upgrade']
+
+def _get_end2end_headers(response):
+    hopbyhop = list(HOP_BY_HOP)
+    hopbyhop.extend([x.strip() for x in response.get('connection', '').split(',')])
+    return [header for header in response.keys() if header not in hopbyhop]
+
+URI = re.compile(r"^(([^:/?#]+):)?(//([^/?#]*))?([^?#]*)(\?([^#]*))?(#(.*))?")
+
+def parse_uri(uri):
+    """Parses a URI using the regex given in Appendix B of RFC 3986.
+
+        (scheme, authority, path, query, fragment) = parse_uri(uri)
+    """
+    groups = URI.match(uri).groups()
+    return (groups[1], groups[3], groups[4], groups[6], groups[8])
+
+def urlnorm(uri):
+    (scheme, authority, path, query, fragment) = parse_uri(uri)
+    if not scheme or not authority:
+        raise RelativeURIError("Only absolute URIs are allowed. 
uri = %s" % uri) + authority = authority.lower() + scheme = scheme.lower() + if not path: + path = "/" + # Could do syntax based normalization of the URI before + # computing the digest. See Section 6.2.2 of Std 66. + request_uri = query and "?".join([path, query]) or path + scheme = scheme.lower() + defrag_uri = scheme + "://" + authority + request_uri + return scheme, authority, request_uri, defrag_uri + + +# Cache filename construction (original borrowed from Venus http://intertwingly.net/code/venus/) +re_url_scheme = re.compile(r'^\w+://') +re_slash = re.compile(r'[?/:|]+') + +def safename(filename): + """Return a filename suitable for the cache. + + Strips dangerous and common characters to create a filename we + can use to store the cache in. + """ + + try: + if re_url_scheme.match(filename): + if isinstance(filename,str): + filename = filename.decode('utf-8') + filename = filename.encode('idna') + else: + filename = filename.encode('idna') + except UnicodeError: + pass + if isinstance(filename,unicode): + filename=filename.encode('utf-8') + filemd5 = _md5(filename).hexdigest() + filename = re_url_scheme.sub("", filename) + filename = re_slash.sub(",", filename) + + # limit length of filename + if len(filename)>200: + filename=filename[:200] + return ",".join((filename, filemd5)) + +NORMALIZE_SPACE = re.compile(r'(?:\r\n)?[ \t]+') +def _normalize_headers(headers): + return dict([ (key.lower(), NORMALIZE_SPACE.sub(value, ' ').strip()) for (key, value) in headers.iteritems()]) + +def _parse_cache_control(headers): + retval = {} + if headers.has_key('cache-control'): + parts = headers['cache-control'].split(',') + parts_with_args = [tuple([x.strip().lower() for x in part.split("=", 1)]) for part in parts if -1 != part.find("=")] + parts_wo_args = [(name.strip().lower(), 1) for name in parts if -1 == name.find("=")] + retval = dict(parts_with_args + parts_wo_args) + return retval + +# Whether to use a strict mode to parse WWW-Authenticate headers +# Might lead to bad results in case of ill-formed header value, +# so disabled by default, falling back to relaxed parsing. +# Set to true to turn on, usefull for testing servers. +USE_WWW_AUTH_STRICT_PARSING = 0 + +# In regex below: +# [^\0-\x1f\x7f-\xff()<>@,;:\\\"/[\]?={} \t]+ matches a "token" as defined by HTTP +# "(?:[^\0-\x08\x0A-\x1f\x7f-\xff\\\"]|\\[\0-\x7f])*?" matches a "quoted-string" as defined by HTTP, when LWS have already been replaced by a single space +# Actually, as an auth-param value can be either a token or a quoted-string, they are combined in a single pattern which matches both: +# \"?((?<=\")(?:[^\0-\x1f\x7f-\xff\\\"]|\\[\0-\x7f])*?(?=\")|(?@,;:\\\"/[\]?={} \t]+(?!\"))\"? +WWW_AUTH_STRICT = re.compile(r"^(?:\s*(?:,\s*)?([^\0-\x1f\x7f-\xff()<>@,;:\\\"/[\]?={} \t]+)\s*=\s*\"?((?<=\")(?:[^\0-\x08\x0A-\x1f\x7f-\xff\\\"]|\\[\0-\x7f])*?(?=\")|(?@,;:\\\"/[\]?={} \t]+(?!\"))\"?)(.*)$") +WWW_AUTH_RELAXED = re.compile(r"^(?:\s*(?:,\s*)?([^ \t\r\n=]+)\s*=\s*\"?((?<=\")(?:[^\\\"]|\\.)*?(?=\")|(? current_age: + retval = "FRESH" + return retval + +def _decompressContent(response, new_content): + content = new_content + try: + encoding = response.get('content-encoding', None) + if encoding in ['gzip', 'deflate']: + if encoding == 'gzip': + content = gzip.GzipFile(fileobj=StringIO.StringIO(new_content)).read() + if encoding == 'deflate': + content = zlib.decompress(content) + response['content-length'] = str(len(content)) + # Record the historical presence of the encoding in a way the won't interfere. 
+ response['-content-encoding'] = response['content-encoding'] + del response['content-encoding'] + except IOError: + content = "" + raise FailedToDecompressContent(_("Content purported to be compressed with %s but failed to decompress.") % response.get('content-encoding'), response, content) + return content + +def _updateCache(request_headers, response_headers, content, cache, cachekey): + if cachekey: + cc = _parse_cache_control(request_headers) + cc_response = _parse_cache_control(response_headers) + if cc.has_key('no-store') or cc_response.has_key('no-store'): + cache.delete(cachekey) + else: + info = email.Message.Message() + for key, value in response_headers.iteritems(): + if key not in ['status','content-encoding','transfer-encoding']: + info[key] = value + + # Add annotations to the cache to indicate what headers + # are variant for this request. + vary = response_headers.get('vary', None) + if vary: + vary_headers = vary.lower().replace(' ', '').split(',') + for header in vary_headers: + key = '-varied-%s' % header + try: + info[key] = request_headers[header] + except KeyError: + pass + + status = response_headers.status + if status == 304: + status = 200 + + status_header = 'status: %d\r\n' % status + + header_str = info.as_string() + + header_str = re.sub("\r(?!\n)|(? 0: + service = "cl" + # No point in guessing Base or Spreadsheet + #elif request_uri.find("spreadsheets") > 0: + # service = "wise" + + auth = dict(Email=credentials[0], Passwd=credentials[1], service=service, source=headers['user-agent']) + resp, content = self.http.request("https://www.google.com/accounts/ClientLogin", method="POST", body=urlencode(auth), headers={'Content-Type': 'application/x-www-form-urlencoded'}) + lines = content.split('\n') + d = dict([tuple(line.split("=", 1)) for line in lines if line]) + if resp.status == 403: + self.Auth = "" + else: + self.Auth = d['Auth'] + + def request(self, method, request_uri, headers, content): + """Modify the request headers to add the appropriate + Authorization header.""" + headers['authorization'] = 'GoogleLogin Auth=' + self.Auth + + +AUTH_SCHEME_CLASSES = { + "basic": BasicAuthentication, + "wsse": WsseAuthentication, + "digest": DigestAuthentication, + "hmacdigest": HmacDigestAuthentication, + "googlelogin": GoogleLoginAuthentication +} + +AUTH_SCHEME_ORDER = ["hmacdigest", "googlelogin", "digest", "wsse", "basic"] + +class FileCache(object): + """Uses a local directory as a store for cached files. + Not really safe to use if multiple threads or processes are going to + be running on the same cache. 
+ """ + def __init__(self, cache, safe=safename): # use safe=lambda x: md5.new(x).hexdigest() for the old behavior + self.cache = cache + self.safe = safe + if not os.path.exists(cache): + os.makedirs(self.cache) + + def get(self, key): + retval = None + cacheFullPath = os.path.join(self.cache, self.safe(key)) + try: + f = file(cacheFullPath, "rb") + retval = f.read() + f.close() + except IOError: + pass + return retval + + def set(self, key, value): + cacheFullPath = os.path.join(self.cache, self.safe(key)) + f = file(cacheFullPath, "wb") + f.write(value) + f.close() + + def delete(self, key): + cacheFullPath = os.path.join(self.cache, self.safe(key)) + if os.path.exists(cacheFullPath): + os.remove(cacheFullPath) + +class Credentials(object): + def __init__(self): + self.credentials = [] + + def add(self, name, password, domain=""): + self.credentials.append((domain.lower(), name, password)) + + def clear(self): + self.credentials = [] + + def iter(self, domain): + for (cdomain, name, password) in self.credentials: + if cdomain == "" or domain == cdomain: + yield (name, password) + +class KeyCerts(Credentials): + """Identical to Credentials except that + name/password are mapped to key/cert.""" + pass + +class AllHosts(object): + pass + +class ProxyInfo(object): + """Collect information required to use a proxy.""" + bypass_hosts = () + + def __init__(self, proxy_type, proxy_host, proxy_port, + proxy_rdns=None, proxy_user=None, proxy_pass=None): + """The parameter proxy_type must be set to one of socks.PROXY_TYPE_XXX + constants. For example: + + p = ProxyInfo(proxy_type=socks.PROXY_TYPE_HTTP, + proxy_host='localhost', proxy_port=8000) + """ + self.proxy_type = proxy_type + self.proxy_host = proxy_host + self.proxy_port = proxy_port + self.proxy_rdns = proxy_rdns + self.proxy_user = proxy_user + self.proxy_pass = proxy_pass + + def astuple(self): + return (self.proxy_type, self.proxy_host, self.proxy_port, + self.proxy_rdns, self.proxy_user, self.proxy_pass) + + def isgood(self): + return (self.proxy_host != None) and (self.proxy_port != None) + + @classmethod + def from_environment(cls, method='http'): + """ + Read proxy info from the environment variables. 
+ """ + if method not in ['http', 'https']: + return + + env_var = method + '_proxy' + url = os.environ.get(env_var, os.environ.get(env_var.upper())) + if not url: + return + pi = cls.from_url(url, method) + + no_proxy = os.environ.get('no_proxy', os.environ.get('NO_PROXY', '')) + bypass_hosts = [] + if no_proxy: + bypass_hosts = no_proxy.split(',') + # special case, no_proxy=* means all hosts bypassed + if no_proxy == '*': + bypass_hosts = AllHosts + + pi.bypass_hosts = bypass_hosts + return pi + + @classmethod + def from_url(cls, url, method='http'): + """ + Construct a ProxyInfo from a URL (such as http_proxy env var) + """ + url = urlparse.urlparse(url) + username = None + password = None + port = None + if '@' in url[1]: + ident, host_port = url[1].split('@', 1) + if ':' in ident: + username, password = ident.split(':', 1) + else: + password = ident + else: + host_port = url[1] + if ':' in host_port: + host, port = host_port.split(':', 1) + else: + host = host_port + + if port: + port = int(port) + else: + port = dict(https=443, http=80)[method] + + proxy_type = 3 # socks.PROXY_TYPE_HTTP + return cls( + proxy_type = proxy_type, + proxy_host = host, + proxy_port = port, + proxy_user = username or None, + proxy_pass = password or None, + ) + + def applies_to(self, hostname): + return not self.bypass_host(hostname) + + def bypass_host(self, hostname): + """Has this host been excluded from the proxy config""" + if self.bypass_hosts is AllHosts: + return True + + bypass = False + for domain in self.bypass_hosts: + if hostname.endswith(domain): + bypass = True + + return bypass + + +class HTTPConnectionWithTimeout(httplib.HTTPConnection): + """ + HTTPConnection subclass that supports timeouts + + All timeouts are in seconds. If None is passed for timeout then + Python's default timeout for sockets will be used. See for example + the docs of socket.setdefaulttimeout(): + http://docs.python.org/library/socket.html#socket.setdefaulttimeout + """ + + def __init__(self, host, port=None, strict=None, timeout=None, proxy_info=None): + httplib.HTTPConnection.__init__(self, host, port, strict) + self.timeout = timeout + self.proxy_info = proxy_info + + def connect(self): + """Connect to the host and port specified in __init__.""" + # Mostly verbatim from httplib.py. + if self.proxy_info and socks is None: + raise ProxiesUnavailableError( + 'Proxy support missing but proxy use was requested!') + msg = "getaddrinfo returns an empty list" + if self.proxy_info and self.proxy_info.isgood(): + use_proxy = True + proxy_type, proxy_host, proxy_port, proxy_rdns, proxy_user, proxy_pass = self.proxy_info.astuple() + else: + use_proxy = False + if use_proxy and proxy_rdns: + host = proxy_host + port = proxy_port + else: + host = self.host + port = self.port + + for res in socket.getaddrinfo(host, port, 0, socket.SOCK_STREAM): + af, socktype, proto, canonname, sa = res + try: + if use_proxy: + self.sock = socks.socksocket(af, socktype, proto) + self.sock.setproxy(proxy_type, proxy_host, proxy_port, proxy_rdns, proxy_user, proxy_pass) + else: + self.sock = socket.socket(af, socktype, proto) + self.sock.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1) + # Different from httplib: support timeouts. + if has_timeout(self.timeout): + self.sock.settimeout(self.timeout) + # End of difference from httplib. 
+ if self.debuglevel > 0: + print "connect: (%s, %s) ************" % (self.host, self.port) + if use_proxy: + print "proxy: %s ************" % str((proxy_host, proxy_port, proxy_rdns, proxy_user, proxy_pass)) + + self.sock.connect((self.host, self.port) + sa[2:]) + except socket.error, msg: + if self.debuglevel > 0: + print "connect fail: (%s, %s)" % (self.host, self.port) + if use_proxy: + print "proxy: %s" % str((proxy_host, proxy_port, proxy_rdns, proxy_user, proxy_pass)) + if self.sock: + self.sock.close() + self.sock = None + continue + break + if not self.sock: + raise socket.error, msg + +class HTTPSConnectionWithTimeout(httplib.HTTPSConnection): + """ + This class allows communication via SSL. + + All timeouts are in seconds. If None is passed for timeout then + Python's default timeout for sockets will be used. See for example + the docs of socket.setdefaulttimeout(): + http://docs.python.org/library/socket.html#socket.setdefaulttimeout + """ + def __init__(self, host, port=None, key_file=None, cert_file=None, + strict=None, timeout=None, proxy_info=None, + ca_certs=None, disable_ssl_certificate_validation=False): + httplib.HTTPSConnection.__init__(self, host, port=port, key_file=key_file, + cert_file=cert_file, strict=strict) + self.timeout = timeout + self.proxy_info = proxy_info + if ca_certs is None: + ca_certs = CA_CERTS + self.ca_certs = ca_certs + self.disable_ssl_certificate_validation = \ + disable_ssl_certificate_validation + + # The following two methods were adapted from https_wrapper.py, released + # with the Google Appengine SDK at + # http://googleappengine.googlecode.com/svn-history/r136/trunk/python/google/appengine/tools/https_wrapper.py + # under the following license: + # + # Copyright 2007 Google Inc. + # + # Licensed under the Apache License, Version 2.0 (the "License"); + # you may not use this file except in compliance with the License. + # You may obtain a copy of the License at + # + # http://www.apache.org/licenses/LICENSE-2.0 + # + # Unless required by applicable law or agreed to in writing, software + # distributed under the License is distributed on an "AS IS" BASIS, + # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + # See the License for the specific language governing permissions and + # limitations under the License. + # + + def _GetValidHostsForCert(self, cert): + """Returns a list of valid host globs for an SSL certificate. + + Args: + cert: A dictionary representing an SSL certificate. + Returns: + list: A list of valid host globs. + """ + if 'subjectAltName' in cert: + return [x[1] for x in cert['subjectAltName'] + if x[0].lower() == 'dns'] + else: + return [x[0][1] for x in cert['subject'] + if x[0][0].lower() == 'commonname'] + + def _ValidateCertificateHostname(self, cert, hostname): + """Validates that a given hostname is valid for an SSL certificate. + + Args: + cert: A dictionary representing an SSL certificate. + hostname: The hostname to test. + Returns: + bool: Whether or not the hostname is valid for this certificate. + """ + hosts = self._GetValidHostsForCert(cert) + for host in hosts: + host_re = host.replace('.', '\.').replace('*', '[^.]*') + if re.search('^%s$' % (host_re,), hostname, re.I): + return True + return False + + def connect(self): + "Connect to a host on a given (SSL) port." 
+ + msg = "getaddrinfo returns an empty list" + if self.proxy_info and self.proxy_info.isgood(): + use_proxy = True + proxy_type, proxy_host, proxy_port, proxy_rdns, proxy_user, proxy_pass = self.proxy_info.astuple() + else: + use_proxy = False + if use_proxy and proxy_rdns: + host = proxy_host + port = proxy_port + else: + host = self.host + port = self.port + + for family, socktype, proto, canonname, sockaddr in socket.getaddrinfo( + host, port, 0, socket.SOCK_STREAM): + try: + if use_proxy: + sock = socks.socksocket(family, socktype, proto) + + sock.setproxy(proxy_type, proxy_host, proxy_port, proxy_rdns, proxy_user, proxy_pass) + else: + sock = socket.socket(family, socktype, proto) + sock.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1) + + if has_timeout(self.timeout): + sock.settimeout(self.timeout) + sock.connect((self.host, self.port)) + self.sock =_ssl_wrap_socket( + sock, self.key_file, self.cert_file, + self.disable_ssl_certificate_validation, self.ca_certs) + if self.debuglevel > 0: + print "connect: (%s, %s)" % (self.host, self.port) + if use_proxy: + print "proxy: %s" % str((proxy_host, proxy_port, proxy_rdns, proxy_user, proxy_pass)) + if not self.disable_ssl_certificate_validation: + cert = self.sock.getpeercert() + hostname = self.host.split(':', 0)[0] + if not self._ValidateCertificateHostname(cert, hostname): + raise CertificateHostnameMismatch( + 'Server presented certificate that does not match ' + 'host %s: %s' % (hostname, cert), hostname, cert) + except ssl_SSLError, e: + if sock: + sock.close() + if self.sock: + self.sock.close() + self.sock = None + # Unfortunately the ssl module doesn't seem to provide any way + # to get at more detailed error information, in particular + # whether the error is due to certificate validation or + # something else (such as SSL protocol mismatch). + if e.errno == ssl.SSL_ERROR_SSL: + raise SSLHandshakeError(e) + else: + raise + except (socket.timeout, socket.gaierror): + raise + except socket.error, msg: + if self.debuglevel > 0: + print "connect fail: (%s, %s)" % (self.host, self.port) + if use_proxy: + print "proxy: %s" % str((proxy_host, proxy_port, proxy_rdns, proxy_user, proxy_pass)) + if self.sock: + self.sock.close() + self.sock = None + continue + break + if not self.sock: + raise socket.error, msg + +SCHEME_TO_CONNECTION = { + 'http': HTTPConnectionWithTimeout, + 'https': HTTPSConnectionWithTimeout + } + +# Use a different connection object for Google App Engine +try: + from google.appengine.api import apiproxy_stub_map + if apiproxy_stub_map.apiproxy.GetStub('urlfetch') is None: + raise ImportError # Bail out; we're not actually running on App Engine. + from google.appengine.api.urlfetch import fetch + from google.appengine.api.urlfetch import InvalidURLError + from google.appengine.api.urlfetch import DownloadError + from google.appengine.api.urlfetch import ResponseTooLargeError + from google.appengine.api.urlfetch import SSLCertificateError + + + class ResponseDict(dict): + """Is a dictionary that also has a read() method, so + that it can pass itself off as an httlib.HTTPResponse().""" + def read(self): + pass + + + class AppEngineHttpConnection(object): + """Emulates an httplib.HTTPConnection object, but actually uses the Google + App Engine urlfetch library. This allows the timeout to be properly used on + Google App Engine, and avoids using httplib, which on Google App Engine is + just another wrapper around urlfetch. 
+ """ + def __init__(self, host, port=None, key_file=None, cert_file=None, + strict=None, timeout=None, proxy_info=None, ca_certs=None, + disable_ssl_certificate_validation=False): + self.host = host + self.port = port + self.timeout = timeout + if key_file or cert_file or proxy_info or ca_certs: + raise NotSupportedOnThisPlatform() + self.response = None + self.scheme = 'http' + self.validate_certificate = not disable_ssl_certificate_validation + self.sock = True + + def request(self, method, url, body, headers): + # Calculate the absolute URI, which fetch requires + netloc = self.host + if self.port: + netloc = '%s:%s' % (self.host, self.port) + absolute_uri = '%s://%s%s' % (self.scheme, netloc, url) + try: + response = fetch(absolute_uri, payload=body, method=method, + headers=headers, allow_truncated=False, follow_redirects=False, + deadline=self.timeout, + validate_certificate=self.validate_certificate) + self.response = ResponseDict(response.headers) + self.response['status'] = str(response.status_code) + self.response['reason'] = httplib.responses.get(response.status_code, 'Ok') + self.response.status = response.status_code + setattr(self.response, 'read', lambda : response.content) + + # Make sure the exceptions raised match the exceptions expected. + except InvalidURLError: + raise socket.gaierror('') + except (DownloadError, ResponseTooLargeError, SSLCertificateError): + raise httplib.HTTPException() + + def getresponse(self): + if self.response: + return self.response + else: + raise httplib.HTTPException() + + def set_debuglevel(self, level): + pass + + def connect(self): + pass + + def close(self): + pass + + + class AppEngineHttpsConnection(AppEngineHttpConnection): + """Same as AppEngineHttpConnection, but for HTTPS URIs.""" + def __init__(self, host, port=None, key_file=None, cert_file=None, + strict=None, timeout=None, proxy_info=None, ca_certs=None, + disable_ssl_certificate_validation=False): + AppEngineHttpConnection.__init__(self, host, port, key_file, cert_file, + strict, timeout, proxy_info, ca_certs, disable_ssl_certificate_validation) + self.scheme = 'https' + + # Update the connection classes to use the Googel App Engine specific ones. + SCHEME_TO_CONNECTION = { + 'http': AppEngineHttpConnection, + 'https': AppEngineHttpsConnection + } + +except ImportError: + pass + + +class Http(object): + """An HTTP client that handles: +- all methods +- caching +- ETags +- compression, +- HTTPS +- Basic +- Digest +- WSSE + +and more. + """ + def __init__(self, cache=None, timeout=None, + proxy_info=ProxyInfo.from_environment, + ca_certs=None, disable_ssl_certificate_validation=False): + """If 'cache' is a string then it is used as a directory name for + a disk cache. Otherwise it must be an object that supports the + same interface as FileCache. + + All timeouts are in seconds. If None is passed for timeout + then Python's default timeout for sockets will be used. See + for example the docs of socket.setdefaulttimeout(): + http://docs.python.org/library/socket.html#socket.setdefaulttimeout + + `proxy_info` may be: + - a callable that takes the http scheme ('http' or 'https') and + returns a ProxyInfo instance per request. By default, uses + ProxyInfo.from_environment. + - a ProxyInfo instance (static proxy config). + - None (proxy disabled). + + ca_certs is the path of a file containing root CA certificates for SSL + server certificate validation. By default, a CA cert file bundled with + httplib2 is used. 
+ + If disable_ssl_certificate_validation is true, SSL cert validation will + not be performed. + """ + self.proxy_info = proxy_info + self.ca_certs = ca_certs + self.disable_ssl_certificate_validation = \ + disable_ssl_certificate_validation + + # Map domain name to an httplib connection + self.connections = {} + # The location of the cache, for now a directory + # where cached responses are held. + if cache and isinstance(cache, basestring): + self.cache = FileCache(cache) + else: + self.cache = cache + + # Name/password + self.credentials = Credentials() + + # Key/cert + self.certificates = KeyCerts() + + # authorization objects + self.authorizations = [] + + # If set to False then no redirects are followed, even safe ones. + self.follow_redirects = True + + # Which HTTP methods do we apply optimistic concurrency to, i.e. + # which methods get an "if-match:" etag header added to them. + self.optimistic_concurrency_methods = ["PUT", "PATCH"] + + # If 'follow_redirects' is True, and this is set to True then + # all redirecs are followed, including unsafe ones. + self.follow_all_redirects = False + + self.ignore_etag = False + + self.force_exception_to_status_code = False + + self.timeout = timeout + + # Keep Authorization: headers on a redirect. + self.forward_authorization_headers = False + + def _auth_from_challenge(self, host, request_uri, headers, response, content): + """A generator that creates Authorization objects + that can be applied to requests. + """ + challenges = _parse_www_authenticate(response, 'www-authenticate') + for cred in self.credentials.iter(host): + for scheme in AUTH_SCHEME_ORDER: + if challenges.has_key(scheme): + yield AUTH_SCHEME_CLASSES[scheme](cred, host, request_uri, headers, response, content, self) + + def add_credentials(self, name, password, domain=""): + """Add a name and password that will be used + any time a request requires authentication.""" + self.credentials.add(name, password, domain) + + def add_certificate(self, key, cert, domain): + """Add a key and cert that will be used + any time a request requires authentication.""" + self.certificates.add(key, cert, domain) + + def clear_credentials(self): + """Remove all the names and passwords + that are used for authentication""" + self.credentials.clear() + self.authorizations = [] + + def _conn_request(self, conn, request_uri, method, body, headers): + for i in range(RETRIES): + try: + if conn.sock is None: + conn.connect() + conn.request(method, request_uri, body, headers) + except socket.timeout: + raise + except socket.gaierror: + conn.close() + raise ServerNotFoundError("Unable to find the server at %s" % conn.host) + except ssl_SSLError: + conn.close() + raise + except socket.error, e: + err = 0 + if hasattr(e, 'args'): + err = getattr(e, 'args')[0] + else: + err = e.errno + if err == errno.ECONNREFUSED: # Connection refused + raise + except httplib.HTTPException: + # Just because the server closed the connection doesn't apparently mean + # that the server didn't send a response. 
+ if conn.sock is None: + if i < RETRIES-1: + conn.close() + conn.connect() + continue + else: + conn.close() + raise + if i < RETRIES-1: + conn.close() + conn.connect() + continue + try: + response = conn.getresponse() + except (socket.error, httplib.HTTPException): + if i < RETRIES-1: + conn.close() + conn.connect() + continue + else: + raise + else: + content = "" + if method == "HEAD": + conn.close() + else: + content = response.read() + response = Response(response) + if method != "HEAD": + content = _decompressContent(response, content) + break + return (response, content) + + + def _request(self, conn, host, absolute_uri, request_uri, method, body, headers, redirections, cachekey): + """Do the actual request using the connection object + and also follow one level of redirects if necessary""" + + auths = [(auth.depth(request_uri), auth) for auth in self.authorizations if auth.inscope(host, request_uri)] + auth = auths and sorted(auths)[0][1] or None + if auth: + auth.request(method, request_uri, headers, body) + + (response, content) = self._conn_request(conn, request_uri, method, body, headers) + + if auth: + if auth.response(response, body): + auth.request(method, request_uri, headers, body) + (response, content) = self._conn_request(conn, request_uri, method, body, headers ) + response._stale_digest = 1 + + if response.status == 401: + for authorization in self._auth_from_challenge(host, request_uri, headers, response, content): + authorization.request(method, request_uri, headers, body) + (response, content) = self._conn_request(conn, request_uri, method, body, headers, ) + if response.status != 401: + self.authorizations.append(authorization) + authorization.response(response, body) + break + + if (self.follow_all_redirects or (method in ["GET", "HEAD"]) or response.status == 303): + if self.follow_redirects and response.status in [300, 301, 302, 303, 307]: + # Pick out the location header and basically start from the beginning + # remembering first to strip the ETag header and decrement our 'depth' + if redirections: + if not response.has_key('location') and response.status != 300: + raise RedirectMissingLocation( _("Redirected but the response is missing a Location: header."), response, content) + # Fix-up relative redirects (which violate an RFC 2616 MUST) + if response.has_key('location'): + location = response['location'] + (scheme, authority, path, query, fragment) = parse_uri(location) + if authority == None: + response['location'] = urlparse.urljoin(absolute_uri, location) + if response.status == 301 and method in ["GET", "HEAD"]: + response['-x-permanent-redirect-url'] = response['location'] + if not response.has_key('content-location'): + response['content-location'] = absolute_uri + _updateCache(headers, response, content, self.cache, cachekey) + if headers.has_key('if-none-match'): + del headers['if-none-match'] + if headers.has_key('if-modified-since'): + del headers['if-modified-since'] + if 'authorization' in headers and not self.forward_authorization_headers: + del headers['authorization'] + if response.has_key('location'): + location = response['location'] + old_response = copy.deepcopy(response) + if not old_response.has_key('content-location'): + old_response['content-location'] = absolute_uri + redirect_method = method + if response.status in [302, 303]: + redirect_method = "GET" + body = None + (response, content) = self.request(location, redirect_method, body=body, headers = headers, redirections = redirections - 1) + response.previous = old_response + 
else: + raise RedirectLimit("Redirected more times than rediection_limit allows.", response, content) + elif response.status in [200, 203] and method in ["GET", "HEAD"]: + # Don't cache 206's since we aren't going to handle byte range requests + if not response.has_key('content-location'): + response['content-location'] = absolute_uri + _updateCache(headers, response, content, self.cache, cachekey) + + return (response, content) + + def _normalize_headers(self, headers): + return _normalize_headers(headers) + +# Need to catch and rebrand some exceptions +# Then need to optionally turn all exceptions into status codes +# including all socket.* and httplib.* exceptions. + + + def request(self, uri, method="GET", body=None, headers=None, redirections=DEFAULT_MAX_REDIRECTS, connection_type=None): + """ Performs a single HTTP request. +The 'uri' is the URI of the HTTP resource and can begin +with either 'http' or 'https'. The value of 'uri' must be an absolute URI. + +The 'method' is the HTTP method to perform, such as GET, POST, DELETE, etc. +There is no restriction on the methods allowed. + +The 'body' is the entity body to be sent with the request. It is a string +object. + +Any extra headers that are to be sent with the request should be provided in the +'headers' dictionary. + +The maximum number of redirect to follow before raising an +exception is 'redirections. The default is 5. + +The return value is a tuple of (response, content), the first +being and instance of the 'Response' class, the second being +a string that contains the response entity body. + """ + try: + if headers is None: + headers = {} + else: + headers = self._normalize_headers(headers) + + if not headers.has_key('user-agent'): + headers['user-agent'] = "Python-httplib2/%s (gzip)" % __version__ + + uri = iri2uri(uri) + + (scheme, authority, request_uri, defrag_uri) = urlnorm(uri) + domain_port = authority.split(":")[0:2] + if len(domain_port) == 2 and domain_port[1] == '443' and scheme == 'http': + scheme = 'https' + authority = domain_port[0] + + proxy_info = self._get_proxy_info(scheme, authority) + + conn_key = scheme+":"+authority + if conn_key in self.connections: + conn = self.connections[conn_key] + else: + if not connection_type: + connection_type = SCHEME_TO_CONNECTION[scheme] + certs = list(self.certificates.iter(authority)) + if scheme == 'https': + if certs: + conn = self.connections[conn_key] = connection_type( + authority, key_file=certs[0][0], + cert_file=certs[0][1], timeout=self.timeout, + proxy_info=proxy_info, + ca_certs=self.ca_certs, + disable_ssl_certificate_validation= + self.disable_ssl_certificate_validation) + else: + conn = self.connections[conn_key] = connection_type( + authority, timeout=self.timeout, + proxy_info=proxy_info, + ca_certs=self.ca_certs, + disable_ssl_certificate_validation= + self.disable_ssl_certificate_validation) + else: + conn = self.connections[conn_key] = connection_type( + authority, timeout=self.timeout, + proxy_info=proxy_info) + conn.set_debuglevel(debuglevel) + + if 'range' not in headers and 'accept-encoding' not in headers: + headers['accept-encoding'] = 'gzip, deflate' + + info = email.Message.Message() + cached_value = None + if self.cache: + cachekey = defrag_uri + cached_value = self.cache.get(cachekey) + if cached_value: + # info = email.message_from_string(cached_value) + # + # Need to replace the line above with the kludge below + # to fix the non-existent bug not fixed in this + # bug report: 
http://mail.python.org/pipermail/python-bugs-list/2005-September/030289.html + try: + info, content = cached_value.split('\r\n\r\n', 1) + feedparser = email.FeedParser.FeedParser() + feedparser.feed(info) + info = feedparser.close() + feedparser._parse = None + except (IndexError, ValueError): + self.cache.delete(cachekey) + cachekey = None + cached_value = None + else: + cachekey = None + + if method in self.optimistic_concurrency_methods and self.cache and info.has_key('etag') and not self.ignore_etag and 'if-match' not in headers: + # http://www.w3.org/1999/04/Editing/ + headers['if-match'] = info['etag'] + + if method not in ["GET", "HEAD"] and self.cache and cachekey: + # RFC 2616 Section 13.10 + self.cache.delete(cachekey) + + # Check the vary header in the cache to see if this request + # matches what varies in the cache. + if method in ['GET', 'HEAD'] and 'vary' in info: + vary = info['vary'] + vary_headers = vary.lower().replace(' ', '').split(',') + for header in vary_headers: + key = '-varied-%s' % header + value = info[key] + if headers.get(header, None) != value: + cached_value = None + break + + if cached_value and method in ["GET", "HEAD"] and self.cache and 'range' not in headers: + if info.has_key('-x-permanent-redirect-url'): + # Should cached permanent redirects be counted in our redirection count? For now, yes. + if redirections <= 0: + raise RedirectLimit("Redirected more times than rediection_limit allows.", {}, "") + (response, new_content) = self.request(info['-x-permanent-redirect-url'], "GET", headers = headers, redirections = redirections - 1) + response.previous = Response(info) + response.previous.fromcache = True + else: + # Determine our course of action: + # Is the cached entry fresh or stale? + # Has the client requested a non-cached response? + # + # There seems to be three possible answers: + # 1. [FRESH] Return the cache entry w/o doing a GET + # 2. [STALE] Do the GET (but add in cache validators if available) + # 3. [TRANSPARENT] Do a GET w/o any cache validators (Cache-Control: no-cache) on the request + entry_disposition = _entry_disposition(info, headers) + + if entry_disposition == "FRESH": + if not cached_value: + info['status'] = '504' + content = "" + response = Response(info) + if cached_value: + response.fromcache = True + return (response, content) + + if entry_disposition == "STALE": + if info.has_key('etag') and not self.ignore_etag and not 'if-none-match' in headers: + headers['if-none-match'] = info['etag'] + if info.has_key('last-modified') and not 'last-modified' in headers: + headers['if-modified-since'] = info['last-modified'] + elif entry_disposition == "TRANSPARENT": + pass + + (response, new_content) = self._request(conn, authority, uri, request_uri, method, body, headers, redirections, cachekey) + + if response.status == 304 and method == "GET": + # Rewrite the cache entry with the new end-to-end headers + # Take all headers that are in response + # and overwrite their values in info. + # unless they are hop-by-hop, or are listed in the connection header. 
+ + for key in _get_end2end_headers(response): + info[key] = response[key] + merged_response = Response(info) + if hasattr(response, "_stale_digest"): + merged_response._stale_digest = response._stale_digest + _updateCache(headers, merged_response, content, self.cache, cachekey) + response = merged_response + response.status = 200 + response.fromcache = True + + elif response.status == 200: + content = new_content + else: + self.cache.delete(cachekey) + content = new_content + else: + cc = _parse_cache_control(headers) + if cc.has_key('only-if-cached'): + info['status'] = '504' + response = Response(info) + content = "" + else: + (response, content) = self._request(conn, authority, uri, request_uri, method, body, headers, redirections, cachekey) + except Exception, e: + if self.force_exception_to_status_code: + if isinstance(e, HttpLib2ErrorWithResponse): + response = e.response + content = e.content + response.status = 500 + response.reason = str(e) + elif isinstance(e, socket.timeout): + content = "Request Timeout" + response = Response( { + "content-type": "text/plain", + "status": "408", + "content-length": len(content) + }) + response.reason = "Request Timeout" + else: + content = str(e) + response = Response( { + "content-type": "text/plain", + "status": "400", + "content-length": len(content) + }) + response.reason = "Bad Request" + else: + raise + + + return (response, content) + + def _get_proxy_info(self, scheme, authority): + """Return a ProxyInfo instance (or None) based on the scheme + and authority. + """ + hostname, port = urllib.splitport(authority) + proxy_info = self.proxy_info + if callable(proxy_info): + proxy_info = proxy_info(scheme) + + if (hasattr(proxy_info, 'applies_to') + and not proxy_info.applies_to(hostname)): + proxy_info = None + return proxy_info + + +class Response(dict): + """An object more like email.Message than httplib.HTTPResponse.""" + + """Is this response from our local cache""" + fromcache = False + + """HTTP protocol version used by server. 10 for HTTP/1.0, 11 for HTTP/1.1. """ + version = 11 + + "Status code returned by server. " + status = 200 + + """Reason phrase returned by server.""" + reason = "Ok" + + previous = None + + def __init__(self, info): + # info is either an email.Message or + # an httplib.HTTPResponse object. + if isinstance(info, httplib.HTTPResponse): + for key, value in info.getheaders(): + self[key.lower()] = value + self.status = info.status + self['status'] = str(self.status) + self.reason = info.reason + self.version = info.version + elif isinstance(info, email.Message.Message): + for key, value in info.items(): + self[key.lower()] = value + self.status = int(self['status']) + else: + for key, value in info.iteritems(): + self[key.lower()] = value + self.status = int(self.get('status', self.status)) + self.reason = self.get('reason', self.reason) + + + def __getattr__(self, name): + if name == 'dict': + return self + else: + raise AttributeError, name diff --git a/lib/httplib2/cacerts.txt b/lib/httplib2/cacerts.txt new file mode 100644 index 0000000..da36ed1 --- /dev/null +++ b/lib/httplib2/cacerts.txt @@ -0,0 +1,714 @@ +# Certifcate Authority certificates for validating SSL connections. 
+# +# This file contains PEM format certificates generated from +# http://mxr.mozilla.org/seamonkey/source/security/nss/lib/ckfw/builtins/certdata.txt +# +# ***** BEGIN LICENSE BLOCK ***** +# Version: MPL 1.1/GPL 2.0/LGPL 2.1 +# +# The contents of this file are subject to the Mozilla Public License Version +# 1.1 (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# http://www.mozilla.org/MPL/ +# +# Software distributed under the License is distributed on an "AS IS" basis, +# WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License +# for the specific language governing rights and limitations under the +# License. +# +# The Original Code is the Netscape security libraries. +# +# The Initial Developer of the Original Code is +# Netscape Communications Corporation. +# Portions created by the Initial Developer are Copyright (C) 1994-2000 +# the Initial Developer. All Rights Reserved. +# +# Contributor(s): +# +# Alternatively, the contents of this file may be used under the terms of +# either the GNU General Public License Version 2 or later (the "GPL"), or +# the GNU Lesser General Public License Version 2.1 or later (the "LGPL"), +# in which case the provisions of the GPL or the LGPL are applicable instead +# of those above. If you wish to allow use of your version of this file only +# under the terms of either the GPL or the LGPL, and not to allow others to +# use your version of this file under the terms of the MPL, indicate your +# decision by deleting the provisions above and replace them with the notice +# and other provisions required by the GPL or the LGPL. If you do not delete +# the provisions above, a recipient may use your version of this file under +# the terms of any one of the MPL, the GPL or the LGPL. 
+# +# ***** END LICENSE BLOCK ***** + +Verisign/RSA Secure Server CA +============================= + +-----BEGIN CERTIFICATE----- +MIICNDCCAaECEAKtZn5ORf5eV288mBle3cAwDQYJKoZIhvcNAQECBQAwXzELMAkG +A1UEBhMCVVMxIDAeBgNVBAoTF1JTQSBEYXRhIFNlY3VyaXR5LCBJbmMuMS4wLAYD +VQQLEyVTZWN1cmUgU2VydmVyIENlcnRpZmljYXRpb24gQXV0aG9yaXR5MB4XDTk0 +MTEwOTAwMDAwMFoXDTEwMDEwNzIzNTk1OVowXzELMAkGA1UEBhMCVVMxIDAeBgNV +BAoTF1JTQSBEYXRhIFNlY3VyaXR5LCBJbmMuMS4wLAYDVQQLEyVTZWN1cmUgU2Vy +dmVyIENlcnRpZmljYXRpb24gQXV0aG9yaXR5MIGbMA0GCSqGSIb3DQEBAQUAA4GJ +ADCBhQJ+AJLOesGugz5aqomDV6wlAXYMra6OLDfO6zV4ZFQD5YRAUcm/jwjiioII +0haGN1XpsSECrXZogZoFokvJSyVmIlZsiAeP94FZbYQHZXATcXY+m3dM41CJVphI +uR2nKRoTLkoRWZweFdVJVCxzOmmCsZc5nG1wZ0jl3S3WyB57AgMBAAEwDQYJKoZI +hvcNAQECBQADfgBl3X7hsuyw4jrg7HFGmhkRuNPHoLQDQCYCPgmc4RKz0Vr2N6W3 +YQO2WxZpO8ZECAyIUwxrl0nHPjXcbLm7qt9cuzovk2C2qUtN8iD3zV9/ZHuO3ABc +1/p3yjkWWW8O6tO1g39NTUJWdrTJXwT4OPjr0l91X817/OWOgHz8UA== +-----END CERTIFICATE----- + +Thawte Personal Basic CA +======================== + +-----BEGIN CERTIFICATE----- +MIIDITCCAoqgAwIBAgIBADANBgkqhkiG9w0BAQQFADCByzELMAkGA1UEBhMCWkEx +FTATBgNVBAgTDFdlc3Rlcm4gQ2FwZTESMBAGA1UEBxMJQ2FwZSBUb3duMRowGAYD +VQQKExFUaGF3dGUgQ29uc3VsdGluZzEoMCYGA1UECxMfQ2VydGlmaWNhdGlvbiBT +ZXJ2aWNlcyBEaXZpc2lvbjEhMB8GA1UEAxMYVGhhd3RlIFBlcnNvbmFsIEJhc2lj +IENBMSgwJgYJKoZIhvcNAQkBFhlwZXJzb25hbC1iYXNpY0B0aGF3dGUuY29tMB4X +DTk2MDEwMTAwMDAwMFoXDTIwMTIzMTIzNTk1OVowgcsxCzAJBgNVBAYTAlpBMRUw +EwYDVQQIEwxXZXN0ZXJuIENhcGUxEjAQBgNVBAcTCUNhcGUgVG93bjEaMBgGA1UE +ChMRVGhhd3RlIENvbnN1bHRpbmcxKDAmBgNVBAsTH0NlcnRpZmljYXRpb24gU2Vy +dmljZXMgRGl2aXNpb24xITAfBgNVBAMTGFRoYXd0ZSBQZXJzb25hbCBCYXNpYyBD +QTEoMCYGCSqGSIb3DQEJARYZcGVyc29uYWwtYmFzaWNAdGhhd3RlLmNvbTCBnzAN +BgkqhkiG9w0BAQEFAAOBjQAwgYkCgYEAvLyTU23AUE+CFeZIlDWmWr5vQvoPR+53 +dXLdjUmbllegeNTKP1GzaQuRdhciB5dqxFGTS+CN7zeVoQxN2jSQHReJl+A1OFdK +wPQIcOk8RHtQfmGakOMj04gRRif1CwcOu93RfyAKiLlWCy4cgNrx454p7xS9CkT7 +G1sY0b8jkyECAwEAAaMTMBEwDwYDVR0TAQH/BAUwAwEB/zANBgkqhkiG9w0BAQQF +AAOBgQAt4plrsD16iddZopQBHyvdEktTwq1/qqcAXJFAVyVKOKqEcLnZgA+le1z7 +c8a914phXAPjLSeoF+CEhULcXpvGt7Jtu3Sv5D/Lp7ew4F2+eIMllNLbgQ95B21P +9DkVWlIBe94y1k049hJcBlDfBVu9FEuh3ym6O0GN92NWod8isQ== +-----END CERTIFICATE----- + +Thawte Personal Premium CA +========================== + +-----BEGIN CERTIFICATE----- +MIIDKTCCApKgAwIBAgIBADANBgkqhkiG9w0BAQQFADCBzzELMAkGA1UEBhMCWkEx +FTATBgNVBAgTDFdlc3Rlcm4gQ2FwZTESMBAGA1UEBxMJQ2FwZSBUb3duMRowGAYD +VQQKExFUaGF3dGUgQ29uc3VsdGluZzEoMCYGA1UECxMfQ2VydGlmaWNhdGlvbiBT +ZXJ2aWNlcyBEaXZpc2lvbjEjMCEGA1UEAxMaVGhhd3RlIFBlcnNvbmFsIFByZW1p +dW0gQ0ExKjAoBgkqhkiG9w0BCQEWG3BlcnNvbmFsLXByZW1pdW1AdGhhd3RlLmNv +bTAeFw05NjAxMDEwMDAwMDBaFw0yMDEyMzEyMzU5NTlaMIHPMQswCQYDVQQGEwJa +QTEVMBMGA1UECBMMV2VzdGVybiBDYXBlMRIwEAYDVQQHEwlDYXBlIFRvd24xGjAY +BgNVBAoTEVRoYXd0ZSBDb25zdWx0aW5nMSgwJgYDVQQLEx9DZXJ0aWZpY2F0aW9u +IFNlcnZpY2VzIERpdmlzaW9uMSMwIQYDVQQDExpUaGF3dGUgUGVyc29uYWwgUHJl +bWl1bSBDQTEqMCgGCSqGSIb3DQEJARYbcGVyc29uYWwtcHJlbWl1bUB0aGF3dGUu +Y29tMIGfMA0GCSqGSIb3DQEBAQUAA4GNADCBiQKBgQDJZtn4B0TPuYwu8KHvE0Vs +Bd/eJxZRNkERbGw77f4QfRKe5ZtCmv5gMcNmt3M6SK5O0DI3lIi1DbbZ8/JE2dWI +Et12TfIa/G8jHnrx2JhFTgcQ7xZC0EN1bUre4qrJMf8fAHB8Zs8QJQi6+u4A6UYD +ZicRFTuqW/KY3TZCstqIdQIDAQABoxMwETAPBgNVHRMBAf8EBTADAQH/MA0GCSqG +SIb3DQEBBAUAA4GBAGk2ifc0KjNyL2071CKyuG+axTZmDhs8obF1Wub9NdP4qPIH +b4Vnjt4rueIXsDqg8A6iAJrf8xQVbrvIhVqYgPn/vnQdPfP+MCXRNzRn+qVxeTBh +KXLA4CxM+1bkOqhv5TJZUtt1KFBZDPgLGeSs2a+WjS9Q2wfD6h+rM+D1KzGJ +-----END CERTIFICATE----- + +Thawte Personal Freemail CA +=========================== + +-----BEGIN CERTIFICATE----- +MIIDLTCCApagAwIBAgIBADANBgkqhkiG9w0BAQQFADCB0TELMAkGA1UEBhMCWkEx 
+FTATBgNVBAgTDFdlc3Rlcm4gQ2FwZTESMBAGA1UEBxMJQ2FwZSBUb3duMRowGAYD +VQQKExFUaGF3dGUgQ29uc3VsdGluZzEoMCYGA1UECxMfQ2VydGlmaWNhdGlvbiBT +ZXJ2aWNlcyBEaXZpc2lvbjEkMCIGA1UEAxMbVGhhd3RlIFBlcnNvbmFsIEZyZWVt +YWlsIENBMSswKQYJKoZIhvcNAQkBFhxwZXJzb25hbC1mcmVlbWFpbEB0aGF3dGUu +Y29tMB4XDTk2MDEwMTAwMDAwMFoXDTIwMTIzMTIzNTk1OVowgdExCzAJBgNVBAYT +AlpBMRUwEwYDVQQIEwxXZXN0ZXJuIENhcGUxEjAQBgNVBAcTCUNhcGUgVG93bjEa +MBgGA1UEChMRVGhhd3RlIENvbnN1bHRpbmcxKDAmBgNVBAsTH0NlcnRpZmljYXRp +b24gU2VydmljZXMgRGl2aXNpb24xJDAiBgNVBAMTG1RoYXd0ZSBQZXJzb25hbCBG +cmVlbWFpbCBDQTErMCkGCSqGSIb3DQEJARYccGVyc29uYWwtZnJlZW1haWxAdGhh +d3RlLmNvbTCBnzANBgkqhkiG9w0BAQEFAAOBjQAwgYkCgYEA1GnX1LCUZFtx6UfY +DFG26nKRsIRefS0Nj3sS34UldSh0OkIsYyeflXtL734Zhx2G6qPduc6WZBrCFG5E +rHzmj+hND3EfQDimAKOHePb5lIZererAXnbr2RSjXW56fAylS1V/Bhkpf56aJtVq +uzgkCGqYx7Hao5iR/Xnb5VrEHLkCAwEAAaMTMBEwDwYDVR0TAQH/BAUwAwEB/zAN +BgkqhkiG9w0BAQQFAAOBgQDH7JJ+Tvj1lqVnYiqk8E0RYNBvjWBYYawmu1I1XAjP +MPuoSpaKH2JCI4wXD/S6ZJwXrEcp352YXtJsYHFcoqzceePnbgBHH7UNKOgCneSa +/RP0ptl8sfjcXyMmCZGAc9AUG95DqYMl8uacLxXK/qarigd1iwzdUYRr5PjRznei +gQ== +-----END CERTIFICATE----- + +Thawte Server CA +================ + +-----BEGIN CERTIFICATE----- +MIIDEzCCAnygAwIBAgIBATANBgkqhkiG9w0BAQQFADCBxDELMAkGA1UEBhMCWkEx +FTATBgNVBAgTDFdlc3Rlcm4gQ2FwZTESMBAGA1UEBxMJQ2FwZSBUb3duMR0wGwYD +VQQKExRUaGF3dGUgQ29uc3VsdGluZyBjYzEoMCYGA1UECxMfQ2VydGlmaWNhdGlv +biBTZXJ2aWNlcyBEaXZpc2lvbjEZMBcGA1UEAxMQVGhhd3RlIFNlcnZlciBDQTEm +MCQGCSqGSIb3DQEJARYXc2VydmVyLWNlcnRzQHRoYXd0ZS5jb20wHhcNOTYwODAx +MDAwMDAwWhcNMjAxMjMxMjM1OTU5WjCBxDELMAkGA1UEBhMCWkExFTATBgNVBAgT +DFdlc3Rlcm4gQ2FwZTESMBAGA1UEBxMJQ2FwZSBUb3duMR0wGwYDVQQKExRUaGF3 +dGUgQ29uc3VsdGluZyBjYzEoMCYGA1UECxMfQ2VydGlmaWNhdGlvbiBTZXJ2aWNl +cyBEaXZpc2lvbjEZMBcGA1UEAxMQVGhhd3RlIFNlcnZlciBDQTEmMCQGCSqGSIb3 +DQEJARYXc2VydmVyLWNlcnRzQHRoYXd0ZS5jb20wgZ8wDQYJKoZIhvcNAQEBBQAD +gY0AMIGJAoGBANOkUG7I/1Zr5s9dtuoMaHVHoqrC2oQl/Kj0R1HahbUgdJSGHg91 +yekIYfUGbTBuFRkC6VLAYttNmZ7iagxEOM3+vuNkCXDF/rFrKbYvScg71CcEJRCX +L+eQbcAoQpnXTEPew/UhbVSfXcNY4cDk2VuwuNy0e982OsK1ZiIS1ocNAgMBAAGj +EzARMA8GA1UdEwEB/wQFMAMBAf8wDQYJKoZIhvcNAQEEBQADgYEAB/pMaVz7lcxG +7oWDTSEwjsrZqG9JGubaUeNgcGyEYRGhGshIPllDfU+VPaGLtwtimHp1it2ITk6e +QNuozDJ0uW8NxuOzRAvZim+aKZuZGCg70eNAKJpaPNW15yAbi8qkq43pUdniTCxZ +qdq5snUb9kLy78fyGPmJvKP/iiMucEc= +-----END CERTIFICATE----- + +Thawte Premium Server CA +======================== + +-----BEGIN CERTIFICATE----- +MIIDJzCCApCgAwIBAgIBATANBgkqhkiG9w0BAQQFADCBzjELMAkGA1UEBhMCWkEx +FTATBgNVBAgTDFdlc3Rlcm4gQ2FwZTESMBAGA1UEBxMJQ2FwZSBUb3duMR0wGwYD +VQQKExRUaGF3dGUgQ29uc3VsdGluZyBjYzEoMCYGA1UECxMfQ2VydGlmaWNhdGlv +biBTZXJ2aWNlcyBEaXZpc2lvbjEhMB8GA1UEAxMYVGhhd3RlIFByZW1pdW0gU2Vy +dmVyIENBMSgwJgYJKoZIhvcNAQkBFhlwcmVtaXVtLXNlcnZlckB0aGF3dGUuY29t +MB4XDTk2MDgwMTAwMDAwMFoXDTIwMTIzMTIzNTk1OVowgc4xCzAJBgNVBAYTAlpB +MRUwEwYDVQQIEwxXZXN0ZXJuIENhcGUxEjAQBgNVBAcTCUNhcGUgVG93bjEdMBsG +A1UEChMUVGhhd3RlIENvbnN1bHRpbmcgY2MxKDAmBgNVBAsTH0NlcnRpZmljYXRp +b24gU2VydmljZXMgRGl2aXNpb24xITAfBgNVBAMTGFRoYXd0ZSBQcmVtaXVtIFNl +cnZlciBDQTEoMCYGCSqGSIb3DQEJARYZcHJlbWl1bS1zZXJ2ZXJAdGhhd3RlLmNv +bTCBnzANBgkqhkiG9w0BAQEFAAOBjQAwgYkCgYEA0jY2aovXwlue2oFBYo847kkE +VdbQ7xwblRZH7xhINTpS9CtqBo87L+pW46+GjZ4X9560ZXUCTe/LCaIhUdib0GfQ +ug2SBhRz1JPLlyoAnFxODLz6FVL88kRu2hFKbgifLy3j+ao6hnO2RlNYyIkFvYMR +uHM/qgeN9EJN50CdHDcCAwEAAaMTMBEwDwYDVR0TAQH/BAUwAwEB/zANBgkqhkiG +9w0BAQQFAAOBgQAmSCwWwlj66BZ0DKqqX1Q/8tfJeGBeXm43YyJ3Nn6yF8Q0ufUI +hfzJATj/Tb7yFkJD57taRvvBxhEf8UqwKEbJw8RCfbz6q1lu1bdRiBHjpIUZa4JM +pAwSremkrj/xw0llmozFyD4lt5SZu5IycQfwhl7tUCemDaYj+bvLpgcUQg== +-----END CERTIFICATE----- + +Equifax Secure CA +================= + 
+-----BEGIN CERTIFICATE----- +MIIDIDCCAomgAwIBAgIENd70zzANBgkqhkiG9w0BAQUFADBOMQswCQYDVQQGEwJV +UzEQMA4GA1UEChMHRXF1aWZheDEtMCsGA1UECxMkRXF1aWZheCBTZWN1cmUgQ2Vy +dGlmaWNhdGUgQXV0aG9yaXR5MB4XDTk4MDgyMjE2NDE1MVoXDTE4MDgyMjE2NDE1 +MVowTjELMAkGA1UEBhMCVVMxEDAOBgNVBAoTB0VxdWlmYXgxLTArBgNVBAsTJEVx +dWlmYXggU2VjdXJlIENlcnRpZmljYXRlIEF1dGhvcml0eTCBnzANBgkqhkiG9w0B +AQEFAAOBjQAwgYkCgYEAwV2xWGcIYu6gmi0fCG2RFGiYCh7+2gRvE4RiIcPRfM6f +BeC4AfBONOziipUEZKzxa1NfBbPLZ4C/QgKO/t0BCezhABRP/PvwDN1Dulsr4R+A +cJkVV5MW8Q+XarfCaCMczE1ZMKxRHjuvK9buY0V7xdlfUNLjUA86iOe/FP3gx7kC +AwEAAaOCAQkwggEFMHAGA1UdHwRpMGcwZaBjoGGkXzBdMQswCQYDVQQGEwJVUzEQ +MA4GA1UEChMHRXF1aWZheDEtMCsGA1UECxMkRXF1aWZheCBTZWN1cmUgQ2VydGlm +aWNhdGUgQXV0aG9yaXR5MQ0wCwYDVQQDEwRDUkwxMBoGA1UdEAQTMBGBDzIwMTgw +ODIyMTY0MTUxWjALBgNVHQ8EBAMCAQYwHwYDVR0jBBgwFoAUSOZo+SvSspXXR9gj +IBBPM5iQn9QwHQYDVR0OBBYEFEjmaPkr0rKV10fYIyAQTzOYkJ/UMAwGA1UdEwQF +MAMBAf8wGgYJKoZIhvZ9B0EABA0wCxsFVjMuMGMDAgbAMA0GCSqGSIb3DQEBBQUA +A4GBAFjOKer89961zgK5F7WF0bnj4JXMJTENAKaSbn+2kmOeUJXRmm/kEd5jhW6Y +7qj/WsjTVbJmcVfewCHrPSqnI0kBBIZCe/zuf6IWUrVnZ9NA2zsmWLIodz2uFHdh +1voqZiegDfqnc1zqcPGUIWVEX/r87yloqaKHee9570+sB3c4 +-----END CERTIFICATE----- + +Verisign Class 1 Public Primary Certification Authority +======================================================= + +-----BEGIN CERTIFICATE----- +MIICPTCCAaYCEQDNun9W8N/kvFT+IqyzcqpVMA0GCSqGSIb3DQEBAgUAMF8xCzAJ +BgNVBAYTAlVTMRcwFQYDVQQKEw5WZXJpU2lnbiwgSW5jLjE3MDUGA1UECxMuQ2xh +c3MgMSBQdWJsaWMgUHJpbWFyeSBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTAeFw05 +NjAxMjkwMDAwMDBaFw0yODA4MDEyMzU5NTlaMF8xCzAJBgNVBAYTAlVTMRcwFQYD +VQQKEw5WZXJpU2lnbiwgSW5jLjE3MDUGA1UECxMuQ2xhc3MgMSBQdWJsaWMgUHJp +bWFyeSBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTCBnzANBgkqhkiG9w0BAQEFAAOB +jQAwgYkCgYEA5Rm/baNWYS2ZSHH2Z965jeu3noaACpEO+jglr0aIguVzqKCbJF0N +H8xlbgyw0FaEGIeaBpsQoXPftFg5a27B9hXVqKg/qhIGjTGsf7A01480Z4gJzRQR +4k5FVmkfeAKA2txHkSm7NsljXMXg1y2He6G3MrB7MLoqLzGq7qNn2tsCAwEAATAN +BgkqhkiG9w0BAQIFAAOBgQBMP7iLxmjf7kMzDl3ppssHhE16M/+SG/Q2rdiVIjZo +EWx8QszznC7EBz8UsA9P/5CSdvnivErpj82ggAr3xSnxgiJduLHdgSOjeyUVRjB5 +FvjqBUuUfx3CHMjjt/QQQDwTw18fU+hI5Ia0e6E1sHslurjTjqs/OJ0ANACY89Fx +lA== +-----END CERTIFICATE----- + +Verisign Class 2 Public Primary Certification Authority +======================================================= + +-----BEGIN CERTIFICATE----- +MIICPDCCAaUCEC0b/EoXjaOR6+f/9YtFvgswDQYJKoZIhvcNAQECBQAwXzELMAkG +A1UEBhMCVVMxFzAVBgNVBAoTDlZlcmlTaWduLCBJbmMuMTcwNQYDVQQLEy5DbGFz +cyAyIFB1YmxpYyBQcmltYXJ5IENlcnRpZmljYXRpb24gQXV0aG9yaXR5MB4XDTk2 +MDEyOTAwMDAwMFoXDTI4MDgwMTIzNTk1OVowXzELMAkGA1UEBhMCVVMxFzAVBgNV +BAoTDlZlcmlTaWduLCBJbmMuMTcwNQYDVQQLEy5DbGFzcyAyIFB1YmxpYyBQcmlt +YXJ5IENlcnRpZmljYXRpb24gQXV0aG9yaXR5MIGfMA0GCSqGSIb3DQEBAQUAA4GN +ADCBiQKBgQC2WoujDWojg4BrzzmH9CETMwZMJaLtVRKXxaeAufqDwSCg+i8VDXyh +YGt+eSz6Bg86rvYbb7HS/y8oUl+DfUvEerf4Zh+AVPy3wo5ZShRXRtGak75BkQO7 +FYCTXOvnzAhsPz6zSvz/S2wj1VCCJkQZjiPDceoZJEcEnnW/yKYAHwIDAQABMA0G +CSqGSIb3DQEBAgUAA4GBAIobK/o5wXTXXtgZZKJYSi034DNHD6zt96rbHuSLBlxg +J8pFUs4W7z8GZOeUaHxgMxURaa+dYo2jA1Rrpr7l7gUYYAS/QoD90KioHgE796Nc +r6Pc5iaAIzy4RHT3Cq5Ji2F4zCS/iIqnDupzGUH9TQPwiNHleI2lKk/2lw0Xd8rY +-----END CERTIFICATE----- + +Verisign Class 3 Public Primary Certification Authority +======================================================= + +-----BEGIN CERTIFICATE----- +MIICPDCCAaUCEHC65B0Q2Sk0tjjKewPMur8wDQYJKoZIhvcNAQECBQAwXzELMAkG +A1UEBhMCVVMxFzAVBgNVBAoTDlZlcmlTaWduLCBJbmMuMTcwNQYDVQQLEy5DbGFz +cyAzIFB1YmxpYyBQcmltYXJ5IENlcnRpZmljYXRpb24gQXV0aG9yaXR5MB4XDTk2 +MDEyOTAwMDAwMFoXDTI4MDgwMTIzNTk1OVowXzELMAkGA1UEBhMCVVMxFzAVBgNV 
+BAoTDlZlcmlTaWduLCBJbmMuMTcwNQYDVQQLEy5DbGFzcyAzIFB1YmxpYyBQcmlt +YXJ5IENlcnRpZmljYXRpb24gQXV0aG9yaXR5MIGfMA0GCSqGSIb3DQEBAQUAA4GN +ADCBiQKBgQDJXFme8huKARS0EN8EQNvjV69qRUCPhAwL0TPZ2RHP7gJYHyX3KqhE +BarsAx94f56TuZoAqiN91qyFomNFx3InzPRMxnVx0jnvT0Lwdd8KkMaOIG+YD/is +I19wKTakyYbnsZogy1Olhec9vn2a/iRFM9x2Fe0PonFkTGUugWhFpwIDAQABMA0G +CSqGSIb3DQEBAgUAA4GBALtMEivPLCYATxQT3ab7/AoRhIzzKBxnki98tsX63/Do +lbwdj2wsqFHMc9ikwFPwTtYmwHYBV4GSXiHx0bH/59AhWM1pF+NEHJwZRDmJXNyc +AA9WjQKZ7aKQRUzkuxCkPfAyAw7xzvjoyVGM5mKf5p/AfbdynMk2OmufTqj/ZA1k +-----END CERTIFICATE----- + +Verisign Class 1 Public Primary Certification Authority - G2 +============================================================ + +-----BEGIN CERTIFICATE----- +MIIDAjCCAmsCEEzH6qqYPnHTkxD4PTqJkZIwDQYJKoZIhvcNAQEFBQAwgcExCzAJ +BgNVBAYTAlVTMRcwFQYDVQQKEw5WZXJpU2lnbiwgSW5jLjE8MDoGA1UECxMzQ2xh +c3MgMSBQdWJsaWMgUHJpbWFyeSBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eSAtIEcy +MTowOAYDVQQLEzEoYykgMTk5OCBWZXJpU2lnbiwgSW5jLiAtIEZvciBhdXRob3Jp +emVkIHVzZSBvbmx5MR8wHQYDVQQLExZWZXJpU2lnbiBUcnVzdCBOZXR3b3JrMB4X +DTk4MDUxODAwMDAwMFoXDTI4MDgwMTIzNTk1OVowgcExCzAJBgNVBAYTAlVTMRcw +FQYDVQQKEw5WZXJpU2lnbiwgSW5jLjE8MDoGA1UECxMzQ2xhc3MgMSBQdWJsaWMg +UHJpbWFyeSBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eSAtIEcyMTowOAYDVQQLEzEo +YykgMTk5OCBWZXJpU2lnbiwgSW5jLiAtIEZvciBhdXRob3JpemVkIHVzZSBvbmx5 +MR8wHQYDVQQLExZWZXJpU2lnbiBUcnVzdCBOZXR3b3JrMIGfMA0GCSqGSIb3DQEB +AQUAA4GNADCBiQKBgQCq0Lq+Fi24g9TK0g+8djHKlNgdk4xWArzZbxpvUjZudVYK +VdPfQ4chEWWKfo+9Id5rMj8bhDSVBZ1BNeuS65bdqlk/AVNtmU/t5eIqWpDBucSm +Fc/IReumXY6cPvBkJHalzasab7bYe1FhbqZ/h8jit+U03EGI6glAvnOSPWvndQID +AQABMA0GCSqGSIb3DQEBBQUAA4GBAKlPww3HZ74sy9mozS11534Vnjty637rXC0J +h9ZrbWB85a7FkCMMXErQr7Fd88e2CtvgFZMN3QO8x3aKtd1Pw5sTdbgBwObJW2ul +uIncrKTdcu1OofdPvAbT6shkdHvClUGcZXNY8ZCaPGqxmMnEh7zPRW1F4m4iP/68 +DzFc6PLZ +-----END CERTIFICATE----- + +Verisign Class 2 Public Primary Certification Authority - G2 +============================================================ + +-----BEGIN CERTIFICATE----- +MIIDAzCCAmwCEQC5L2DMiJ+hekYJuFtwbIqvMA0GCSqGSIb3DQEBBQUAMIHBMQsw +CQYDVQQGEwJVUzEXMBUGA1UEChMOVmVyaVNpZ24sIEluYy4xPDA6BgNVBAsTM0Ns +YXNzIDIgUHVibGljIFByaW1hcnkgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkgLSBH +MjE6MDgGA1UECxMxKGMpIDE5OTggVmVyaVNpZ24sIEluYy4gLSBGb3IgYXV0aG9y +aXplZCB1c2Ugb25seTEfMB0GA1UECxMWVmVyaVNpZ24gVHJ1c3QgTmV0d29yazAe +Fw05ODA1MTgwMDAwMDBaFw0yODA4MDEyMzU5NTlaMIHBMQswCQYDVQQGEwJVUzEX +MBUGA1UEChMOVmVyaVNpZ24sIEluYy4xPDA6BgNVBAsTM0NsYXNzIDIgUHVibGlj +IFByaW1hcnkgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkgLSBHMjE6MDgGA1UECxMx +KGMpIDE5OTggVmVyaVNpZ24sIEluYy4gLSBGb3IgYXV0aG9yaXplZCB1c2Ugb25s +eTEfMB0GA1UECxMWVmVyaVNpZ24gVHJ1c3QgTmV0d29yazCBnzANBgkqhkiG9w0B +AQEFAAOBjQAwgYkCgYEAp4gBIXQs5xoD8JjhlzwPIQjxnNuX6Zr8wgQGE75fUsjM +HiwSViy4AWkszJkfrbCWrnkE8hM5wXuYuggs6MKEEyyqaekJ9MepAqRCwiNPStjw +DqL7MWzJ5m+ZJwf15vRMeJ5t60aG+rmGyVTyssSv1EYcWskVMP8NbPUtDm3Of3cC +AwEAATANBgkqhkiG9w0BAQUFAAOBgQByLvl/0fFx+8Se9sVeUYpAmLho+Jscg9ji +nb3/7aHmZuovCfTK1+qlK5X2JGCGTUQug6XELaDTrnhpb3LabK4I8GOSN+a7xDAX +rXfMSTWqz9iP0b63GJZHc2pUIjRkLbYWm1lbtFFZOrMLFPQS32eg9K0yZF6xRnIn +jBJ7xUS0rg== +-----END CERTIFICATE----- + +Verisign Class 3 Public Primary Certification Authority - G2 +============================================================ + +-----BEGIN CERTIFICATE----- +MIIDAjCCAmsCEH3Z/gfPqB63EHln+6eJNMYwDQYJKoZIhvcNAQEFBQAwgcExCzAJ +BgNVBAYTAlVTMRcwFQYDVQQKEw5WZXJpU2lnbiwgSW5jLjE8MDoGA1UECxMzQ2xh +c3MgMyBQdWJsaWMgUHJpbWFyeSBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eSAtIEcy +MTowOAYDVQQLEzEoYykgMTk5OCBWZXJpU2lnbiwgSW5jLiAtIEZvciBhdXRob3Jp +emVkIHVzZSBvbmx5MR8wHQYDVQQLExZWZXJpU2lnbiBUcnVzdCBOZXR3b3JrMB4X 
+DTk4MDUxODAwMDAwMFoXDTI4MDgwMTIzNTk1OVowgcExCzAJBgNVBAYTAlVTMRcw +FQYDVQQKEw5WZXJpU2lnbiwgSW5jLjE8MDoGA1UECxMzQ2xhc3MgMyBQdWJsaWMg +UHJpbWFyeSBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eSAtIEcyMTowOAYDVQQLEzEo +YykgMTk5OCBWZXJpU2lnbiwgSW5jLiAtIEZvciBhdXRob3JpemVkIHVzZSBvbmx5 +MR8wHQYDVQQLExZWZXJpU2lnbiBUcnVzdCBOZXR3b3JrMIGfMA0GCSqGSIb3DQEB +AQUAA4GNADCBiQKBgQDMXtERXVxp0KvTuWpMmR9ZmDCOFoUgRm1HP9SFIIThbbP4 +pO0M8RcPO/mn+SXXwc+EY/J8Y8+iR/LGWzOOZEAEaMGAuWQcRXfH2G71lSk8UOg0 +13gfqLptQ5GVj0VXXn7F+8qkBOvqlzdUMG+7AUcyM83cV5tkaWH4mx0ciU9cZwID +AQABMA0GCSqGSIb3DQEBBQUAA4GBAFFNzb5cy5gZnBWyATl4Lk0PZ3BwmcYQWpSk +U01UbSuvDV1Ai2TT1+7eVmGSX6bEHRBhNtMsJzzoKQm5EWR0zLVznxxIqbxhAe7i +F6YM40AIOw7n60RzKprxaZLvcRTDOaxxp5EJb+RxBrO6WVcmeQD2+A2iMzAo1KpY +oJ2daZH9 +-----END CERTIFICATE----- + +Verisign Class 4 Public Primary Certification Authority - G2 +============================================================ + +-----BEGIN CERTIFICATE----- +MIIDAjCCAmsCEDKIjprS9esTR/h/xCA3JfgwDQYJKoZIhvcNAQEFBQAwgcExCzAJ +BgNVBAYTAlVTMRcwFQYDVQQKEw5WZXJpU2lnbiwgSW5jLjE8MDoGA1UECxMzQ2xh +c3MgNCBQdWJsaWMgUHJpbWFyeSBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eSAtIEcy +MTowOAYDVQQLEzEoYykgMTk5OCBWZXJpU2lnbiwgSW5jLiAtIEZvciBhdXRob3Jp +emVkIHVzZSBvbmx5MR8wHQYDVQQLExZWZXJpU2lnbiBUcnVzdCBOZXR3b3JrMB4X +DTk4MDUxODAwMDAwMFoXDTI4MDgwMTIzNTk1OVowgcExCzAJBgNVBAYTAlVTMRcw +FQYDVQQKEw5WZXJpU2lnbiwgSW5jLjE8MDoGA1UECxMzQ2xhc3MgNCBQdWJsaWMg +UHJpbWFyeSBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eSAtIEcyMTowOAYDVQQLEzEo +YykgMTk5OCBWZXJpU2lnbiwgSW5jLiAtIEZvciBhdXRob3JpemVkIHVzZSBvbmx5 +MR8wHQYDVQQLExZWZXJpU2lnbiBUcnVzdCBOZXR3b3JrMIGfMA0GCSqGSIb3DQEB +AQUAA4GNADCBiQKBgQC68OTP+cSuhVS5B1f5j8V/aBH4xBewRNzjMHPVKmIquNDM +HO0oW369atyzkSTKQWI8/AIBvxwWMZQFl3Zuoq29YRdsTjCG8FE3KlDHqGKB3FtK +qsGgtG7rL+VXxbErQHDbWk2hjh+9Ax/YA9SPTJlxvOKCzFjomDqG04Y48wApHwID +AQABMA0GCSqGSIb3DQEBBQUAA4GBAIWMEsGnuVAVess+rLhDityq3RS6iYF+ATwj +cSGIL4LcY/oCRaxFWdcqWERbt5+BO5JoPeI3JPV7bI92NZYJqFmduc4jq3TWg/0y +cyfYaT5DdPauxYma51N86Xv2S/PBZYPejYqcPIiNOVn8qj8ijaHBZlCBckztImRP +T8qAkbYp +-----END CERTIFICATE----- + +Verisign Class 1 Public Primary Certification Authority - G3 +============================================================ + +-----BEGIN CERTIFICATE----- +MIIEGjCCAwICEQCLW3VWhFSFCwDPrzhIzrGkMA0GCSqGSIb3DQEBBQUAMIHKMQsw +CQYDVQQGEwJVUzEXMBUGA1UEChMOVmVyaVNpZ24sIEluYy4xHzAdBgNVBAsTFlZl +cmlTaWduIFRydXN0IE5ldHdvcmsxOjA4BgNVBAsTMShjKSAxOTk5IFZlcmlTaWdu +LCBJbmMuIC0gRm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxRTBDBgNVBAMTPFZlcmlT +aWduIENsYXNzIDEgUHVibGljIFByaW1hcnkgQ2VydGlmaWNhdGlvbiBBdXRob3Jp +dHkgLSBHMzAeFw05OTEwMDEwMDAwMDBaFw0zNjA3MTYyMzU5NTlaMIHKMQswCQYD +VQQGEwJVUzEXMBUGA1UEChMOVmVyaVNpZ24sIEluYy4xHzAdBgNVBAsTFlZlcmlT +aWduIFRydXN0IE5ldHdvcmsxOjA4BgNVBAsTMShjKSAxOTk5IFZlcmlTaWduLCBJ +bmMuIC0gRm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxRTBDBgNVBAMTPFZlcmlTaWdu +IENsYXNzIDEgUHVibGljIFByaW1hcnkgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkg +LSBHMzCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAN2E1Lm0+afY8wR4 +nN493GwTFtl63SRRZsDHJlkNrAYIwpTRMx/wgzUfbhvI3qpuFU5UJ+/EbRrsC+MO +8ESlV8dAWB6jRx9x7GD2bZTIGDnt/kIYVt/kTEkQeE4BdjVjEjbdZrwBBDajVWjV +ojYJrKshJlQGrT/KFOCsyq0GHZXi+J3x4GD/wn91K0zM2v6HmSHquv4+VNfSWXjb +PG7PoBMAGrgnoeS+Z5bKoMWznN3JdZ7rMJpfo83ZrngZPyPpXNspva1VyBtUjGP2 +6KbqxzcSXKMpHgLZ2x87tNcPVkeBFQRKr4Mn0cVYiMHd9qqnoxjaaKptEVHhv2Vr +n5Z20T0CAwEAATANBgkqhkiG9w0BAQUFAAOCAQEAq2aN17O6x5q25lXQBfGfMY1a +qtmqRiYPce2lrVNWYgFHKkTp/j90CxObufRNG7LRX7K20ohcs5/Ny9Sn2WCVhDr4 +wTcdYcrnsMXlkdpUpqwxga6X3s0IrLjAl4B/bnKk52kTlWUfxJM8/XmPBNQ+T+r3 +ns7NZ3xPZQL/kYVUc8f/NveGLezQXk//EZ9yBta4GvFMDSZl4kSAHsef493oCtrs 
+pSCAaWihT37ha88HQfqDjrw43bAuEbFrskLMmrz5SCJ5ShkPshw+IHTZasO+8ih4 +E1Z5T21Q6huwtVexN2ZYI/PcD98Kh8TvhgXVOBRgmaNL3gaWcSzy27YfpO8/7g== +-----END CERTIFICATE----- + +Verisign Class 2 Public Primary Certification Authority - G3 +============================================================ + +-----BEGIN CERTIFICATE----- +MIIEGTCCAwECEGFwy0mMX5hFKeewptlQW3owDQYJKoZIhvcNAQEFBQAwgcoxCzAJ +BgNVBAYTAlVTMRcwFQYDVQQKEw5WZXJpU2lnbiwgSW5jLjEfMB0GA1UECxMWVmVy +aVNpZ24gVHJ1c3QgTmV0d29yazE6MDgGA1UECxMxKGMpIDE5OTkgVmVyaVNpZ24s +IEluYy4gLSBGb3IgYXV0aG9yaXplZCB1c2Ugb25seTFFMEMGA1UEAxM8VmVyaVNp +Z24gQ2xhc3MgMiBQdWJsaWMgUHJpbWFyeSBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0 +eSAtIEczMB4XDTk5MTAwMTAwMDAwMFoXDTM2MDcxNjIzNTk1OVowgcoxCzAJBgNV +BAYTAlVTMRcwFQYDVQQKEw5WZXJpU2lnbiwgSW5jLjEfMB0GA1UECxMWVmVyaVNp +Z24gVHJ1c3QgTmV0d29yazE6MDgGA1UECxMxKGMpIDE5OTkgVmVyaVNpZ24sIElu +Yy4gLSBGb3IgYXV0aG9yaXplZCB1c2Ugb25seTFFMEMGA1UEAxM8VmVyaVNpZ24g +Q2xhc3MgMiBQdWJsaWMgUHJpbWFyeSBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eSAt +IEczMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEArwoNwtUs22e5LeWU +J92lvuCwTY+zYVY81nzD9M0+hsuiiOLh2KRpxbXiv8GmR1BeRjmL1Za6tW8UvxDO +JxOeBUebMXoT2B/Z0wI3i60sR/COgQanDTAM6/c8DyAd3HJG7qUCyFvDyVZpTMUY +wZF7C9UTAJu878NIPkZgIIUq1ZC2zYugzDLdt/1AVbJQHFauzI13TccgTacxdu9o +koqQHgiBVrKtaaNS0MscxCM9H5n+TOgWY47GCI72MfbS+uV23bUckqNJzc0BzWjN +qWm6o+sdDZykIKbBoMXRRkwXbdKsZj+WjOCE1Db/IlnF+RFgqF8EffIa9iVCYQ/E +Srg+iQIDAQABMA0GCSqGSIb3DQEBBQUAA4IBAQA0JhU8wI1NQ0kdvekhktdmnLfe +xbjQ5F1fdiLAJvmEOjr5jLX77GDx6M4EsMjdpwOPMPOY36TmpDHf0xwLRtxyID+u +7gU8pDM/CzmscHhzS5kr3zDCVLCoO1Wh/hYozUK9dG6A2ydEp85EXdQbkJgNHkKU +sQAsBNB0owIFImNjzYO1+8FtYmtpdf1dcEG59b98377BMnMiIYtYgXsVkXq642RI +sH/7NiXaldDxJBQX3RiAa0YjOVT1jmIJBB2UkKab5iXiQkWquJCtvgiPqQtCGJTP +cjnhsUPgKM+351psE2tJs//jGHyJizNdrDPXp/naOlXJWBD5qu9ats9LS98q +-----END CERTIFICATE----- + +Verisign Class 3 Public Primary Certification Authority - G3 +============================================================ + +-----BEGIN CERTIFICATE----- +MIIEGjCCAwICEQCbfgZJoz5iudXukEhxKe9XMA0GCSqGSIb3DQEBBQUAMIHKMQsw +CQYDVQQGEwJVUzEXMBUGA1UEChMOVmVyaVNpZ24sIEluYy4xHzAdBgNVBAsTFlZl +cmlTaWduIFRydXN0IE5ldHdvcmsxOjA4BgNVBAsTMShjKSAxOTk5IFZlcmlTaWdu +LCBJbmMuIC0gRm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxRTBDBgNVBAMTPFZlcmlT +aWduIENsYXNzIDMgUHVibGljIFByaW1hcnkgQ2VydGlmaWNhdGlvbiBBdXRob3Jp +dHkgLSBHMzAeFw05OTEwMDEwMDAwMDBaFw0zNjA3MTYyMzU5NTlaMIHKMQswCQYD +VQQGEwJVUzEXMBUGA1UEChMOVmVyaVNpZ24sIEluYy4xHzAdBgNVBAsTFlZlcmlT +aWduIFRydXN0IE5ldHdvcmsxOjA4BgNVBAsTMShjKSAxOTk5IFZlcmlTaWduLCBJ +bmMuIC0gRm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxRTBDBgNVBAMTPFZlcmlTaWdu +IENsYXNzIDMgUHVibGljIFByaW1hcnkgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkg +LSBHMzCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAMu6nFL8eB8aHm8b +N3O9+MlrlBIwT/A2R/XQkQr1F8ilYcEWQE37imGQ5XYgwREGfassbqb1EUGO+i2t +KmFZpGcmTNDovFJbcCAEWNF6yaRpvIMXZK0Fi7zQWM6NjPXr8EJJC52XJ2cybuGu +kxUccLwgTS8Y3pKI6GyFVxEa6X7jJhFUokWWVYPKMIno3Nij7SqAP395ZVc+FSBm +CC+Vk7+qRy+oRpfwEuL+wgorUeZ25rdGt+INpsyow0xZVYnm6FNcHOqd8GIWC6fJ +Xwzw3sJ2zq/3avL6QaaiMxTJ5Xpj055iN9WFZZ4O5lMkdBteHRJTW8cs54NJOxWu +imi5V5cCAwEAATANBgkqhkiG9w0BAQUFAAOCAQEAERSWwauSCPc/L8my/uRan2Te +2yFPhpk0djZX3dAVL8WtfxUfN2JzPtTnX84XA9s1+ivbrmAJXx5fj267Cz3qWhMe +DGBvtcC1IyIuBwvLqXTLR7sdwdela8wv0kL9Sd2nic9TutoAWii/gt/4uhMdUIaC +/Y4wjylGsB49Ndo4YhYYSq3mtlFs3q9i6wHQHiT+eo8SGhJouPtmmRQURVyu565p +F4ErWjfJXir0xuKhXFSbplQAz/DxwceYMBo7Nhbbo27q/a2ywtrvAkcTisDxszGt +TxzhT5yvDwyd93gN2PQ1VoDat20Xj50egWTh/sVFuq1ruQp6Tk9LhO5L8X3dEQ== +-----END CERTIFICATE----- + +Verisign Class 4 Public Primary Certification Authority - G3 +============================================================ + 
+-----BEGIN CERTIFICATE----- +MIIEGjCCAwICEQDsoKeLbnVqAc/EfMwvlF7XMA0GCSqGSIb3DQEBBQUAMIHKMQsw +CQYDVQQGEwJVUzEXMBUGA1UEChMOVmVyaVNpZ24sIEluYy4xHzAdBgNVBAsTFlZl +cmlTaWduIFRydXN0IE5ldHdvcmsxOjA4BgNVBAsTMShjKSAxOTk5IFZlcmlTaWdu +LCBJbmMuIC0gRm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxRTBDBgNVBAMTPFZlcmlT +aWduIENsYXNzIDQgUHVibGljIFByaW1hcnkgQ2VydGlmaWNhdGlvbiBBdXRob3Jp +dHkgLSBHMzAeFw05OTEwMDEwMDAwMDBaFw0zNjA3MTYyMzU5NTlaMIHKMQswCQYD +VQQGEwJVUzEXMBUGA1UEChMOVmVyaVNpZ24sIEluYy4xHzAdBgNVBAsTFlZlcmlT +aWduIFRydXN0IE5ldHdvcmsxOjA4BgNVBAsTMShjKSAxOTk5IFZlcmlTaWduLCBJ +bmMuIC0gRm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxRTBDBgNVBAMTPFZlcmlTaWdu +IENsYXNzIDQgUHVibGljIFByaW1hcnkgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkg +LSBHMzCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAK3LpRFpxlmr8Y+1 +GQ9Wzsy1HyDkniYlS+BzZYlZ3tCD5PUPtbut8XzoIfzk6AzufEUiGXaStBO3IFsJ ++mGuqPKljYXCKtbeZjbSmwL0qJJgfJxptI8kHtCGUvYynEFYHiK9zUVilQhu0Gbd +U6LM8BDcVHOLBKFGMzNcF0C5nk3T875Vg+ixiY5afJqWIpA7iCXy0lOIAgwLePLm +NxdLMEYH5IBtptiWLugs+BGzOA1mppvqySNb247i8xOOGlktqgLw7KSHZtzBP/XY +ufTsgsbSPZUd5cBPhMnZo0QoBmrXRazwa2rvTl/4EYIeOGM0ZlDUPpNz+jDDZq3/ +ky2X7wMCAwEAATANBgkqhkiG9w0BAQUFAAOCAQEAj/ola09b5KROJ1WrIhVZPMq1 +CtRK26vdoV9TxaBXOcLORyu+OshWv8LZJxA6sQU8wHcxuzrTBXttmhwwjIDLk5Mq +g6sFUYICABFna/OIYUdfA5PVWw3g8dShMjWFsjrbsIKr0csKvE+MW8VLADsfKoKm +fjaF3H48ZwC15DtS4KjrXRX5xm3wrR0OhbepmnMUWluPQSjA1egtTaRezarZ7c7c +2NU8Qh0XwRJdRTjDOPP8hS6DRkiy1yBfkjaP53kPmF6Z6PDQpLv1U70qzlmwr25/ +bLvSHgCwIe34QWKCudiyxLtGUPMxxY8BqHTr9Xgn2uf3ZkPznoM+IKrDNWCRzg== +-----END CERTIFICATE----- + +Equifax Secure Global eBusiness CA +================================== + +-----BEGIN CERTIFICATE----- +MIICkDCCAfmgAwIBAgIBATANBgkqhkiG9w0BAQQFADBaMQswCQYDVQQGEwJVUzEc +MBoGA1UEChMTRXF1aWZheCBTZWN1cmUgSW5jLjEtMCsGA1UEAxMkRXF1aWZheCBT +ZWN1cmUgR2xvYmFsIGVCdXNpbmVzcyBDQS0xMB4XDTk5MDYyMTA0MDAwMFoXDTIw +MDYyMTA0MDAwMFowWjELMAkGA1UEBhMCVVMxHDAaBgNVBAoTE0VxdWlmYXggU2Vj +dXJlIEluYy4xLTArBgNVBAMTJEVxdWlmYXggU2VjdXJlIEdsb2JhbCBlQnVzaW5l +c3MgQ0EtMTCBnzANBgkqhkiG9w0BAQEFAAOBjQAwgYkCgYEAuucXkAJlsTRVPEnC +UdXfp9E3j9HngXNBUmCbnaEXJnitx7HoJpQytd4zjTov2/KaelpzmKNc6fuKcxtc +58O/gGzNqfTWK8D3+ZmqY6KxRwIP1ORROhI8bIpaVIRw28HFkM9yRcuoWcDNM50/ +o5brhTMhHD4ePmBudpxnhcXIw2ECAwEAAaNmMGQwEQYJYIZIAYb4QgEBBAQDAgAH +MA8GA1UdEwEB/wQFMAMBAf8wHwYDVR0jBBgwFoAUvqigdHJQa0S3ySPY+6j/s1dr +aGwwHQYDVR0OBBYEFL6ooHRyUGtEt8kj2Puo/7NXa2hsMA0GCSqGSIb3DQEBBAUA +A4GBADDiAVGqx+pf2rnQZQ8w1j7aDRRJbpGTJxQx78T3LUX47Me/okENI7SS+RkA +Z70Br83gcfxaz2TE4JaY0KNA4gGK7ycH8WUBikQtBmV1UsCGECAhX2xrD2yuCRyv +8qIYNMR1pHMc8Y3c7635s3a0kr/clRAevsvIO1qEYBlWlKlV +-----END CERTIFICATE----- + +Equifax Secure eBusiness CA 1 +============================= + +-----BEGIN CERTIFICATE----- +MIICgjCCAeugAwIBAgIBBDANBgkqhkiG9w0BAQQFADBTMQswCQYDVQQGEwJVUzEc +MBoGA1UEChMTRXF1aWZheCBTZWN1cmUgSW5jLjEmMCQGA1UEAxMdRXF1aWZheCBT +ZWN1cmUgZUJ1c2luZXNzIENBLTEwHhcNOTkwNjIxMDQwMDAwWhcNMjAwNjIxMDQw +MDAwWjBTMQswCQYDVQQGEwJVUzEcMBoGA1UEChMTRXF1aWZheCBTZWN1cmUgSW5j +LjEmMCQGA1UEAxMdRXF1aWZheCBTZWN1cmUgZUJ1c2luZXNzIENBLTEwgZ8wDQYJ +KoZIhvcNAQEBBQADgY0AMIGJAoGBAM4vGbwXt3fek6lfWg0XTzQaDJj0ItlZ1MRo +RvC0NcWFAyDGr0WlIVFFQesWWDYyb+JQYmT5/VGcqiTZ9J2DKocKIdMSODRsjQBu +WqDZQu4aIZX5UkxVWsUPOE9G+m34LjXWHXzr4vCwdYDIqROsvojvOm6rXyo4YgKw +Env+j6YDAgMBAAGjZjBkMBEGCWCGSAGG+EIBAQQEAwIABzAPBgNVHRMBAf8EBTAD +AQH/MB8GA1UdIwQYMBaAFEp4MlIR21kWNl7fwRQ2QGpHfEyhMB0GA1UdDgQWBBRK +eDJSEdtZFjZe38EUNkBqR3xMoTANBgkqhkiG9w0BAQQFAAOBgQB1W6ibAxHm6VZM +zfmpTMANmvPMZWnmJXbMWbfWVMMdzZmsGd20hdXgPfxiIKeES1hl8eL5lSE/9dR+ +WB5Hh1Q+WKG1tfgq73HnvMP2sUlG4tega+VWeponmHxGYhTnyfxuAxJ5gDgdSIKN +/Bf+KpYrtWKmpj29f5JZzVoqgrI3eQ== +-----END 
CERTIFICATE----- + +Equifax Secure eBusiness CA 2 +============================= + +-----BEGIN CERTIFICATE----- +MIIDIDCCAomgAwIBAgIEN3DPtTANBgkqhkiG9w0BAQUFADBOMQswCQYDVQQGEwJV +UzEXMBUGA1UEChMORXF1aWZheCBTZWN1cmUxJjAkBgNVBAsTHUVxdWlmYXggU2Vj +dXJlIGVCdXNpbmVzcyBDQS0yMB4XDTk5MDYyMzEyMTQ0NVoXDTE5MDYyMzEyMTQ0 +NVowTjELMAkGA1UEBhMCVVMxFzAVBgNVBAoTDkVxdWlmYXggU2VjdXJlMSYwJAYD +VQQLEx1FcXVpZmF4IFNlY3VyZSBlQnVzaW5lc3MgQ0EtMjCBnzANBgkqhkiG9w0B +AQEFAAOBjQAwgYkCgYEA5Dk5kx5SBhsoNviyoynF7Y6yEb3+6+e0dMKP/wXn2Z0G +vxLIPw7y1tEkshHe0XMJitSxLJgJDR5QRrKDpkWNYmi7hRsgcDKqQM2mll/EcTc/ +BPO3QSQ5BxoeLmFYoBIL5aXfxavqN3HMHMg3OrmXUqesxWoklE6ce8/AatbfIb0C +AwEAAaOCAQkwggEFMHAGA1UdHwRpMGcwZaBjoGGkXzBdMQswCQYDVQQGEwJVUzEX +MBUGA1UEChMORXF1aWZheCBTZWN1cmUxJjAkBgNVBAsTHUVxdWlmYXggU2VjdXJl +IGVCdXNpbmVzcyBDQS0yMQ0wCwYDVQQDEwRDUkwxMBoGA1UdEAQTMBGBDzIwMTkw +NjIzMTIxNDQ1WjALBgNVHQ8EBAMCAQYwHwYDVR0jBBgwFoAUUJ4L6q9euSBIplBq +y/3YIHqngnYwHQYDVR0OBBYEFFCeC+qvXrkgSKZQasv92CB6p4J2MAwGA1UdEwQF +MAMBAf8wGgYJKoZIhvZ9B0EABA0wCxsFVjMuMGMDAgbAMA0GCSqGSIb3DQEBBQUA +A4GBAAyGgq3oThr1jokn4jVYPSm0B482UJW/bsGe68SQsoWou7dC4A8HOd/7npCy +0cE+U58DRLB+S/Rv5Hwf5+Kx5Lia78O9zt4LMjTZ3ijtM2vE1Nc9ElirfQkty3D1 +E4qUoSek1nDFbZS1yX2doNLGCEnZZpum0/QL3MUmV+GRMOrN +-----END CERTIFICATE----- + +Thawte Time Stamping CA +======================= + +-----BEGIN CERTIFICATE----- +MIICoTCCAgqgAwIBAgIBADANBgkqhkiG9w0BAQQFADCBizELMAkGA1UEBhMCWkEx +FTATBgNVBAgTDFdlc3Rlcm4gQ2FwZTEUMBIGA1UEBxMLRHVyYmFudmlsbGUxDzAN +BgNVBAoTBlRoYXd0ZTEdMBsGA1UECxMUVGhhd3RlIENlcnRpZmljYXRpb24xHzAd +BgNVBAMTFlRoYXd0ZSBUaW1lc3RhbXBpbmcgQ0EwHhcNOTcwMTAxMDAwMDAwWhcN +MjAxMjMxMjM1OTU5WjCBizELMAkGA1UEBhMCWkExFTATBgNVBAgTDFdlc3Rlcm4g +Q2FwZTEUMBIGA1UEBxMLRHVyYmFudmlsbGUxDzANBgNVBAoTBlRoYXd0ZTEdMBsG +A1UECxMUVGhhd3RlIENlcnRpZmljYXRpb24xHzAdBgNVBAMTFlRoYXd0ZSBUaW1l +c3RhbXBpbmcgQ0EwgZ8wDQYJKoZIhvcNAQEBBQADgY0AMIGJAoGBANYrWHhhRYZT +6jR7UZztsOYuGA7+4F+oJ9O0yeB8WU4WDnNUYMF/9p8u6TqFJBU820cEY8OexJQa +Wt9MevPZQx08EHp5JduQ/vBR5zDWQQD9nyjfeb6Uu522FOMjhdepQeBMpHmwKxqL +8vg7ij5FrHGSALSQQZj7X+36ty6K+Ig3AgMBAAGjEzARMA8GA1UdEwEB/wQFMAMB +Af8wDQYJKoZIhvcNAQEEBQADgYEAZ9viwuaHPUCDhjc1fR/OmsMMZiCouqoEiYbC +9RAIDb/LogWK0E02PvTX72nGXuSwlG9KuefeW4i2e9vjJ+V2w/A1wcu1J5szedyQ +pgCed/r8zSeUQhac0xxo7L9c3eWpexAKMnRUEzGLhQOEkbdYATAUOK8oyvyxUBkZ +CayJSdM= +-----END CERTIFICATE----- + +thawte Primary Root CA +====================== + +-----BEGIN CERTIFICATE----- +MIIEIDCCAwigAwIBAgIQNE7VVyDV7exJ9C/ON9srbTANBgkqhkiG9w0BAQUFADCB +qTELMAkGA1UEBhMCVVMxFTATBgNVBAoTDHRoYXd0ZSwgSW5jLjEoMCYGA1UECxMf +Q2VydGlmaWNhdGlvbiBTZXJ2aWNlcyBEaXZpc2lvbjE4MDYGA1UECxMvKGMpIDIw +MDYgdGhhd3RlLCBJbmMuIC0gRm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxHzAdBgNV +BAMTFnRoYXd0ZSBQcmltYXJ5IFJvb3QgQ0EwHhcNMDYxMTE3MDAwMDAwWhcNMzYw +NzE2MjM1OTU5WjCBqTELMAkGA1UEBhMCVVMxFTATBgNVBAoTDHRoYXd0ZSwgSW5j +LjEoMCYGA1UECxMfQ2VydGlmaWNhdGlvbiBTZXJ2aWNlcyBEaXZpc2lvbjE4MDYG +A1UECxMvKGMpIDIwMDYgdGhhd3RlLCBJbmMuIC0gRm9yIGF1dGhvcml6ZWQgdXNl +IG9ubHkxHzAdBgNVBAMTFnRoYXd0ZSBQcmltYXJ5IFJvb3QgQ0EwggEiMA0GCSqG +SIb3DQEBAQUAA4IBDwAwggEKAoIBAQCsoPD7gFnUnMekz52hWXMJEEUMDSxuaPFs +W0hoSVk3/AszGcJ3f8wQLZU0HObrTQmnHNK4yZc2AreJ1CRfBsDMRJSUjQJib+ta +3RGNKJpchJAQeg29dGYvajig4tVUROsdB58Hum/u6f1OCyn1PoSgAfGcq/gcfomk +6KHYcWUNo1F77rzSImANuVud37r8UVsLr5iy6S7pBOhih94ryNdOwUxkHt3Ph1i6 +Sk/KaAcdHJ1KxtUvkcx8cXIcxcBn6zL9yZJclNqFwJu/U30rCfSMnZEfl2pSy94J +NqR32HuHUETVPm4pafs5SSYeCaWAe0At6+gnhcn+Yf1+5nyXHdWdAgMBAAGjQjBA +MA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgEGMB0GA1UdDgQWBBR7W0XP +r87Lev0xkhpqtvNG61dIUDANBgkqhkiG9w0BAQUFAAOCAQEAeRHAS7ORtvzw6WfU 
+DW5FvlXok9LOAz/t2iWwHVfLHjp2oEzsUHboZHIMpKnxuIvW1oeEuzLlQRHAd9mz +YJ3rG9XRbkREqaYB7FViHXe4XI5ISXycO1cRrK1zN44veFyQaEfZYGDm/Ac9IiAX +xPcW6cTYcvnIc3zfFi8VqT79aie2oetaupgf1eNNZAqdE8hhuvU5HIe6uL17In/2 +/qxAeeWsEG89jxt5dovEN7MhGITlNgDrYyCZuen+MwS7QcjBAvlEYyCegc5C09Y/ +LHbTY5xZ3Y+m4Q6gLkH3LpVHz7z9M/P2C2F+fpErgUfCJzDupxBdN49cOSvkBPB7 +jVaMaA== +-----END CERTIFICATE----- + +VeriSign Class 3 Public Primary Certification Authority - G5 +============================================================ + +-----BEGIN CERTIFICATE----- +MIIE0zCCA7ugAwIBAgIQGNrRniZ96LtKIVjNzGs7SjANBgkqhkiG9w0BAQUFADCB +yjELMAkGA1UEBhMCVVMxFzAVBgNVBAoTDlZlcmlTaWduLCBJbmMuMR8wHQYDVQQL +ExZWZXJpU2lnbiBUcnVzdCBOZXR3b3JrMTowOAYDVQQLEzEoYykgMjAwNiBWZXJp +U2lnbiwgSW5jLiAtIEZvciBhdXRob3JpemVkIHVzZSBvbmx5MUUwQwYDVQQDEzxW +ZXJpU2lnbiBDbGFzcyAzIFB1YmxpYyBQcmltYXJ5IENlcnRpZmljYXRpb24gQXV0 +aG9yaXR5IC0gRzUwHhcNMDYxMTA4MDAwMDAwWhcNMzYwNzE2MjM1OTU5WjCByjEL +MAkGA1UEBhMCVVMxFzAVBgNVBAoTDlZlcmlTaWduLCBJbmMuMR8wHQYDVQQLExZW +ZXJpU2lnbiBUcnVzdCBOZXR3b3JrMTowOAYDVQQLEzEoYykgMjAwNiBWZXJpU2ln +biwgSW5jLiAtIEZvciBhdXRob3JpemVkIHVzZSBvbmx5MUUwQwYDVQQDEzxWZXJp +U2lnbiBDbGFzcyAzIFB1YmxpYyBQcmltYXJ5IENlcnRpZmljYXRpb24gQXV0aG9y +aXR5IC0gRzUwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCvJAgIKXo1 +nmAMqudLO07cfLw8RRy7K+D+KQL5VwijZIUVJ/XxrcgxiV0i6CqqpkKzj/i5Vbex +t0uz/o9+B1fs70PbZmIVYc9gDaTY3vjgw2IIPVQT60nKWVSFJuUrjxuf6/WhkcIz +SdhDY2pSS9KP6HBRTdGJaXvHcPaz3BJ023tdS1bTlr8Vd6Gw9KIl8q8ckmcY5fQG +BO+QueQA5N06tRn/Arr0PO7gi+s3i+z016zy9vA9r911kTMZHRxAy3QkGSGT2RT+ +rCpSx4/VBEnkjWNHiDxpg8v+R70rfk/Fla4OndTRQ8Bnc+MUCH7lP59zuDMKz10/ +NIeWiu5T6CUVAgMBAAGjgbIwga8wDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8E +BAMCAQYwbQYIKwYBBQUHAQwEYTBfoV2gWzBZMFcwVRYJaW1hZ2UvZ2lmMCEwHzAH +BgUrDgMCGgQUj+XTGoasjY5rw8+AatRIGCx7GS4wJRYjaHR0cDovL2xvZ28udmVy +aXNpZ24uY29tL3ZzbG9nby5naWYwHQYDVR0OBBYEFH/TZafC3ey78DAJ80M5+gKv +MzEzMA0GCSqGSIb3DQEBBQUAA4IBAQCTJEowX2LP2BqYLz3q3JktvXf2pXkiOOzE +p6B4Eq1iDkVwZMXnl2YtmAl+X6/WzChl8gGqCBpH3vn5fJJaCGkgDdk+bW48DW7Y +5gaRQBi5+MHt39tBquCWIMnNZBU4gcmU7qKEKQsTb47bDN0lAtukixlE0kF6BWlK +WE9gyn6CagsCqiUXObXbf+eEZSqVir2G3l6BFoMtEMze/aiCKm0oHw0LxOXnGiYZ +4fQRbxC1lfznQgUy286dUV4otp6F01vvpX1FQHKOtw5rDgb7MzVIcbidJ4vEZV8N +hnacRHr2lVz2XTIIM6RUthg/aFzyQkqFOFSDX9HoLPKsEdao7WNq +-----END CERTIFICATE----- + +Entrust.net Secure Server Certification Authority +================================================= + +-----BEGIN CERTIFICATE----- +MIIE2DCCBEGgAwIBAgIEN0rSQzANBgkqhkiG9w0BAQUFADCBwzELMAkGA1UEBhMC +VVMxFDASBgNVBAoTC0VudHJ1c3QubmV0MTswOQYDVQQLEzJ3d3cuZW50cnVzdC5u +ZXQvQ1BTIGluY29ycC4gYnkgcmVmLiAobGltaXRzIGxpYWIuKTElMCMGA1UECxMc +KGMpIDE5OTkgRW50cnVzdC5uZXQgTGltaXRlZDE6MDgGA1UEAxMxRW50cnVzdC5u +ZXQgU2VjdXJlIFNlcnZlciBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTAeFw05OTA1 +MjUxNjA5NDBaFw0xOTA1MjUxNjM5NDBaMIHDMQswCQYDVQQGEwJVUzEUMBIGA1UE +ChMLRW50cnVzdC5uZXQxOzA5BgNVBAsTMnd3dy5lbnRydXN0Lm5ldC9DUFMgaW5j +b3JwLiBieSByZWYuIChsaW1pdHMgbGlhYi4pMSUwIwYDVQQLExwoYykgMTk5OSBF +bnRydXN0Lm5ldCBMaW1pdGVkMTowOAYDVQQDEzFFbnRydXN0Lm5ldCBTZWN1cmUg +U2VydmVyIENlcnRpZmljYXRpb24gQXV0aG9yaXR5MIGdMA0GCSqGSIb3DQEBAQUA +A4GLADCBhwKBgQDNKIM0VBuJ8w+vN5Ex/68xYMmo6LIQaO2f55M28Qpku0f1BBc/ +I0dNxScZgSYMVHINiC3ZH5oSn7yzcdOAGT9HZnuMNSjSuQrfJNqc1lB5gXpa0zf3 +wkrYKZImZNHkmGw6AIr1NJtl+O3jEP/9uElY3KDegjlrgbEWGWG5VLbmQwIBA6OC +AdcwggHTMBEGCWCGSAGG+EIBAQQEAwIABzCCARkGA1UdHwSCARAwggEMMIHeoIHb +oIHYpIHVMIHSMQswCQYDVQQGEwJVUzEUMBIGA1UEChMLRW50cnVzdC5uZXQxOzA5 +BgNVBAsTMnd3dy5lbnRydXN0Lm5ldC9DUFMgaW5jb3JwLiBieSByZWYuIChsaW1p +dHMgbGlhYi4pMSUwIwYDVQQLExwoYykgMTk5OSBFbnRydXN0Lm5ldCBMaW1pdGVk 
+MTowOAYDVQQDEzFFbnRydXN0Lm5ldCBTZWN1cmUgU2VydmVyIENlcnRpZmljYXRp +b24gQXV0aG9yaXR5MQ0wCwYDVQQDEwRDUkwxMCmgJ6AlhiNodHRwOi8vd3d3LmVu +dHJ1c3QubmV0L0NSTC9uZXQxLmNybDArBgNVHRAEJDAigA8xOTk5MDUyNTE2MDk0 +MFqBDzIwMTkwNTI1MTYwOTQwWjALBgNVHQ8EBAMCAQYwHwYDVR0jBBgwFoAU8Bdi +E1U9s/8KAGv7UISX8+1i0BowHQYDVR0OBBYEFPAXYhNVPbP/CgBr+1CEl/PtYtAa +MAwGA1UdEwQFMAMBAf8wGQYJKoZIhvZ9B0EABAwwChsEVjQuMAMCBJAwDQYJKoZI +hvcNAQEFBQADgYEAkNwwAvpkdMKnCqV8IY00F6j7Rw7/JXyNEwr75Ji174z4xRAN +95K+8cPV1ZVqBLssziY2ZcgxxufuP+NXdYR6Ee9GTxj005i7qIcyunL2POI9n9cd +2cNgQ4xYDiKWL2KjLB+6rQXvqzJ4h6BUcxm1XAX5Uj5tLUUL9wqT6u0G+bI= +-----END CERTIFICATE----- + +Go Daddy Certification Authority Root Certificate Bundle +======================================================== + +-----BEGIN CERTIFICATE----- +MIIE3jCCA8agAwIBAgICAwEwDQYJKoZIhvcNAQEFBQAwYzELMAkGA1UEBhMCVVMx +ITAfBgNVBAoTGFRoZSBHbyBEYWRkeSBHcm91cCwgSW5jLjExMC8GA1UECxMoR28g +RGFkZHkgQ2xhc3MgMiBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTAeFw0wNjExMTYw +MTU0MzdaFw0yNjExMTYwMTU0MzdaMIHKMQswCQYDVQQGEwJVUzEQMA4GA1UECBMH +QXJpem9uYTETMBEGA1UEBxMKU2NvdHRzZGFsZTEaMBgGA1UEChMRR29EYWRkeS5j +b20sIEluYy4xMzAxBgNVBAsTKmh0dHA6Ly9jZXJ0aWZpY2F0ZXMuZ29kYWRkeS5j +b20vcmVwb3NpdG9yeTEwMC4GA1UEAxMnR28gRGFkZHkgU2VjdXJlIENlcnRpZmlj +YXRpb24gQXV0aG9yaXR5MREwDwYDVQQFEwgwNzk2OTI4NzCCASIwDQYJKoZIhvcN +AQEBBQADggEPADCCAQoCggEBAMQt1RWMnCZM7DI161+4WQFapmGBWTtwY6vj3D3H +KrjJM9N55DrtPDAjhI6zMBS2sofDPZVUBJ7fmd0LJR4h3mUpfjWoqVTr9vcyOdQm +VZWt7/v+WIbXnvQAjYwqDL1CBM6nPwT27oDyqu9SoWlm2r4arV3aLGbqGmu75RpR +SgAvSMeYddi5Kcju+GZtCpyz8/x4fKL4o/K1w/O5epHBp+YlLpyo7RJlbmr2EkRT +cDCVw5wrWCs9CHRK8r5RsL+H0EwnWGu1NcWdrxcx+AuP7q2BNgWJCJjPOq8lh8BJ +6qf9Z/dFjpfMFDniNoW1fho3/Rb2cRGadDAW/hOUoz+EDU8CAwEAAaOCATIwggEu +MB0GA1UdDgQWBBT9rGEyk2xF1uLuhV+auud2mWjM5zAfBgNVHSMEGDAWgBTSxLDS +kdRMEXGzYcs9of7dqGrU4zASBgNVHRMBAf8ECDAGAQH/AgEAMDMGCCsGAQUFBwEB +BCcwJTAjBggrBgEFBQcwAYYXaHR0cDovL29jc3AuZ29kYWRkeS5jb20wRgYDVR0f +BD8wPTA7oDmgN4Y1aHR0cDovL2NlcnRpZmljYXRlcy5nb2RhZGR5LmNvbS9yZXBv +c2l0b3J5L2dkcm9vdC5jcmwwSwYDVR0gBEQwQjBABgRVHSAAMDgwNgYIKwYBBQUH +AgEWKmh0dHA6Ly9jZXJ0aWZpY2F0ZXMuZ29kYWRkeS5jb20vcmVwb3NpdG9yeTAO +BgNVHQ8BAf8EBAMCAQYwDQYJKoZIhvcNAQEFBQADggEBANKGwOy9+aG2Z+5mC6IG +OgRQjhVyrEp0lVPLN8tESe8HkGsz2ZbwlFalEzAFPIUyIXvJxwqoJKSQ3kbTJSMU +A2fCENZvD117esyfxVgqwcSeIaha86ykRvOe5GPLL5CkKSkB2XIsKd83ASe8T+5o +0yGPwLPk9Qnt0hCqU7S+8MxZC9Y7lhyVJEnfzuz9p0iRFEUOOjZv2kWzRaJBydTX +RE4+uXR21aITVSzGh6O1mawGhId/dQb8vxRMDsxuxN89txJx9OjxUUAiKEngHUuH +qDTMBqLdElrRhjZkAzVvb3du6/KFUJheqwNTrZEjYx8WnM25sgVjOuH0aBsXBTWV +U+4= +-----END CERTIFICATE----- +-----BEGIN CERTIFICATE----- +MIIE+zCCBGSgAwIBAgICAQ0wDQYJKoZIhvcNAQEFBQAwgbsxJDAiBgNVBAcTG1Zh +bGlDZXJ0IFZhbGlkYXRpb24gTmV0d29yazEXMBUGA1UEChMOVmFsaUNlcnQsIElu +Yy4xNTAzBgNVBAsTLFZhbGlDZXJ0IENsYXNzIDIgUG9saWN5IFZhbGlkYXRpb24g +QXV0aG9yaXR5MSEwHwYDVQQDExhodHRwOi8vd3d3LnZhbGljZXJ0LmNvbS8xIDAe +BgkqhkiG9w0BCQEWEWluZm9AdmFsaWNlcnQuY29tMB4XDTA0MDYyOTE3MDYyMFoX +DTI0MDYyOTE3MDYyMFowYzELMAkGA1UEBhMCVVMxITAfBgNVBAoTGFRoZSBHbyBE +YWRkeSBHcm91cCwgSW5jLjExMC8GA1UECxMoR28gRGFkZHkgQ2xhc3MgMiBDZXJ0 +aWZpY2F0aW9uIEF1dGhvcml0eTCCASAwDQYJKoZIhvcNAQEBBQADggENADCCAQgC +ggEBAN6d1+pXGEmhW+vXX0iG6r7d/+TvZxz0ZWizV3GgXne77ZtJ6XCAPVYYYwhv +2vLM0D9/AlQiVBDYsoHUwHU9S3/Hd8M+eKsaA7Ugay9qK7HFiH7Eux6wwdhFJ2+q +N1j3hybX2C32qRe3H3I2TqYXP2WYktsqbl2i/ojgC95/5Y0V4evLOtXiEqITLdiO +r18SPaAIBQi2XKVlOARFmR6jYGB0xUGlcmIbYsUfb18aQr4CUWWoriMYavx4A6lN +f4DD+qta/KFApMoZFv6yyO9ecw3ud72a9nmYvLEHZ6IVDd2gWMZEewo+YihfukEH +U1jPEX44dMX4/7VpkI+EdOqXG68CAQOjggHhMIIB3TAdBgNVHQ4EFgQU0sSw0pHU +TBFxs2HLPaH+3ahq1OMwgdIGA1UdIwSByjCBx6GBwaSBvjCBuzEkMCIGA1UEBxMb 
+VmFsaUNlcnQgVmFsaWRhdGlvbiBOZXR3b3JrMRcwFQYDVQQKEw5WYWxpQ2VydCwg +SW5jLjE1MDMGA1UECxMsVmFsaUNlcnQgQ2xhc3MgMiBQb2xpY3kgVmFsaWRhdGlv +biBBdXRob3JpdHkxITAfBgNVBAMTGGh0dHA6Ly93d3cudmFsaWNlcnQuY29tLzEg +MB4GCSqGSIb3DQEJARYRaW5mb0B2YWxpY2VydC5jb22CAQEwDwYDVR0TAQH/BAUw +AwEB/zAzBggrBgEFBQcBAQQnMCUwIwYIKwYBBQUHMAGGF2h0dHA6Ly9vY3NwLmdv +ZGFkZHkuY29tMEQGA1UdHwQ9MDswOaA3oDWGM2h0dHA6Ly9jZXJ0aWZpY2F0ZXMu +Z29kYWRkeS5jb20vcmVwb3NpdG9yeS9yb290LmNybDBLBgNVHSAERDBCMEAGBFUd +IAAwODA2BggrBgEFBQcCARYqaHR0cDovL2NlcnRpZmljYXRlcy5nb2RhZGR5LmNv +bS9yZXBvc2l0b3J5MA4GA1UdDwEB/wQEAwIBBjANBgkqhkiG9w0BAQUFAAOBgQC1 +QPmnHfbq/qQaQlpE9xXUhUaJwL6e4+PrxeNYiY+Sn1eocSxI0YGyeR+sBjUZsE4O +WBsUs5iB0QQeyAfJg594RAoYC5jcdnplDQ1tgMQLARzLrUc+cb53S8wGd9D0Vmsf +SxOaFIqII6hR8INMqzW/Rn453HWkrugp++85j09VZw== +-----END CERTIFICATE----- +-----BEGIN CERTIFICATE----- +MIIC5zCCAlACAQEwDQYJKoZIhvcNAQEFBQAwgbsxJDAiBgNVBAcTG1ZhbGlDZXJ0 +IFZhbGlkYXRpb24gTmV0d29yazEXMBUGA1UEChMOVmFsaUNlcnQsIEluYy4xNTAz +BgNVBAsTLFZhbGlDZXJ0IENsYXNzIDIgUG9saWN5IFZhbGlkYXRpb24gQXV0aG9y +aXR5MSEwHwYDVQQDExhodHRwOi8vd3d3LnZhbGljZXJ0LmNvbS8xIDAeBgkqhkiG +9w0BCQEWEWluZm9AdmFsaWNlcnQuY29tMB4XDTk5MDYyNjAwMTk1NFoXDTE5MDYy +NjAwMTk1NFowgbsxJDAiBgNVBAcTG1ZhbGlDZXJ0IFZhbGlkYXRpb24gTmV0d29y +azEXMBUGA1UEChMOVmFsaUNlcnQsIEluYy4xNTAzBgNVBAsTLFZhbGlDZXJ0IENs +YXNzIDIgUG9saWN5IFZhbGlkYXRpb24gQXV0aG9yaXR5MSEwHwYDVQQDExhodHRw +Oi8vd3d3LnZhbGljZXJ0LmNvbS8xIDAeBgkqhkiG9w0BCQEWEWluZm9AdmFsaWNl +cnQuY29tMIGfMA0GCSqGSIb3DQEBAQUAA4GNADCBiQKBgQDOOnHK5avIWZJV16vY +dA757tn2VUdZZUcOBVXc65g2PFxTXdMwzzjsvUGJ7SVCCSRrCl6zfN1SLUzm1NZ9 +WlmpZdRJEy0kTRxQb7XBhVQ7/nHk01xC+YDgkRoKWzk2Z/M/VXwbP7RfZHM047QS +v4dk+NoS/zcnwbNDu+97bi5p9wIDAQABMA0GCSqGSIb3DQEBBQUAA4GBADt/UG9v +UJSZSWI4OB9L+KXIPqeCgfYrx+jFzug6EILLGACOTb2oWH+heQC1u+mNr0HZDzTu +IYEZoDJJKPTEjlbVUjP9UNV+mWwD5MlM/Mtsq2azSiGM5bUMMj4QssxsodyamEwC +W/POuZ6lcg5Ktz885hZo+L7tdEy8W9ViH0Pd +-----END CERTIFICATE----- + diff --git a/lib/httplib2/iri2uri.py b/lib/httplib2/iri2uri.py new file mode 100644 index 0000000..70667ed --- /dev/null +++ b/lib/httplib2/iri2uri.py @@ -0,0 +1,110 @@ +""" +iri2uri + +Converts an IRI to a URI. + +""" +__author__ = "Joe Gregorio (joe@bitworking.org)" +__copyright__ = "Copyright 2006, Joe Gregorio" +__contributors__ = [] +__version__ = "1.0.0" +__license__ = "MIT" +__history__ = """ +""" + +import urlparse + + +# Convert an IRI to a URI following the rules in RFC 3987 +# +# The characters we need to enocde and escape are defined in the spec: +# +# iprivate = %xE000-F8FF / %xF0000-FFFFD / %x100000-10FFFD +# ucschar = %xA0-D7FF / %xF900-FDCF / %xFDF0-FFEF +# / %x10000-1FFFD / %x20000-2FFFD / %x30000-3FFFD +# / %x40000-4FFFD / %x50000-5FFFD / %x60000-6FFFD +# / %x70000-7FFFD / %x80000-8FFFD / %x90000-9FFFD +# / %xA0000-AFFFD / %xB0000-BFFFD / %xC0000-CFFFD +# / %xD0000-DFFFD / %xE1000-EFFFD + +escape_range = [ + (0xA0, 0xD7FF ), + (0xE000, 0xF8FF ), + (0xF900, 0xFDCF ), + (0xFDF0, 0xFFEF), + (0x10000, 0x1FFFD ), + (0x20000, 0x2FFFD ), + (0x30000, 0x3FFFD), + (0x40000, 0x4FFFD ), + (0x50000, 0x5FFFD ), + (0x60000, 0x6FFFD), + (0x70000, 0x7FFFD ), + (0x80000, 0x8FFFD ), + (0x90000, 0x9FFFD), + (0xA0000, 0xAFFFD ), + (0xB0000, 0xBFFFD ), + (0xC0000, 0xCFFFD), + (0xD0000, 0xDFFFD ), + (0xE1000, 0xEFFFD), + (0xF0000, 0xFFFFD ), + (0x100000, 0x10FFFD) +] + +def encode(c): + retval = c + i = ord(c) + for low, high in escape_range: + if i < low: + break + if i >= low and i <= high: + retval = "".join(["%%%2X" % ord(o) for o in c.encode('utf-8')]) + break + return retval + + +def iri2uri(uri): + """Convert an IRI to a URI. 
Note that IRIs must be + passed in a unicode strings. That is, do not utf-8 encode + the IRI before passing it into the function.""" + if isinstance(uri ,unicode): + (scheme, authority, path, query, fragment) = urlparse.urlsplit(uri) + authority = authority.encode('idna') + # For each character in 'ucschar' or 'iprivate' + # 1. encode as utf-8 + # 2. then %-encode each octet of that utf-8 + uri = urlparse.urlunsplit((scheme, authority, path, query, fragment)) + uri = "".join([encode(c) for c in uri]) + return uri + +if __name__ == "__main__": + import unittest + + class Test(unittest.TestCase): + + def test_uris(self): + """Test that URIs are invariant under the transformation.""" + invariant = [ + u"ftp://ftp.is.co.za/rfc/rfc1808.txt", + u"http://www.ietf.org/rfc/rfc2396.txt", + u"ldap://[2001:db8::7]/c=GB?objectClass?one", + u"mailto:John.Doe@example.com", + u"news:comp.infosystems.www.servers.unix", + u"tel:+1-816-555-1212", + u"telnet://192.0.2.16:80/", + u"urn:oasis:names:specification:docbook:dtd:xml:4.1.2" ] + for uri in invariant: + self.assertEqual(uri, iri2uri(uri)) + + def test_iri(self): + """ Test that the right type of escaping is done for each part of the URI.""" + self.assertEqual("http://xn--o3h.com/%E2%98%84", iri2uri(u"http://\N{COMET}.com/\N{COMET}")) + self.assertEqual("http://bitworking.org/?fred=%E2%98%84", iri2uri(u"http://bitworking.org/?fred=\N{COMET}")) + self.assertEqual("http://bitworking.org/#%E2%98%84", iri2uri(u"http://bitworking.org/#\N{COMET}")) + self.assertEqual("#%E2%98%84", iri2uri(u"#\N{COMET}")) + self.assertEqual("/fred?bar=%E2%98%9A#%E2%98%84", iri2uri(u"/fred?bar=\N{BLACK LEFT POINTING INDEX}#\N{COMET}")) + self.assertEqual("/fred?bar=%E2%98%9A#%E2%98%84", iri2uri(iri2uri(u"/fred?bar=\N{BLACK LEFT POINTING INDEX}#\N{COMET}"))) + self.assertNotEqual("/fred?bar=%E2%98%9A#%E2%98%84", iri2uri(u"/fred?bar=\N{BLACK LEFT POINTING INDEX}#\N{COMET}".encode('utf-8'))) + + unittest.main() + + diff --git a/lib/httplib2/socks.py b/lib/httplib2/socks.py new file mode 100644 index 0000000..0991f4c --- /dev/null +++ b/lib/httplib2/socks.py @@ -0,0 +1,438 @@ +"""SocksiPy - Python SOCKS module. +Version 1.00 + +Copyright 2006 Dan-Haim. All rights reserved. + +Redistribution and use in source and binary forms, with or without modification, +are permitted provided that the following conditions are met: +1. Redistributions of source code must retain the above copyright notice, this + list of conditions and the following disclaimer. +2. Redistributions in binary form must reproduce the above copyright notice, + this list of conditions and the following disclaimer in the documentation + and/or other materials provided with the distribution. +3. Neither the name of Dan Haim nor the names of his contributors may be used + to endorse or promote products derived from this software without specific + prior written permission. + +THIS SOFTWARE IS PROVIDED BY DAN HAIM "AS IS" AND ANY EXPRESS OR IMPLIED +WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF +MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO +EVENT SHALL DAN HAIM OR HIS CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, +INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA +OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF +LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT +OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMANGE. + + +This module provides a standard socket-like interface for Python +for tunneling connections through SOCKS proxies. + +""" + +""" + +Minor modifications made by Christopher Gilbert (http://motomastyle.com/) +for use in PyLoris (http://pyloris.sourceforge.net/) + +Minor modifications made by Mario Vilas (http://breakingcode.wordpress.com/) +mainly to merge bug fixes found in Sourceforge + +""" + +import base64 +import socket +import struct +import sys + +if getattr(socket, 'socket', None) is None: + raise ImportError('socket.socket missing, proxy support unusable') + +PROXY_TYPE_SOCKS4 = 1 +PROXY_TYPE_SOCKS5 = 2 +PROXY_TYPE_HTTP = 3 +PROXY_TYPE_HTTP_NO_TUNNEL = 4 + +_defaultproxy = None +_orgsocket = socket.socket + +class ProxyError(Exception): pass +class GeneralProxyError(ProxyError): pass +class Socks5AuthError(ProxyError): pass +class Socks5Error(ProxyError): pass +class Socks4Error(ProxyError): pass +class HTTPError(ProxyError): pass + +_generalerrors = ("success", + "invalid data", + "not connected", + "not available", + "bad proxy type", + "bad input") + +_socks5errors = ("succeeded", + "general SOCKS server failure", + "connection not allowed by ruleset", + "Network unreachable", + "Host unreachable", + "Connection refused", + "TTL expired", + "Command not supported", + "Address type not supported", + "Unknown error") + +_socks5autherrors = ("succeeded", + "authentication is required", + "all offered authentication methods were rejected", + "unknown username or invalid password", + "unknown error") + +_socks4errors = ("request granted", + "request rejected or failed", + "request rejected because SOCKS server cannot connect to identd on the client", + "request rejected because the client program and identd report different user-ids", + "unknown error") + +def setdefaultproxy(proxytype=None, addr=None, port=None, rdns=True, username=None, password=None): + """setdefaultproxy(proxytype, addr[, port[, rdns[, username[, password]]]]) + Sets a default proxy which all further socksocket objects will use, + unless explicitly changed. + """ + global _defaultproxy + _defaultproxy = (proxytype, addr, port, rdns, username, password) + +def wrapmodule(module): + """wrapmodule(module) + Attempts to replace a module's socket library with a SOCKS socket. Must set + a default proxy using setdefaultproxy(...) first. + This will only work on modules that import socket directly into the namespace; + most of the Python Standard Library falls into this category. + """ + if _defaultproxy != None: + module.socket.socket = socksocket + else: + raise GeneralProxyError((4, "no proxy specified")) + +class socksocket(socket.socket): + """socksocket([family[, type[, proto]]]) -> socket object + Open a SOCKS enabled socket. The parameters are the same as + those of the standard socket init. In order for SOCKS to work, + you must specify family=AF_INET, type=SOCK_STREAM and proto=0. 
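+
+    A minimal usage sketch (illustrative only; the proxy address, port and
+    destination host below are placeholders, not values required by this
+    module):
+
+        s = socksocket()
+        s.setproxy(PROXY_TYPE_SOCKS5, "127.0.0.1", 1080)
+        s.connect(("www.example.com", 80))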
+ """ + + def __init__(self, family=socket.AF_INET, type=socket.SOCK_STREAM, proto=0, _sock=None): + _orgsocket.__init__(self, family, type, proto, _sock) + if _defaultproxy != None: + self.__proxy = _defaultproxy + else: + self.__proxy = (None, None, None, None, None, None) + self.__proxysockname = None + self.__proxypeername = None + self.__httptunnel = True + + def __recvall(self, count): + """__recvall(count) -> data + Receive EXACTLY the number of bytes requested from the socket. + Blocks until the required number of bytes have been received. + """ + data = self.recv(count) + while len(data) < count: + d = self.recv(count-len(data)) + if not d: raise GeneralProxyError((0, "connection closed unexpectedly")) + data = data + d + return data + + def sendall(self, content, *args): + """ override socket.socket.sendall method to rewrite the header + for non-tunneling proxies if needed + """ + if not self.__httptunnel: + content = self.__rewriteproxy(content) + return super(socksocket, self).sendall(content, *args) + + def __rewriteproxy(self, header): + """ rewrite HTTP request headers to support non-tunneling proxies + (i.e. those which do not support the CONNECT method). + This only works for HTTP (not HTTPS) since HTTPS requires tunneling. + """ + host, endpt = None, None + hdrs = header.split("\r\n") + for hdr in hdrs: + if hdr.lower().startswith("host:"): + host = hdr + elif hdr.lower().startswith("get") or hdr.lower().startswith("post"): + endpt = hdr + if host and endpt: + hdrs.remove(host) + hdrs.remove(endpt) + host = host.split(" ")[1] + endpt = endpt.split(" ") + if (self.__proxy[4] != None and self.__proxy[5] != None): + hdrs.insert(0, self.__getauthheader()) + hdrs.insert(0, "Host: %s" % host) + hdrs.insert(0, "%s http://%s%s %s" % (endpt[0], host, endpt[1], endpt[2])) + return "\r\n".join(hdrs) + + def __getauthheader(self): + auth = self.__proxy[4] + ":" + self.__proxy[5] + return "Proxy-Authorization: Basic " + base64.b64encode(auth) + + def setproxy(self, proxytype=None, addr=None, port=None, rdns=True, username=None, password=None): + """setproxy(proxytype, addr[, port[, rdns[, username[, password]]]]) + Sets the proxy to be used. + proxytype - The type of the proxy to be used. Three types + are supported: PROXY_TYPE_SOCKS4 (including socks4a), + PROXY_TYPE_SOCKS5 and PROXY_TYPE_HTTP + addr - The address of the server (IP or DNS). + port - The port of the server. Defaults to 1080 for SOCKS + servers and 8080 for HTTP proxy servers. + rdns - Should DNS queries be preformed on the remote side + (rather than the local side). The default is True. + Note: This has no effect with SOCKS4 servers. + username - Username to authenticate with to the server. + The default is no authentication. + password - Password to authenticate with to the server. + Only relevant when username is also provided. + """ + self.__proxy = (proxytype, addr, port, rdns, username, password) + + def __negotiatesocks5(self, destaddr, destport): + """__negotiatesocks5(self,destaddr,destport) + Negotiates a connection through a SOCKS5 server. + """ + # First we'll send the authentication packages we support. + if (self.__proxy[4]!=None) and (self.__proxy[5]!=None): + # The username/password details were supplied to the + # setproxy method so we support the USERNAME/PASSWORD + # authentication (in addition to the standard none). + self.sendall(struct.pack('BBBB', 0x05, 0x02, 0x00, 0x02)) + else: + # No username/password were entered, therefore we + # only support connections with no authentication. 
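+            # (This is the RFC 1928 greeting: version 0x05, one supported
+            # method, and method 0x00, "no authentication required".)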
+ self.sendall(struct.pack('BBB', 0x05, 0x01, 0x00)) + # We'll receive the server's response to determine which + # method was selected + chosenauth = self.__recvall(2) + if chosenauth[0:1] != chr(0x05).encode(): + self.close() + raise GeneralProxyError((1, _generalerrors[1])) + # Check the chosen authentication method + if chosenauth[1:2] == chr(0x00).encode(): + # No authentication is required + pass + elif chosenauth[1:2] == chr(0x02).encode(): + # Okay, we need to perform a basic username/password + # authentication. + self.sendall(chr(0x01).encode() + chr(len(self.__proxy[4])) + self.__proxy[4] + chr(len(self.__proxy[5])) + self.__proxy[5]) + authstat = self.__recvall(2) + if authstat[0:1] != chr(0x01).encode(): + # Bad response + self.close() + raise GeneralProxyError((1, _generalerrors[1])) + if authstat[1:2] != chr(0x00).encode(): + # Authentication failed + self.close() + raise Socks5AuthError((3, _socks5autherrors[3])) + # Authentication succeeded + else: + # Reaching here is always bad + self.close() + if chosenauth[1] == chr(0xFF).encode(): + raise Socks5AuthError((2, _socks5autherrors[2])) + else: + raise GeneralProxyError((1, _generalerrors[1])) + # Now we can request the actual connection + req = struct.pack('BBB', 0x05, 0x01, 0x00) + # If the given destination address is an IP address, we'll + # use the IPv4 address request even if remote resolving was specified. + try: + ipaddr = socket.inet_aton(destaddr) + req = req + chr(0x01).encode() + ipaddr + except socket.error: + # Well it's not an IP number, so it's probably a DNS name. + if self.__proxy[3]: + # Resolve remotely + ipaddr = None + req = req + chr(0x03).encode() + chr(len(destaddr)).encode() + destaddr + else: + # Resolve locally + ipaddr = socket.inet_aton(socket.gethostbyname(destaddr)) + req = req + chr(0x01).encode() + ipaddr + req = req + struct.pack(">H", destport) + self.sendall(req) + # Get the response + resp = self.__recvall(4) + if resp[0:1] != chr(0x05).encode(): + self.close() + raise GeneralProxyError((1, _generalerrors[1])) + elif resp[1:2] != chr(0x00).encode(): + # Connection failed + self.close() + if ord(resp[1:2])<=8: + raise Socks5Error((ord(resp[1:2]), _socks5errors[ord(resp[1:2])])) + else: + raise Socks5Error((9, _socks5errors[9])) + # Get the bound address/port + elif resp[3:4] == chr(0x01).encode(): + boundaddr = self.__recvall(4) + elif resp[3:4] == chr(0x03).encode(): + resp = resp + self.recv(1) + boundaddr = self.__recvall(ord(resp[4:5])) + else: + self.close() + raise GeneralProxyError((1,_generalerrors[1])) + boundport = struct.unpack(">H", self.__recvall(2))[0] + self.__proxysockname = (boundaddr, boundport) + if ipaddr != None: + self.__proxypeername = (socket.inet_ntoa(ipaddr), destport) + else: + self.__proxypeername = (destaddr, destport) + + def getproxysockname(self): + """getsockname() -> address info + Returns the bound IP address and port number at the proxy. + """ + return self.__proxysockname + + def getproxypeername(self): + """getproxypeername() -> address info + Returns the IP and port number of the proxy. + """ + return _orgsocket.getpeername(self) + + def getpeername(self): + """getpeername() -> address info + Returns the IP address and port number of the destination + machine (note: getproxypeername returns the proxy) + """ + return self.__proxypeername + + def __negotiatesocks4(self,destaddr,destport): + """__negotiatesocks4(self,destaddr,destport) + Negotiates a connection through a SOCKS4 server. 
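+        The request built below is the standard SOCKS4 CONNECT packet:
+        version 0x04, command 0x01, destination port and IPv4 address,
+        followed by an optional userid and a terminating NUL byte; with
+        remote resolving (SOCKS4A) a placeholder address of 0.0.0.1 is
+        sent and the hostname is appended, again NUL-terminated.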
+ """ + # Check if the destination address provided is an IP address + rmtrslv = False + try: + ipaddr = socket.inet_aton(destaddr) + except socket.error: + # It's a DNS name. Check where it should be resolved. + if self.__proxy[3]: + ipaddr = struct.pack("BBBB", 0x00, 0x00, 0x00, 0x01) + rmtrslv = True + else: + ipaddr = socket.inet_aton(socket.gethostbyname(destaddr)) + # Construct the request packet + req = struct.pack(">BBH", 0x04, 0x01, destport) + ipaddr + # The username parameter is considered userid for SOCKS4 + if self.__proxy[4] != None: + req = req + self.__proxy[4] + req = req + chr(0x00).encode() + # DNS name if remote resolving is required + # NOTE: This is actually an extension to the SOCKS4 protocol + # called SOCKS4A and may not be supported in all cases. + if rmtrslv: + req = req + destaddr + chr(0x00).encode() + self.sendall(req) + # Get the response from the server + resp = self.__recvall(8) + if resp[0:1] != chr(0x00).encode(): + # Bad data + self.close() + raise GeneralProxyError((1,_generalerrors[1])) + if resp[1:2] != chr(0x5A).encode(): + # Server returned an error + self.close() + if ord(resp[1:2]) in (91, 92, 93): + self.close() + raise Socks4Error((ord(resp[1:2]), _socks4errors[ord(resp[1:2]) - 90])) + else: + raise Socks4Error((94, _socks4errors[4])) + # Get the bound address/port + self.__proxysockname = (socket.inet_ntoa(resp[4:]), struct.unpack(">H", resp[2:4])[0]) + if rmtrslv != None: + self.__proxypeername = (socket.inet_ntoa(ipaddr), destport) + else: + self.__proxypeername = (destaddr, destport) + + def __negotiatehttp(self, destaddr, destport): + """__negotiatehttp(self,destaddr,destport) + Negotiates a connection through an HTTP server. + """ + # If we need to resolve locally, we do this now + if not self.__proxy[3]: + addr = socket.gethostbyname(destaddr) + else: + addr = destaddr + headers = ["CONNECT ", addr, ":", str(destport), " HTTP/1.1\r\n"] + headers += ["Host: ", destaddr, "\r\n"] + if (self.__proxy[4] != None and self.__proxy[5] != None): + headers += [self.__getauthheader(), "\r\n"] + headers.append("\r\n") + self.sendall("".join(headers).encode()) + # We read the response until we get the string "\r\n\r\n" + resp = self.recv(1) + while resp.find("\r\n\r\n".encode()) == -1: + resp = resp + self.recv(1) + # We just need the first line to check if the connection + # was successful + statusline = resp.splitlines()[0].split(" ".encode(), 2) + if statusline[0] not in ("HTTP/1.0".encode(), "HTTP/1.1".encode()): + self.close() + raise GeneralProxyError((1, _generalerrors[1])) + try: + statuscode = int(statusline[1]) + except ValueError: + self.close() + raise GeneralProxyError((1, _generalerrors[1])) + if statuscode != 200: + self.close() + raise HTTPError((statuscode, statusline[2])) + self.__proxysockname = ("0.0.0.0", 0) + self.__proxypeername = (addr, destport) + + def connect(self, destpair): + """connect(self, despair) + Connects to the specified destination through a proxy. + destpar - A tuple of the IP/DNS address and the port number. + (identical to socket's connect). + To select the proxy server use setproxy(). 
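+        Note that the proxy handshake (SOCKS4, SOCKS5 or HTTP CONNECT) is
+        performed inside this call, so ProxyError subclasses may be raised
+        here rather than on a later send or recv.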
+ """ + # Do a minimal input check first + if (not type(destpair) in (list,tuple)) or (len(destpair) < 2) or (not isinstance(destpair[0], basestring)) or (type(destpair[1]) != int): + raise GeneralProxyError((5, _generalerrors[5])) + if self.__proxy[0] == PROXY_TYPE_SOCKS5: + if self.__proxy[2] != None: + portnum = self.__proxy[2] + else: + portnum = 1080 + _orgsocket.connect(self, (self.__proxy[1], portnum)) + self.__negotiatesocks5(destpair[0], destpair[1]) + elif self.__proxy[0] == PROXY_TYPE_SOCKS4: + if self.__proxy[2] != None: + portnum = self.__proxy[2] + else: + portnum = 1080 + _orgsocket.connect(self,(self.__proxy[1], portnum)) + self.__negotiatesocks4(destpair[0], destpair[1]) + elif self.__proxy[0] == PROXY_TYPE_HTTP: + if self.__proxy[2] != None: + portnum = self.__proxy[2] + else: + portnum = 8080 + _orgsocket.connect(self,(self.__proxy[1], portnum)) + self.__negotiatehttp(destpair[0], destpair[1]) + elif self.__proxy[0] == PROXY_TYPE_HTTP_NO_TUNNEL: + if self.__proxy[2] != None: + portnum = self.__proxy[2] + else: + portnum = 8080 + _orgsocket.connect(self,(self.__proxy[1],portnum)) + if destpair[1] == 443: + self.__negotiatehttp(destpair[0],destpair[1]) + else: + self.__httptunnel = False + elif self.__proxy[0] == None: + _orgsocket.connect(self, (destpair[0], destpair[1])) + else: + raise GeneralProxyError((4, _generalerrors[4])) diff --git a/lib/oauth2/__init__.py b/lib/oauth2/__init__.py new file mode 100644 index 0000000..835270e --- /dev/null +++ b/lib/oauth2/__init__.py @@ -0,0 +1,860 @@ +""" +The MIT License + +Copyright (c) 2007-2010 Leah Culver, Joe Stump, Mark Paschal, Vic Fryzel + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. +""" + +import base64 +import urllib +import time +import random +import urlparse +import hmac +import binascii +import httplib2 + +try: + from urlparse import parse_qs + parse_qs # placate pyflakes +except ImportError: + # fall back for Python 2.5 + from cgi import parse_qs + +try: + from hashlib import sha1 + sha = sha1 +except ImportError: + # hashlib was added in Python 2.5 + import sha + +import _version + +__version__ = _version.__version__ + +OAUTH_VERSION = '1.0' # Hi Blaine! 
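+
+# A typical client flow with this module, as an illustrative sketch only
+# (the key, secret and URL values below are placeholders, not anything
+# defined by this library):
+#
+#   consumer = Consumer(key="consumer-key", secret="consumer-secret")
+#   token = Token(key="access-token", secret="access-token-secret")
+#   client = Client(consumer, token)
+#   resp, content = client.request("https://api.example.com/resource", "GET")
+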
+HTTP_METHOD = 'GET' +SIGNATURE_METHOD = 'PLAINTEXT' + + +class Error(RuntimeError): + """Generic exception class.""" + + def __init__(self, message='OAuth error occurred.'): + self._message = message + + @property + def message(self): + """A hack to get around the deprecation errors in 2.6.""" + return self._message + + def __str__(self): + return self._message + + +class MissingSignature(Error): + pass + + +def build_authenticate_header(realm=''): + """Optional WWW-Authenticate header (401 error)""" + return {'WWW-Authenticate': 'OAuth realm="%s"' % realm} + + +def build_xoauth_string(url, consumer, token=None): + """Build an XOAUTH string for use in SMTP/IMPA authentication.""" + request = Request.from_consumer_and_token(consumer, token, + "GET", url) + + signing_method = SignatureMethod_HMAC_SHA1() + request.sign_request(signing_method, consumer, token) + + params = [] + for k, v in sorted(request.iteritems()): + if v is not None: + params.append('%s="%s"' % (k, escape(v))) + + return "%s %s %s" % ("GET", url, ','.join(params)) + + +def to_unicode(s): + """ Convert to unicode, raise exception with instructive error + message if s is not unicode, ascii, or utf-8. """ + if not isinstance(s, unicode): + if not isinstance(s, str): + raise TypeError('You are required to pass either unicode or string here, not: %r (%s)' % (type(s), s)) + try: + s = s.decode('utf-8') + except UnicodeDecodeError, le: + raise TypeError('You are required to pass either a unicode object or a utf-8 string here. You passed a Python string object which contained non-utf-8: %r. The UnicodeDecodeError that resulted from attempting to interpret it as utf-8 was: %s' % (s, le,)) + return s + +def to_utf8(s): + return to_unicode(s).encode('utf-8') + +def to_unicode_if_string(s): + if isinstance(s, basestring): + return to_unicode(s) + else: + return s + +def to_utf8_if_string(s): + if isinstance(s, basestring): + return to_utf8(s) + else: + return s + +def to_unicode_optional_iterator(x): + """ + Raise TypeError if x is a str containing non-utf8 bytes or if x is + an iterable which contains such a str. + """ + if isinstance(x, basestring): + return to_unicode(x) + + try: + l = list(x) + except TypeError, e: + assert 'is not iterable' in str(e) + return x + else: + return [ to_unicode(e) for e in l ] + +def to_utf8_optional_iterator(x): + """ + Raise TypeError if x is a str or if x is an iterable which + contains a str. + """ + if isinstance(x, basestring): + return to_utf8(x) + + try: + l = list(x) + except TypeError, e: + assert 'is not iterable' in str(e) + return x + else: + return [ to_utf8_if_string(e) for e in l ] + +def escape(s): + """Escape a URL including any /.""" + return urllib.quote(s.encode('utf-8'), safe='~') + +def generate_timestamp(): + """Get seconds since epoch (UTC).""" + return int(time.time()) + + +def generate_nonce(length=8): + """Generate pseudorandom number.""" + return ''.join([str(random.randint(0, 9)) for i in range(length)]) + + +def generate_verifier(length=8): + """Generate pseudorandom number.""" + return ''.join([str(random.randint(0, 9)) for i in range(length)]) + + +class Consumer(object): + """A consumer of OAuth-protected services. + + The OAuth consumer is a "third-party" service that wants to access + protected resources from an OAuth service provider on behalf of an end + user. It's kind of the OAuth client. + + Usually a consumer must be registered with the service provider by the + developer of the consumer software. 
As part of that process, the service + provider gives the consumer a *key* and a *secret* with which the consumer + software can identify itself to the service. The consumer will include its + key in each request to identify itself, but will use its secret only when + signing requests, to prove that the request is from that particular + registered consumer. + + Once registered, the consumer can then use its consumer credentials to ask + the service provider for a request token, kicking off the OAuth + authorization process. + """ + + key = None + secret = None + + def __init__(self, key, secret): + self.key = key + self.secret = secret + + if self.key is None or self.secret is None: + raise ValueError("Key and secret must be set.") + + def __str__(self): + data = {'oauth_consumer_key': self.key, + 'oauth_consumer_secret': self.secret} + + return urllib.urlencode(data) + + +class Token(object): + """An OAuth credential used to request authorization or a protected + resource. + + Tokens in OAuth comprise a *key* and a *secret*. The key is included in + requests to identify the token being used, but the secret is used only in + the signature, to prove that the requester is who the server gave the + token to. + + When first negotiating the authorization, the consumer asks for a *request + token* that the live user authorizes with the service provider. The + consumer then exchanges the request token for an *access token* that can + be used to access protected resources. + """ + + key = None + secret = None + callback = None + callback_confirmed = None + verifier = None + + def __init__(self, key, secret): + self.key = key + self.secret = secret + + if self.key is None or self.secret is None: + raise ValueError("Key and secret must be set.") + + def set_callback(self, callback): + self.callback = callback + self.callback_confirmed = 'true' + + def set_verifier(self, verifier=None): + if verifier is not None: + self.verifier = verifier + else: + self.verifier = generate_verifier() + + def get_callback_url(self): + if self.callback and self.verifier: + # Append the oauth_verifier. + parts = urlparse.urlparse(self.callback) + scheme, netloc, path, params, query, fragment = parts[:6] + if query: + query = '%s&oauth_verifier=%s' % (query, self.verifier) + else: + query = 'oauth_verifier=%s' % self.verifier + return urlparse.urlunparse((scheme, netloc, path, params, + query, fragment)) + return self.callback + + def to_string(self): + """Returns this token as a plain string, suitable for storage. + + The resulting string includes the token's secret, so you should never + send or store this string where a third party can read it. 
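+
+        The serialized form is a URL-encoded query string containing
+        oauth_token and oauth_token_secret (plus oauth_callback_confirmed
+        when a callback has been confirmed), e.g. with illustrative values:
+        oauth_token=abc123&oauth_token_secret=s3cret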
+ """ + + data = { + 'oauth_token': self.key, + 'oauth_token_secret': self.secret, + } + + if self.callback_confirmed is not None: + data['oauth_callback_confirmed'] = self.callback_confirmed + return urllib.urlencode(data) + + @staticmethod + def from_string(s): + """Deserializes a token from a string like one returned by + `to_string()`.""" + + if not len(s): + raise ValueError("Invalid parameter string.") + + params = parse_qs(s, keep_blank_values=False) + if not len(params): + raise ValueError("Invalid parameter string.") + + try: + key = params['oauth_token'][0] + except Exception: + raise ValueError("'oauth_token' not found in OAuth request.") + + try: + secret = params['oauth_token_secret'][0] + except Exception: + raise ValueError("'oauth_token_secret' not found in " + "OAuth request.") + + token = Token(key, secret) + try: + token.callback_confirmed = params['oauth_callback_confirmed'][0] + except KeyError: + pass # 1.0, no callback confirmed. + return token + + def __str__(self): + return self.to_string() + + +def setter(attr): + name = attr.__name__ + + def getter(self): + try: + return self.__dict__[name] + except KeyError: + raise AttributeError(name) + + def deleter(self): + del self.__dict__[name] + + return property(getter, attr, deleter) + + +class Request(dict): + + """The parameters and information for an HTTP request, suitable for + authorizing with OAuth credentials. + + When a consumer wants to access a service's protected resources, it does + so using a signed HTTP request identifying itself (the consumer) with its + key, and providing an access token authorized by the end user to access + those resources. + + """ + + version = OAUTH_VERSION + + def __init__(self, method=HTTP_METHOD, url=None, parameters=None, + body='', is_form_encoded=False): + if url is not None: + self.url = to_unicode(url) + self.method = method + if parameters is not None: + for k, v in parameters.iteritems(): + k = to_unicode(k) + v = to_unicode_optional_iterator(v) + self[k] = v + self.body = body + self.is_form_encoded = is_form_encoded + + + @setter + def url(self, value): + self.__dict__['url'] = value + if value is not None: + scheme, netloc, path, params, query, fragment = urlparse.urlparse(value) + + # Exclude default port numbers. + if scheme == 'http' and netloc[-3:] == ':80': + netloc = netloc[:-3] + elif scheme == 'https' and netloc[-4:] == ':443': + netloc = netloc[:-4] + if scheme not in ('http', 'https'): + raise ValueError("Unsupported URL %s (%s)." % (value, scheme)) + + # Normalized URL excludes params, query, and fragment. 
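+            # e.g. 'https://api.example.com:443/resource?a=1#frag' normalizes
+            # to 'https://api.example.com/resource'.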
+ self.normalized_url = urlparse.urlunparse((scheme, netloc, path, None, None, None)) + else: + self.normalized_url = None + self.__dict__['url'] = None + + @setter + def method(self, value): + self.__dict__['method'] = value.upper() + + def _get_timestamp_nonce(self): + return self['oauth_timestamp'], self['oauth_nonce'] + + def get_nonoauth_parameters(self): + """Get any non-OAuth parameters.""" + return dict([(k, v) for k, v in self.iteritems() + if not k.startswith('oauth_')]) + + def to_header(self, realm=''): + """Serialize as a header for an HTTPAuth request.""" + oauth_params = ((k, v) for k, v in self.items() + if k.startswith('oauth_')) + stringy_params = ((k, escape(str(v))) for k, v in oauth_params) + header_params = ('%s="%s"' % (k, v) for k, v in stringy_params) + params_header = ', '.join(header_params) + + auth_header = 'OAuth realm="%s"' % realm + if params_header: + auth_header = "%s, %s" % (auth_header, params_header) + + return {'Authorization': auth_header} + + def to_postdata(self): + """Serialize as post data for a POST request.""" + d = {} + for k, v in self.iteritems(): + d[k.encode('utf-8')] = to_utf8_optional_iterator(v) + + # tell urlencode to deal with sequence values and map them correctly + # to resulting querystring. for example self["k"] = ["v1", "v2"] will + # result in 'k=v1&k=v2' and not k=%5B%27v1%27%2C+%27v2%27%5D + return urllib.urlencode(d, True).replace('+', '%20') + + def to_url(self): + """Serialize as a URL for a GET request.""" + base_url = urlparse.urlparse(self.url) + try: + query = base_url.query + except AttributeError: + # must be python <2.5 + query = base_url[4] + query = parse_qs(query) + for k, v in self.items(): + query.setdefault(k, []).append(v) + + try: + scheme = base_url.scheme + netloc = base_url.netloc + path = base_url.path + params = base_url.params + fragment = base_url.fragment + except AttributeError: + # must be python <2.5 + scheme = base_url[0] + netloc = base_url[1] + path = base_url[2] + params = base_url[3] + fragment = base_url[5] + + url = (scheme, netloc, path, params, + urllib.urlencode(query, True), fragment) + return urlparse.urlunparse(url) + + def get_parameter(self, parameter): + ret = self.get(parameter) + if ret is None: + raise Error('Parameter not found: %s' % parameter) + + return ret + + def get_normalized_parameters(self): + """Return a string that contains the parameters that must be signed.""" + items = [] + for key, value in self.iteritems(): + if key == 'oauth_signature': + continue + # 1.0a/9.1.1 states that kvp must be sorted by key, then by value, + # so we unpack sequence values into multiple items for sorting. 
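+            # e.g. a value of ['2', '1'] for key 'a' is expanded into the
+            # pairs ('a', '2') and ('a', '1') before the final sort.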
+ if isinstance(value, basestring): + items.append((to_utf8_if_string(key), to_utf8(value))) + else: + try: + value = list(value) + except TypeError, e: + assert 'is not iterable' in str(e) + items.append((to_utf8_if_string(key), to_utf8_if_string(value))) + else: + items.extend((to_utf8_if_string(key), to_utf8_if_string(item)) for item in value) + + # Include any query string parameters from the provided URL + query = urlparse.urlparse(self.url)[4] + + url_items = self._split_url_string(query).items() + url_items = [(to_utf8(k), to_utf8(v)) for k, v in url_items if k != 'oauth_signature' ] + items.extend(url_items) + + items.sort() + encoded_str = urllib.urlencode(items) + # Encode signature parameters per Oauth Core 1.0 protocol + # spec draft 7, section 3.6 + # (http://tools.ietf.org/html/draft-hammer-oauth-07#section-3.6) + # Spaces must be encoded with "%20" instead of "+" + return encoded_str.replace('+', '%20').replace('%7E', '~') + + def sign_request(self, signature_method, consumer, token): + """Set the signature parameter to the result of sign.""" + + if not self.is_form_encoded: + # according to + # http://oauth.googlecode.com/svn/spec/ext/body_hash/1.0/oauth-bodyhash.html + # section 4.1.1 "OAuth Consumers MUST NOT include an + # oauth_body_hash parameter on requests with form-encoded + # request bodies." + self['oauth_body_hash'] = base64.b64encode(sha(self.body).digest()) + + if 'oauth_consumer_key' not in self: + self['oauth_consumer_key'] = consumer.key + + if token and 'oauth_token' not in self: + self['oauth_token'] = token.key + + self['oauth_signature_method'] = signature_method.name + self['oauth_signature'] = signature_method.sign(self, consumer, token) + + @classmethod + def make_timestamp(cls): + """Get seconds since epoch (UTC).""" + return str(int(time.time())) + + @classmethod + def make_nonce(cls): + """Generate pseudorandom number.""" + return str(random.randint(0, 100000000)) + + @classmethod + def from_request(cls, http_method, http_url, headers=None, parameters=None, + query_string=None): + """Combines multiple parameter sources.""" + if parameters is None: + parameters = {} + + # Headers + if headers and 'Authorization' in headers: + auth_header = headers['Authorization'] + # Check that the authorization header is OAuth. + if auth_header[:6] == 'OAuth ': + auth_header = auth_header[6:] + try: + # Get the parameters from the header. + header_params = cls._split_header(auth_header) + parameters.update(header_params) + except: + raise Error('Unable to parse OAuth parameters from ' + 'Authorization header.') + + # GET or POST query string. + if query_string: + query_params = cls._split_url_string(query_string) + parameters.update(query_params) + + # URL parameters. 
+ param_str = urlparse.urlparse(http_url)[4] # query + url_params = cls._split_url_string(param_str) + parameters.update(url_params) + + if parameters: + return cls(http_method, http_url, parameters) + + return None + + @classmethod + def from_consumer_and_token(cls, consumer, token=None, + http_method=HTTP_METHOD, http_url=None, parameters=None, + body='', is_form_encoded=False): + if not parameters: + parameters = {} + + defaults = { + 'oauth_consumer_key': consumer.key, + 'oauth_timestamp': cls.make_timestamp(), + 'oauth_nonce': cls.make_nonce(), + 'oauth_version': cls.version, + } + + defaults.update(parameters) + parameters = defaults + + if token: + parameters['oauth_token'] = token.key + if token.verifier: + parameters['oauth_verifier'] = token.verifier + + return Request(http_method, http_url, parameters, body=body, + is_form_encoded=is_form_encoded) + + @classmethod + def from_token_and_callback(cls, token, callback=None, + http_method=HTTP_METHOD, http_url=None, parameters=None): + + if not parameters: + parameters = {} + + parameters['oauth_token'] = token.key + + if callback: + parameters['oauth_callback'] = callback + + return cls(http_method, http_url, parameters) + + @staticmethod + def _split_header(header): + """Turn Authorization: header into parameters.""" + params = {} + parts = header.split(',') + for param in parts: + # Ignore realm parameter. + if param.find('realm') > -1: + continue + # Remove whitespace. + param = param.strip() + # Split key-value. + param_parts = param.split('=', 1) + # Remove quotes and unescape the value. + params[param_parts[0]] = urllib.unquote(param_parts[1].strip('\"')) + return params + + @staticmethod + def _split_url_string(param_str): + """Turn URL string into parameters.""" + parameters = parse_qs(param_str.encode('utf-8'), keep_blank_values=True) + for k, v in parameters.iteritems(): + parameters[k] = urllib.unquote(v[0]) + return parameters + + +class Client(httplib2.Http): + """OAuthClient is a worker to attempt to execute a request.""" + + def __init__(self, consumer, token=None, cache=None, timeout=None, + proxy_info=None): + + if consumer is not None and not isinstance(consumer, Consumer): + raise ValueError("Invalid consumer.") + + if token is not None and not isinstance(token, Token): + raise ValueError("Invalid token.") + + self.consumer = consumer + self.token = token + self.method = SignatureMethod_HMAC_SHA1() + + httplib2.Http.__init__(self, cache=cache, timeout=timeout, proxy_info=proxy_info) + + def set_signature_method(self, method): + if not isinstance(method, SignatureMethod): + raise ValueError("Invalid signature method.") + + self.method = method + + def request(self, uri, method="GET", body='', headers=None, + redirections=httplib2.DEFAULT_MAX_REDIRECTS, connection_type=None): + DEFAULT_POST_CONTENT_TYPE = 'application/x-www-form-urlencoded' + + if not isinstance(headers, dict): + headers = {} + + if method == "POST": + headers['Content-Type'] = headers.get('Content-Type', + DEFAULT_POST_CONTENT_TYPE) + + is_form_encoded = \ + headers.get('Content-Type') == 'application/x-www-form-urlencoded' + + if is_form_encoded and body: + parameters = parse_qs(body) + else: + parameters = None + + req = Request.from_consumer_and_token(self.consumer, + token=self.token, http_method=method, http_url=uri, + parameters=parameters, body=body, is_form_encoded=is_form_encoded) + + req.sign_request(self.method, self.consumer, self.token) + + schema, rest = urllib.splittype(uri) + if rest.startswith('//'): + hierpart = '//' + else: + 
hierpart = '' + host, rest = urllib.splithost(rest) + + realm = schema + ':' + hierpart + host + + if is_form_encoded: + body = req.to_postdata() + elif method == "GET": + uri = req.to_url() + else: + headers.update(req.to_header(realm=realm)) + + return httplib2.Http.request(self, uri, method=method, body=body, + headers=headers, redirections=redirections, + connection_type=connection_type) + + +class Server(object): + """A skeletal implementation of a service provider, providing protected + resources to requests from authorized consumers. + + This class implements the logic to check requests for authorization. You + can use it with your web server or web framework to protect certain + resources with OAuth. + """ + + timestamp_threshold = 300 # In seconds, five minutes. + version = OAUTH_VERSION + signature_methods = None + + def __init__(self, signature_methods=None): + self.signature_methods = signature_methods or {} + + def add_signature_method(self, signature_method): + self.signature_methods[signature_method.name] = signature_method + return self.signature_methods + + def verify_request(self, request, consumer, token): + """Verifies an api call and checks all the parameters.""" + + self._check_version(request) + self._check_signature(request, consumer, token) + parameters = request.get_nonoauth_parameters() + return parameters + + def build_authenticate_header(self, realm=''): + """Optional support for the authenticate header.""" + return {'WWW-Authenticate': 'OAuth realm="%s"' % realm} + + def _check_version(self, request): + """Verify the correct version of the request for this server.""" + version = self._get_version(request) + if version and version != self.version: + raise Error('OAuth version %s not supported.' % str(version)) + + def _get_version(self, request): + """Return the version of the request for this server.""" + try: + version = request.get_parameter('oauth_version') + except: + version = OAUTH_VERSION + + return version + + def _get_signature_method(self, request): + """Figure out the signature with some defaults.""" + try: + signature_method = request.get_parameter('oauth_signature_method') + except: + signature_method = SIGNATURE_METHOD + + try: + # Get the signature method object. + signature_method = self.signature_methods[signature_method] + except: + signature_method_names = ', '.join(self.signature_methods.keys()) + raise Error('Signature method %s not supported try one of the following: %s' % (signature_method, signature_method_names)) + + return signature_method + + def _get_verifier(self, request): + return request.get_parameter('oauth_verifier') + + def _check_signature(self, request, consumer, token): + timestamp, nonce = request._get_timestamp_nonce() + self._check_timestamp(timestamp) + signature_method = self._get_signature_method(request) + + try: + signature = request.get_parameter('oauth_signature') + except: + raise MissingSignature('Missing oauth_signature.') + + # Validate the signature. + valid = signature_method.check(request, consumer, token, signature) + + if not valid: + key, base = signature_method.signing_base(request, consumer, token) + + raise Error('Invalid signature. 
Expected signature base ' + 'string: %s' % base) + + def _check_timestamp(self, timestamp): + """Verify that timestamp is recentish.""" + timestamp = int(timestamp) + now = int(time.time()) + lapsed = now - timestamp + if lapsed > self.timestamp_threshold: + raise Error('Expired timestamp: given %d and now %s has a ' + 'greater difference than threshold %d' % (timestamp, now, + self.timestamp_threshold)) + + +class SignatureMethod(object): + """A way of signing requests. + + The OAuth protocol lets consumers and service providers pick a way to sign + requests. This interface shows the methods expected by the other `oauth` + modules for signing requests. Subclass it and implement its methods to + provide a new way to sign requests. + """ + + def signing_base(self, request, consumer, token): + """Calculates the string that needs to be signed. + + This method returns a 2-tuple containing the starting key for the + signing and the message to be signed. The latter may be used in error + messages to help clients debug their software. + + """ + raise NotImplementedError + + def sign(self, request, consumer, token): + """Returns the signature for the given request, based on the consumer + and token also provided. + + You should use your implementation of `signing_base()` to build the + message to sign. Otherwise it may be less useful for debugging. + + """ + raise NotImplementedError + + def check(self, request, consumer, token, signature): + """Returns whether the given signature is the correct signature for + the given consumer and token signing the given request.""" + built = self.sign(request, consumer, token) + return built == signature + + +class SignatureMethod_HMAC_SHA1(SignatureMethod): + name = 'HMAC-SHA1' + + def signing_base(self, request, consumer, token): + if not hasattr(request, 'normalized_url') or request.normalized_url is None: + raise ValueError("Base URL for request is not set.") + + sig = ( + escape(request.method), + escape(request.normalized_url), + escape(request.get_normalized_parameters()), + ) + + key = '%s&' % escape(consumer.secret) + if token: + key += escape(token.secret) + raw = '&'.join(sig) + return key, raw + + def sign(self, request, consumer, token): + """Builds the base signature string.""" + key, raw = self.signing_base(request, consumer, token) + + hashed = hmac.new(key, raw, sha) + + # Calculate the digest base 64. + return binascii.b2a_base64(hashed.digest())[:-1] + + +class SignatureMethod_PLAINTEXT(SignatureMethod): + + name = 'PLAINTEXT' + + def signing_base(self, request, consumer, token): + """Concatenates the consumer key and secret with the token's + secret.""" + sig = '%s&' % escape(consumer.secret) + if token: + sig = sig + escape(token.secret) + return sig, sig + + def sign(self, request, consumer, token): + key, raw = self.signing_base(request, consumer, token) + return raw diff --git a/lib/oauth2/_version.py b/lib/oauth2/_version.py new file mode 100644 index 0000000..9d779ea --- /dev/null +++ b/lib/oauth2/_version.py @@ -0,0 +1,18 @@ +# This is the version of this source code. + +manual_verstr = "1.5" + + + +auto_build_num = "211" + + + +verstr = manual_verstr + "." + auto_build_num +try: + from pyutil.version_class import Version as pyutil_Version + __version__ = pyutil_Version(verstr) +except (ImportError, ValueError): + # Maybe there is no pyutil installed. 
+ from distutils.version import LooseVersion as distutils_Version + __version__ = distutils_Version(verstr) diff --git a/lib/oauth2/clients/__init__.py b/lib/oauth2/clients/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/lib/oauth2/clients/imap.py b/lib/oauth2/clients/imap.py new file mode 100644 index 0000000..68b7cd8 --- /dev/null +++ b/lib/oauth2/clients/imap.py @@ -0,0 +1,40 @@ +""" +The MIT License + +Copyright (c) 2007-2010 Leah Culver, Joe Stump, Mark Paschal, Vic Fryzel + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. +""" + +import oauth2 +import imaplib + + +class IMAP4_SSL(imaplib.IMAP4_SSL): + """IMAP wrapper for imaplib.IMAP4_SSL that implements XOAUTH.""" + + def authenticate(self, url, consumer, token): + if consumer is not None and not isinstance(consumer, oauth2.Consumer): + raise ValueError("Invalid consumer.") + + if token is not None and not isinstance(token, oauth2.Token): + raise ValueError("Invalid token.") + + imaplib.IMAP4_SSL.authenticate(self, 'XOAUTH', + lambda x: oauth2.build_xoauth_string(url, consumer, token)) diff --git a/lib/oauth2/clients/smtp.py b/lib/oauth2/clients/smtp.py new file mode 100644 index 0000000..3e7bf0b --- /dev/null +++ b/lib/oauth2/clients/smtp.py @@ -0,0 +1,41 @@ +""" +The MIT License + +Copyright (c) 2007-2010 Leah Culver, Joe Stump, Mark Paschal, Vic Fryzel + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. 
+""" + +import oauth2 +import smtplib +import base64 + + +class SMTP(smtplib.SMTP): + """SMTP wrapper for smtplib.SMTP that implements XOAUTH.""" + + def authenticate(self, url, consumer, token): + if consumer is not None and not isinstance(consumer, oauth2.Consumer): + raise ValueError("Invalid consumer.") + + if token is not None and not isinstance(token, oauth2.Token): + raise ValueError("Invalid token.") + + self.docmd('AUTH', 'XOAUTH %s' % \ + base64.b64encode(oauth2.build_xoauth_string(url, consumer, token))) diff --git a/lib/yql/AUTHORS b/lib/yql/AUTHORS new file mode 100644 index 0000000..93e3fb6 --- /dev/null +++ b/lib/yql/AUTHORS @@ -0,0 +1,6 @@ +* Stuart Colville + +* Cyril Doussin +* Diogo Baeder +* Jannis Leidel +* *YOUR NAME HERE* diff --git a/lib/yql/LICENSE b/lib/yql/LICENSE new file mode 100644 index 0000000..9ab5f72 --- /dev/null +++ b/lib/yql/LICENSE @@ -0,0 +1,27 @@ +:: + + Copyright (c) 2009, Stuart Colville, Muffin Research Labs + All rights reserved. + + Redistribution and use in source and binary forms, with or without + modification, are permitted provided that the following conditions are met: + * Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above copyright + notice, this list of conditions and the following disclaimer in the + documentation and/or other materials provided with the distribution. + * Neither the name of "Muffin Research Labs" nor the + names of its contributors may be used to endorse or promote products + derived from this software without specific prior written permission. + + THIS SOFTWARE IS PROVIDED BY "Muffin Research Labs" ''AS IS'' AND ANY + EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED + WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE + DISCLAIMED. IN NO EVENT SHALL "Muffin Research Labs" BE LIABLE FOR ANY + DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES + (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; + LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND + ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT + (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS + SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
+
diff --git a/lib/yql/__init__.py b/lib/yql/__init__.py
new file mode 100644
index 0000000..d268d7a
--- /dev/null
+++ b/lib/yql/__init__.py
@@ -0,0 +1,640 @@
+"""
+Python YQL
+==========
+
+YQL client for Python
+
+Author: Stuart Colville http://muffinresearch.co.uk/
+Docs at: http://python-yql.org/
+
+TODO: More granular error handling
+
+"""
+import json
+import re
+import time
+import pprint
+from urlparse import urlparse
+from urllib import urlencode
+from httplib2 import Http
+
+from yql.utils import get_http_method, clean_url, clean_query
+from yql.logger import get_logger
+import oauth2 as oauth
+
+try:
+    from urlparse import parse_qs, parse_qsl
+except ImportError:  # pragma: no cover
+    from cgi import parse_qs, parse_qsl
+
+
+__author__ = 'Stuart Colville'
+__version__ = '0.7.5'
+__all__ = ['Public', 'TwoLegged', 'ThreeLegged']
+
+
+QUERY_PLACEHOLDER = re.compile(r"[ =]@(?P<param>[a-z].*?\b)", re.IGNORECASE)
+
+
+REQUEST_TOKEN_URL = 'https://api.login.yahoo.com/oauth/v2/get_request_token'
+ACCESS_TOKEN_URL = 'https://api.login.yahoo.com/oauth/v2/get_token'
+AUTHORIZATION_URL = 'https://api.login.yahoo.com/oauth/v2/request_auth'
+
+
+PUBLIC_ENDPOINT = "query.yahooapis.com/v1/public/yql"
+PRIVATE_ENDPOINT = "query.yahooapis.com/v1/yql"
+HTTP_SCHEME = "http:"
+HTTPS_SCHEME = "https:"
+
+
+yql_logger = get_logger()
+
+
+class YQLObj(object):
+    """A YQLObj is the object created as the result of a YQL query."""
+
+    def __init__(self, result_dict):
+        """Init query object"""
+        self._raw = result_dict and result_dict.get('query') or {}
+
+    @property
+    def raw(self):
+        """The raw data response"""
+        return self._raw
+
+    @property
+    def uri(self):
+        """The uri used to query the YQL API"""
+        return self._raw.get('uri')
+
+    @property
+    def query_params(self):
+        """The query parameters of the uri used to call the YQL API"""
+        if self.uri:
+            q_string = urlparse(self.uri)[4]
+            return dict(parse_qsl(q_string))
+        else:
+            return {}
+
+    @property
+    def results(self):
+        """The query results dict."""
+        return self._raw.get('results')
+
+    def one(self):
+        """Return just one result directly."""
+        rows = self.rows
+        if len(rows) > 1:
+            raise NotOneError, "More than one result"
+        else:
+            return rows[0]
+
+    @property
+    def rows(self):
+        """Get a list of rows returned by the query.
+
+        Results is a dict with a single key, but that key changes depending
+        on the query. This provides a way of getting at the rows list
+        regardless of the key name.
+
+        Added in version 0.6: results with a single item are now still
+        returned within a list.
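+
+        For example, with a hypothetical result dict (the "photo" key is
+        purely illustrative; the real key depends on the table queried)::
+
+            obj = YQLObj({"query": {"count": "2", "results": {
+                "photo": [{"id": "1"}, {"id": "2"}]}}})
+            obj.rows    # -> [{"id": "1"}, {"id": "2"}]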
+ + """ + result = [] + if self.results: + vals = self.results.values() + if len(vals) == 1: + result = self.results.values()[0] + + if self.count == 1 and result: + result = [result] + + return result + + @property + def query(self): + """The YQL query""" + return self.query_params.get('q') + + @property + def lang(self): + """The language""" + return self._raw.get('lang') + + @property + def count(self): + """The results count""" + count = self._raw.get('count') + if count: + return int(count) + + @property + def diagnostics(self): + """The query diagnostics""" + return self._raw.get('diagnostics') + + def pprint_raw(self, indent=4): # pragma: no cover + """Pretty print the raw data""" + pprint.pprint(self._raw, indent=indent) + + def pformat_raw(self, indent=4): # pragma: no cover + """Pretty format the raw data""" + return pprint.pformat(self._raw, indent=indent) + + +class YQLError(Exception): + """Default Error""" + + def __init__(self, resp, content, url=None, query=None): + yql_logger.error("%s", content) + yql_logger.error("Error Response: %s", resp) + yql_logger.error("Error url: %s", url) + self.response = resp + self.content = content + self.url = url + self.query = query + + def __str__(self): + """Return the error message. + + Attempt to parse the json if it fails + simply return the content attribute instead. + + """ + try: + content = json.loads(self.content) + except: + content = {} + + if content and content.get("error") and content["error"].get( + "description"): + return content['error']['description'] + else: + if isinstance(self.content, basestring): + return self.content + else: + return repr(self.content) + + +class NotOneError(Exception): + """Not One Error.""" + + def __init__(self, message): + self.message = message + + def __str__(self): + """Return the error message""" + return self.message + + +class Public(object): + """Class for making public YQL queries""" + + def __init__(self, api_key=None, shared_secret=None, httplib2_inst=None): + """Init the base class. + + Optionally you can pass in an httplib2 instance which allows you + to set-up the instance in a different way for your own uses. + + Also it's very helpful in a testing scenario. 
+ + """ + self.api_key = api_key + self.secret = shared_secret + self.http = httplib2_inst or Http() + self.scheme = HTTPS_SCHEME + self.__endpoint = PUBLIC_ENDPOINT + self.uri = self.get_endpoint_uri() + + def get_endpoint_uri(self): + """Get endpoint""" + return "http://%s" % self.endpoint + + def get_endpoint(self): + """Gets the endpoint for requests""" + return self.__endpoint + + def set_endpoint(self, value): + """Sets the endpoint and updates the uri""" + if value in (PRIVATE_ENDPOINT, PUBLIC_ENDPOINT): + self.__endpoint = value + self.uri = self.get_endpoint_uri() + else: + raise ValueError, "Invalid endpoint: %s" % value + + + def get_query_params(self, query, params, **kwargs): + """Get the query params and validate placeholders""" + query_params = {} + keys_from_query = self.get_placeholder_keys(query) + + if keys_from_query and not params or ( + params and not hasattr(params, 'get')): + + raise ValueError, "If you are using placeholders a dictionary "\ + "of substitutions is required" + + elif not keys_from_query and params and hasattr(params, 'get'): + raise ValueError, "You supplied a dictionary of substitutions "\ + "but the query doesn't have any placeholders" + + elif keys_from_query and params: + keys_from_params = params.keys() + + if set(keys_from_query) != set(keys_from_params): + raise ValueError, "Parameter keys don't match the query "\ + "placeholders" + else: + query_params.update(params) + + query_params['q'] = query + query_params['format'] = 'json' + + env = kwargs.get('env') + if env: + query_params['env'] = env + + return query_params + + @staticmethod + def get_placeholder_keys(query): + """Gets the @var placeholders + + http://developer.yahoo.com/yql/guide/var_substitution.html + + """ + result = [] + for match in QUERY_PLACEHOLDER.finditer(query): + result.append(match.group('param')) + + if result: + yql_logger.debug("placeholder_keys: %s", result) + + return result + + def get_uri(self, query, params=None, **kwargs): + """Get the the request url""" + params = self.get_query_params(query, params, **kwargs) + query_string = urlencode(params) + uri = '%s?%s' % (self.uri, query_string) + uri = clean_url(uri) + return uri + + def execute(self, query, params=None, **kwargs): + """Execute YQL query""" + query = clean_query(query) + url = self.get_uri(query, params, **kwargs) + # Just in time change to https avoids + # invalid oauth sigs + if self.scheme == HTTPS_SCHEME: + url = url.replace(HTTP_SCHEME, HTTPS_SCHEME) + yql_logger.debug("executed url: %s", url) + http_method = get_http_method(query) + if http_method in ["DELETE", "PUT", "POST"]: + data = {"q": query} + + # Encode as json and set Content-Type header + # to reflect we are sending JSON + # Fixes LP: 629064 + data = json.dumps(data) + headers = {"Content-Type": "application/json"} + resp, content = self.http.request( + url, http_method, headers=headers, body=data) + yql_logger.debug("body: %s", data) + else: + resp, content = self.http.request(url, http_method) + yql_logger.debug("http_method: %s", http_method) + if resp.get('status') == '200': + return YQLObj(json.loads(content)) + else: + raise YQLError, (resp, content) + + endpoint = property(get_endpoint, set_endpoint) + + +class TwoLegged(Public): + """Two legged Auth is simple request which is signed prior to sending""" + + def __init__(self, api_key, shared_secret, httplib2_inst=None): + """Override init to ensure required args""" + super(TwoLegged, self).__init__(api_key, shared_secret, httplib2_inst) + self.endpoint = PRIVATE_ENDPOINT + 
        self.scheme = HTTPS_SCHEME
+        self.hmac_sha1_signature = oauth.SignatureMethod_HMAC_SHA1()
+        self.plaintext_signature = oauth.SignatureMethod_PLAINTEXT()
+
+    @staticmethod
+    def get_base_params():
+        """Set up the basic parameters needed for a request"""
+
+        params = {}
+        params['oauth_version'] = "1.0"
+        params['oauth_nonce'] = oauth.generate_nonce()
+        params['oauth_timestamp'] = int(time.time())
+        return params
+
+    def __two_legged_request(self, resource_url, parameters=None, method=None):
+        """Sign a request for two-legged authentication"""
+
+        params = self.get_base_params()
+        if parameters:
+            params.update(parameters)
+
+        yql_logger.debug("params: %s", params)
+        yql_logger.debug("resource_url: %s", resource_url)
+        if not method:
+            method = "GET"
+
+        consumer = oauth.Consumer(self.api_key, self.secret)
+        request = oauth.Request(method=method, url=resource_url,
+                                parameters=params)
+        request.sign_request(self.hmac_sha1_signature, consumer, None)
+        return request
+
+    def get_uri(self, query, params=None, **kwargs):
+        """Get the request URL"""
+        query_params = self.get_query_params(query, params, **kwargs)
+
+        http_method = get_http_method(query)
+        request = self.__two_legged_request(self.uri,
+                        parameters=query_params, method=http_method)
+        uri = "%s?%s" % (self.uri, request.to_postdata())
+        uri = clean_url(uri)
+        return uri
+
+
+class ThreeLegged(TwoLegged):
+
+    """
+    Three-legged auth is used when the request involves private data such
+    as a user's contacts.
+
+    Three-legged auth is most likely to be used in a website or other
+    web-accessible application. It requires the user to authenticate the
+    request through the Yahoo login.
+
+    Three-legged auth requires the implementation to:
+
+    * request a token
+    * get an authentication URL
+    * have the user log in at the auth URL, which either redirects to a
+      callback or shows a verifier string on screen
+    * read the verifier at the callback URL, or have it entered manually,
+      to obtain the access token
+    * access the protected resource
+
+    An implementation will call the following methods, in order, the first
+    time the user needs to authenticate:
+
+    * :meth:`get_token_and_auth_url` (returns a token and the auth URL)
+    * get the verifier through the callback or from the screen
+    * :meth:`get_access_token` (returns the access token)
+    * :meth:`execute` - makes the request to the protected resource.
+
+    Once the access token has been provided, subsequent requests can re-use it.
+
+    Access tokens expire after one hour; however, they can be refreshed with
+    the :meth:`refresh_token` method.
+
+    """
+
+    def __init__(self, api_key, shared_secret, httplib2_inst=None):
+        """Override init to add consumer"""
+        super(ThreeLegged, self).__init__(
+            api_key, shared_secret, httplib2_inst)
+
+        self.scheme = HTTP_SCHEME
+        self.endpoint = PRIVATE_ENDPOINT
+        self.consumer = oauth.Consumer(self.api_key, self.secret)
+
+    def get_token_and_auth_url(self, callback_url=None):
+        """The first step is to get a request token and then send the request
+        that provides the auth URL.
+
+        Returns a tuple of the token and the authorisation URL.
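+
+        A sketch of this first leg (the key and secret shown are
+        placeholders)::
+
+            y3 = ThreeLegged('your-api-key', 'your-shared-secret')
+            request_token, auth_url = y3.get_token_and_auth_url()
+            # Send the user to auth_url, then collect the verifier string.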
+ + """ + + client = oauth.Client(self.consumer) + + params = {} + params['oauth_callback'] = callback_url or 'oob' + + request = oauth.Request(parameters=params) + url = REQUEST_TOKEN_URL + resp, content = client.request(url, "POST", request.to_postdata()) + + if resp.get('status') == '200': + token = oauth.Token.from_string(content) + yql_logger.debug("token: %s", token) + data = dict(parse_qsl(content)) + yql_logger.debug("data: %s", data) + return token, data['xoauth_request_auth_url'] + else: + raise YQLError, (resp, content, url) + + + def get_access_token(self, token, verifier): + + """Get the access token + + The verifier (required) should have been provided to the + user following login to at the url returned + by the :meth:`get_token_and_auth_url` method. + + If not you will need need to extract the auth_verifier + parameter from your callback url on the site where you + are implementing 3-legged auth in order to pass it to this + function. + + The access token can be stored and re-used for subsequent + calls. + + The stored token will also need to be refreshed periodically + with :meth:`refresh_token` + + """ + + params = {} + params['oauth_verifier'] = verifier + + oauth_request = oauth.Request.from_consumer_and_token( + self.consumer, token=token, + http_url=ACCESS_TOKEN_URL, + http_method="POST", + parameters=params) + + yql_logger.debug("oauth_request: %s", oauth_request) + oauth_request.sign_request( + self.hmac_sha1_signature, self.consumer, token) + + url = oauth_request.to_url() + + yql_logger.debug("oauth_url: %s", url) + postdata = oauth_request.to_postdata() + yql_logger.debug("oauth_postdata: %s", postdata) + resp, content = self.http.request(url, "POST", postdata) + + if resp.get('status') == '200': + access_token = YahooToken.from_string(content) + access_token.timestamp = oauth_request['oauth_timestamp'] + return access_token + else: + raise YQLError, (resp, content, url) + + + def check_token(self, token): + """Check to see if a token has expired""" + + if not hasattr(token, 'timestamp'): + raise AttributeError, 'token doesn\'t have a timestamp attrbute' + + if (int(token.timestamp) + 3600) < time.time(): + token = self.refresh_token(token) + + return token + + + def refresh_token(self, token): + """Access Tokens only last for one hour from the point of being issued. + + When a token has expired it needs to be refreshed this method takes an + expired token and refreshes it. + + token parameter can be either a token object or a token string. 
+ + """ + if not hasattr(token, "key"): + token = YahooToken.from_string(token) + + params = self.get_base_params() + params['oauth_token'] = token.key + params['oauth_token_secret'] = token.secret + params['oauth_session_handle'] = token.session_handle + + oauth_request = oauth.Request.from_consumer_and_token( + self.consumer, token=token, + http_url=ACCESS_TOKEN_URL, + http_method="POST", + parameters=params) + + yql_logger.debug("oauth_request: %s", oauth_request) + oauth_request.sign_request( + self.hmac_sha1_signature, self.consumer, token) + + url = oauth_request.to_url() + yql_logger.debug("oauth_url: %s", url) + postdata = oauth_request.to_postdata() + yql_logger.debug("oauth_postdata: %s", postdata) + resp, content = self.http.request(url, "POST", postdata) + + if resp.get('status') == '200': + access_token = YahooToken.from_string(content) + yql_logger.debug("oauth_access_token: %s", access_token) + access_token.timestamp = oauth_request['oauth_timestamp'] + return access_token + else: + raise YQLError, (resp, content, url) + + def get_uri(self, query, params=None, **kwargs): + """Get the the request url""" + query_params = self.get_query_params(query, params, **kwargs) + + token = kwargs.get("token") + + if hasattr(token, "yahoo_guid"): + query_params["oauth_yahoo_guid"] = getattr(token, "yahoo_guid") + + if not token: + raise ValueError, "Without a token three-legged-auth cannot be"\ + " carried out" + + yql_logger.debug("query_params: %s", query_params) + http_method = get_http_method(query) + oauth_request = oauth.Request.from_consumer_and_token( + self.consumer, http_url=self.uri, + token=token, parameters=query_params, + http_method=http_method) + yql_logger.debug("oauth_request: %s", oauth_request) + # Sign request + oauth_request.sign_request( + self.hmac_sha1_signature, self.consumer, token) + + yql_logger.debug("oauth_signed_request: %s", oauth_request) + uri = "%s?%s" % (self.uri, oauth_request.to_postdata()) + return uri.replace('+', '%20').replace('%7E', '~') + + +class YahooToken(oauth.Token): + """A subclass of oauth.Token with the addition of a place to + stash the session_handler which is required for token refreshing + + """ + + @staticmethod + def from_string(data_string): + """Deserializes a token from a string like one returned by + + `to_string()`.""" + + if not len(data_string): + raise ValueError("Invalid parameter string.") + + params = parse_qs(data_string, keep_blank_values=False) + if not len(params): + raise ValueError("Invalid parameter string.") + + try: + key = params['oauth_token'][0] + except Exception: + raise ValueError("'oauth_token' not found in OAuth request.") + + try: + secret = params['oauth_token_secret'][0] + except Exception: + raise ValueError("'oauth_token_secret' not found in " + "OAuth request.") + + token = YahooToken(key, secret) + + session_handle = params.get('oauth_session_handle') + if session_handle: + setattr(token, 'session_handle', session_handle[0]) + + timestamp = params.get('token_creation_timestamp') + if timestamp: + setattr(token, 'timestamp', timestamp[0]) + + try: + token.callback_confirmed = params['oauth_callback_confirmed'][0] + except KeyError: + pass # 1.0, no callback confirmed. + + return token + + + def to_string(self): + """Returns this token as a plain string, suitable for storage. + The resulting string includes the token's secret, so you should never + send or store this string where a third party can read it. 
+ + """ + + data = { + 'oauth_token': self.key, + 'oauth_token_secret': self.secret, + } + + if hasattr(self, 'session_handle'): + data['oauth_session_handle'] = self.session_handle + + if hasattr(self, 'timestamp'): + data['token_creation_timestamp'] = self.timestamp + + if self.callback_confirmed is not None: + data['oauth_callback_confirmed'] = self.callback_confirmed + + return urlencode(data) diff --git a/lib/yql/logger.py b/lib/yql/logger.py new file mode 100644 index 0000000..1069ca9 --- /dev/null +++ b/lib/yql/logger.py @@ -0,0 +1,44 @@ +"""Logging for Python YQL.""" + +import os +import logging +import logging.handlers + + +LOG_DIRECTORY_DEFAULT = os.path.join(os.path.dirname(__file__), "../logs") +LOG_DIRECTORY = os.environ.get("YQL_LOG_DIR", LOG_DIRECTORY_DEFAULT) +LOG_LEVELS = {'debug': logging.DEBUG, + 'info': logging.INFO, + 'warning': logging.WARNING, + 'error': logging.ERROR, + 'critical': logging.CRITICAL} + +LOG_LEVEL = os.environ.get("YQL_LOGGING_LEVEL", 'debug') +LOG_FILENAME = os.path.join(LOG_DIRECTORY, "python-yql.log") +MAX_BYTES = 1024 * 1024 + +log_level = LOG_LEVELS.get(LOG_LEVEL) +yql_logger = logging.getLogger("python-yql") +yql_logger.setLevel(LOG_LEVELS.get(LOG_LEVEL)) + + +class NullHandler(logging.Handler): + def emit(self, record): + pass + + +def get_logger(): + """Set-upt the logger if enabled or fallback to NullHandler.""" + if os.environ.get("YQL_LOGGING", False): + if not os.path.exists(LOG_DIRECTORY): + os.mkdir(LOG_DIRECTORY) + log_handler = logging.handlers.RotatingFileHandler( + LOG_FILENAME, maxBytes=MAX_BYTES, + backupCount=5) + formatter = logging.Formatter( + "%(asctime)s - %(name)s - %(levelname)s - %(message)s") + log_handler.setFormatter(formatter) + else: + log_handler = NullHandler() + yql_logger.addHandler(log_handler) + return yql_logger diff --git a/lib/yql/storage.py b/lib/yql/storage.py new file mode 100644 index 0000000..3e4691d --- /dev/null +++ b/lib/yql/storage.py @@ -0,0 +1,72 @@ +import os +from hashlib import md5 + +from yql import YahooToken + +SECRET = "FDHSJLUREIRPpieruweruwoeirhfsdjf" + + +class TokenStoreError(Exception): + """Generic token storage""" + pass + + +class BaseTokenStore(object): + """Base class for storage""" + + def set(self, name, token): + raise NotImplementedError + + def get(self, name): + raise NotImplementedError + + +class FileTokenStore(BaseTokenStore): + """A simple filesystem based token store + + Note: this is more intended as an example rather than + something for heavy duty production usage. 
+ + """ + + def __init__(self, dir_path, secret=None): + """Initialize token storage""" + + if not os.path.exists(dir_path): + raise TokenStoreError("Path is not valid") + + self.base_dir = dir_path + self.secret = secret or SECRET + + def get_filepath(self, name): + """Build filepath""" + + filename = md5("%s%s" % (name, self.secret)).hexdigest() + filepath = os.path.join(self.base_dir, filename) + + return filepath + + def set(self, name, token): + """Write a token to file""" + + if hasattr(token, 'key'): + token = YahooToken.to_string(token) + + if token: + filepath = self.get_filepath(name) + f_handle = open(filepath, 'w') + f_handle.write(token) + f_handle.close() + + def get(self, name): + """Get a token from the filesystem""" + + filepath = self.get_filepath(name) + + if os.path.exists(filepath): + f_handle = open(filepath, 'r') + token = f_handle.read() + f_handle.close() + + token = YahooToken.from_string(token) + return token diff --git a/lib/yql/tests/__init__.py b/lib/yql/tests/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/lib/yql/tests/test_errors.py b/lib/yql/tests/test_errors.py new file mode 100644 index 0000000..aae33f8 --- /dev/null +++ b/lib/yql/tests/test_errors.py @@ -0,0 +1,29 @@ +import json +from unittest import TestCase + +from yql import NotOneError, YQLError + + +class YQLErrorTest(TestCase): + def test_error_passed_error_string(self): + error = YQLError(resp='some response', content='some content') + self.assertEqual("some content", str(error)) + + def test_error_passed_object(self): + error = YQLError(resp='some response', content={"foo": 1}) + self.assertEqual(repr({"foo": 1}), str(error)) + + def test_error_passed_json(self): + content = { + 'error': { + 'description': 'some description', + } + } + error = YQLError(resp='some response', content=json.dumps(content)) + self.assertEqual("some description", str(error)) + + +class NotOneErrorTest(TestCase): + def test_is_represented_by_message_as_json(self): + error = NotOneError('some message') + self.assertEqual("some message", str(error)) diff --git a/lib/yql/tests/test_live_services.py b/lib/yql/tests/test_live_services.py new file mode 100644 index 0000000..4e35ab4 --- /dev/null +++ b/lib/yql/tests/test_live_services.py @@ -0,0 +1,155 @@ +"""Tests against live services. + +*** SKIPPED BY DEFAULT *** + +These tests won't normally be run, as part of the main test suite but are run by +our hudson instance to tell us should Yahoo's API change in some way that will +break python-yql. + +Note to end-users: These tests are dependent on defining a secrets file with API +keys and other secrets which are required to carry out these tests. 
+ +If the secrets file isn't present the tests are skipped + +""" +import os +import sys +from time import time +from unittest import TestCase + +from nose.plugins.skip import SkipTest + +import yql +from yql.storage import FileTokenStore + + +SECRETS_DIR = os.path.join(os.path.dirname(__file__), "../../../secrets") +CACHE_DIR = os.path.abspath(os.path.join(SECRETS_DIR, "cache")) + +try: + if SECRETS_DIR not in sys.path: + sys.path.append(SECRETS_DIR) + + from secrets import * +except ImportError: + raise SkipTest("Unable to find secrets directory") + + +class LiveTestCase(TestCase): + """A test case containing live tests""" + + def test_write_bitly_url(self): + """Test writing bit.ly url""" + + query = """USE 'http://www.datatables.org/bitly/bit.ly.shorten.xml'; + SELECT * from bit.ly.shorten where login='%s' and apiKey='%s' and + longUrl='http://yahoo.com'""" % (BITLY_USER, BITLY_API_KEY) + + y = yql.TwoLegged(YQL_API_KEY, YQL_SHARED_SECRET) + res = y.execute(query) + assert res.one()["data"]["url"] == "http://yhoo.it/9PPTOr" + + def test_public_request(self): + """Test public two-legged request to flickr""" + query = """select * from flickr.photos.search where + text="panda" and api_key='%s' LIMIT 3""" % FLICKR_API_KEY + y = yql.TwoLegged(YQL_API_KEY, YQL_SHARED_SECRET) + res = y.execute(query) + assert len(res.rows) == 3 + + def test_two_legged_weather_select(self): + """Tests the weather tables using two-legged""" + query = """select * from weather.forecast where location in + (select id from xml where + url='http://xoap.weather.com/search/search?where=london' + and itemPath='search.loc')""" + y = yql.TwoLegged(YQL_API_KEY, YQL_SHARED_SECRET) + res = y.execute(query) + assert len(res.rows) > 1 + + def test_update_social_status(self): + """Updates status""" + y = yql.ThreeLegged(YQL_API_KEY, YQL_SHARED_SECRET) + + timestamp = time() + query = """UPDATE social.profile.status + SET status='Using YQL. %s Update' + WHERE guid=me""" % timestamp + + token_store = FileTokenStore(CACHE_DIR, secret='gsfdsfdsfdsfs') + stored_token = token_store.get('foo') + + if not stored_token: + # Do the dance + request_token, auth_url = y.get_token_and_auth_url() + print "Visit url %s and get a verifier string" % auth_url + verifier = raw_input("Enter the code: ") + token = y.get_access_token(request_token, verifier) + token_store.set('foo', token) + else: + # Check access_token is within 1hour-old and if not refresh it + # and stash it + token = y.check_token(stored_token) + if token != stored_token: + token_store.set('foo', token) + + res = y.execute(query, token=token) + assert res.rows[0] == "ok" + new_query = """select message from social.profile.status where guid=me""" + res = y.execute(new_query, token=token) + assert res.rows[0].get("message") == "Using YQL. 
%s Update" % timestamp + + def test_update_meme_status(self): + """Updates status""" + y = yql.ThreeLegged(YQL_API_KEY, YQL_SHARED_SECRET) + query = 'INSERT INTO meme.user.posts (type, content) VALUES("text", "test with pythonyql")' + token_store = FileTokenStore(CACHE_DIR, secret='fjdsfjllds') + + store_name = "meme" + stored_token = token_store.get(store_name) + if not stored_token: + # Do the dance + request_token, auth_url = y.get_token_and_auth_url() + print "Visit url %s and get a verifier string" % auth_url + verifier = raw_input("Enter the code: ") + token = y.get_access_token(request_token, verifier) + token_store.set(store_name, token) + else: + # Check access_token is within 1hour-old and if not refresh it + # and stash it + token = y.check_token(stored_token) + if token != stored_token: + token_store.set(store_name, token) + + # post a meme + res = y.execute(query, token=token) + assert y.uri == "http://query.yahooapis.com/v1/yql" + assert res.rows[0].get("message") == "ok" + + pubid = None + if res.rows[0].get("post") and res.rows[0]["post"].get("pubid"): + pubid = res.rows[0]["post"]["pubid"] + + # Delete the post we've just created + query = 'DELETE FROM meme.user.posts WHERE pubid=@pubid' + res2 = y.execute(query, token=token, params={"pubid": pubid}) + assert res2.rows[0].get("message") == "ok" + + def test_check_env_var(self): + """Testing env variable""" + y = yql.Public() + env = "http://datatables.org/alltables.env" + query = "SHOW tables;" + res = y.execute(query, env=env) + assert res.count >= 800 + + def test_xpath_works(self): + y = yql.Public() + query = """SELECT * FROM html + WHERE url='http://google.co.uk' + AND xpath="//input[contains(@name, 'q')]" + LIMIT 10""" + res = y.execute(query) + assert res.rows[0].get("title") == "Search" + + diff --git a/lib/yql/tests/test_logger.py b/lib/yql/tests/test_logger.py new file mode 100644 index 0000000..d6dfe49 --- /dev/null +++ b/lib/yql/tests/test_logger.py @@ -0,0 +1,23 @@ +import os +import shutil +from unittest import TestCase + +import yql.logger + + +class LoggerTest(TestCase): + def setUp(self): + self._logging = os.environ.get('YQL_LOGGING', '') + + def tearDown(self): + os.environ['YQL_LOGGING'] = self._logging + + def test_is_instantiated_even_if_log_dir_doesnt_exist(self): + os.environ['YQL_LOGGING'] = '1' + if os.path.exists(yql.logger.LOG_DIRECTORY): + shutil.rmtree(yql.logger.LOG_DIRECTORY) + yql.logger.get_logger() + + def test_logs_message_to_file(self): + os.environ['YQL_LOGGING'] = '1' + yql.logger.get_logger() diff --git a/lib/yql/tests/test_query_placeholders.py b/lib/yql/tests/test_query_placeholders.py new file mode 100644 index 0000000..5acf47b --- /dev/null +++ b/lib/yql/tests/test_query_placeholders.py @@ -0,0 +1,77 @@ +"""Set of tests for the placeholder checking""" + +from unittest import TestCase + +from nose.tools import raises + +import yql + + +class PublicTest(TestCase): + @raises(ValueError) + def test_empty_args_raises_valueerror(self): + y = yql.Public() + query = "SELECT * from foo where dog=@dog" + params = {} + y.execute(query, params) + + @raises(ValueError) + def test_incorrect_args_raises_valueerror(self): + y = yql.Public() + query = "SELECT * from foo where dog=@dog" + params = {'test': 'fail'} + y.execute(query, params) + + @raises(ValueError) + def test_params_raises_when_not_dict(self): + y = yql.Public() + query = "SELECT * from foo where dog=@dog" + params = ['test'] + y.execute(query, params) + + @raises(ValueError) + def test_unecessary_args_raises_valueerror(self): + y 
= yql.Public() + query = "SELECT * from foo where dog='test'" + params = {'test': 'fail'} + y.execute(query, params) + + @raises(ValueError) + def test_incorrect_type_raises_valueerror(self): + y = yql.Public() + query = "SELECT * from foo where dog=@test" + params = ('fail') + y.execute(query, params) + + def test_placeholder_regex_one(self): + y = yql.Public() + query = "SELECT * from foo where email='foo@foo.com'" + placeholders = y.get_placeholder_keys(query) + self.assertEqual(placeholders, []) + + def test_placeholder_regex_two(self): + y = yql.Public() + query = "SELECT * from foo where email=@foo'" + placeholders = y.get_placeholder_keys(query) + self.assertEqual(placeholders, ['foo']) + + def test_placeholder_regex_three(self): + y = yql.Public() + query = "SELECT * from foo where email=@foo and test=@bar'" + placeholders = y.get_placeholder_keys(query) + self.assertEqual(placeholders, ['foo', 'bar']) + + def test_placeholder_regex_four(self): + y = yql.Public() + query = "SELECT * from foo where foo='bar' LIMIT @foo" + placeholders = y.get_placeholder_keys(query) + self.assertEqual(placeholders, ['foo']) + + def test_placeholder_regex_five(self): + y = yql.Public() + query = """SELECT * from foo + where foo='bar' LIMIT + @foo""" + placeholders = y.get_placeholder_keys(query) + self.assertEqual(placeholders, ['foo']) + diff --git a/lib/yql/tests/test_requests_responses.py b/lib/yql/tests/test_requests_responses.py new file mode 100644 index 0000000..a4e87d4 --- /dev/null +++ b/lib/yql/tests/test_requests_responses.py @@ -0,0 +1,248 @@ +from email import message_from_file +import os +from unittest import TestCase +import urlparse +from urllib import urlencode +try: + from urlparse import parse_qsl +except ImportError: + from cgi import parse_qsl + +from nose.tools import raises +from nose import with_setup +import oauth2 as oauth +import httplib2 + +import yql + + +HTTP_SRC_DIR = os.path.join(os.path.dirname(__file__), "http_src/") + + +class FileDataHttpReplacement(object): + """Build a stand-in for httplib2.Http that takes its + response headers and bodies from files on disk + + http://bitworking.org/news/172/Test-stubbing-httplib2 + + """ + + def __init__(self, cache=None, timeout=None): + self.hit_counter = {} + + def request(self, uri, method="GET", body=None, headers=None, redirections=5): + path = urlparse.urlparse(uri)[2] + fname = os.path.join(HTTP_SRC_DIR, path[1:]) + + if not os.path.exists(fname): + index = self.hit_counter.get(fname, 1) + + if os.path.exists(fname + "." + str(index)): + self.hit_counter[fname] = index + 1 + fname = fname + "." 
+ str(index) + + if os.path.exists(fname): + f = file(fname, "r") + response = message_from_file(f) + f.close() + body = response.get_payload() + response_headers = httplib2.Response(response) + return (response_headers, body) + else: + return (httplib2.Response({"status": "404"}), "") + + def add_credentials(self, name, password): + pass + + +class RequestDataHttpReplacement: + """Create an httplib stub that returns request data""" + + def __init__(self): + pass + + def request(self, uri, *args, **kwargs): + """return the request data""" + return uri, args, kwargs + + +class TestPublic(yql.Public): + """Subclass of YQL to allow returning of the request data""" + + execute = yql.Public.get_uri + + +class TestTwoLegged(yql.TwoLegged): + """Subclass of YQLTwoLegged to allow returning of the request data""" + + execute = yql.TwoLegged.get_uri + + +class TestThreeLegged(yql.ThreeLegged): + """Subclass of YQLTwoLegged to allow returning of the request data""" + + execute = yql.ThreeLegged.get_uri + + +class StubbedHttpTestCase(TestCase): + stub = None + + def setUp(self): + self._http = httplib2.Http + httplib2.Http = self.stub + + def tearDown(self): + httplib2.Http = self._http + + +class PublicStubbedRequestTest(StubbedHttpTestCase): + stub = RequestDataHttpReplacement + + def test_urlencoding_for_public_yql(self): + query = 'SELECT * from foo' + y = TestPublic(httplib2_inst=httplib2.Http()) + uri = y.execute(query) + self.assertEqual(uri, "http://query.yahooapis.com/v1/public/yql?q=SELECT+%2A+from+foo&format=json") + + def test_env_for_public_yql(self): + query = 'SELECT * from foo' + y = TestPublic(httplib2_inst=httplib2.Http()) + uri = y.execute(query, env="http://foo.com") + self.assertTrue(uri.find(urlencode({"env":"http://foo.com"})) > -1) + + def test_name_param_inserted_for_public_yql(self): + query = 'SELECT * from foo WHERE dog=@dog' + y = TestPublic(httplib2_inst=httplib2.Http()) + uri = y.execute(query, {"dog": "fifi"}) + self.assertTrue(uri.find('dog=fifi') >-1) + + +class PublicStubbedFromFileTest(StubbedHttpTestCase): + stub = FileDataHttpReplacement + + def test_json_response_from_file(self): + query = 'SELECT * from foo WHERE dog=@dog' + y = yql.Public(httplib2_inst=httplib2.Http()) + content = y.execute(query, {"dog": "fifi"}) + self.assertEqual(content.count, 3) + + +class TwoLeggedTest(TestCase): + @raises(TypeError) + def test_yql_with_2leg_auth_raises_typerror(self): + TestTwoLegged() + + def test_api_key_and_secret_attrs(self): + y = yql.TwoLegged('test-api-key', 'test-secret') + self.assertEqual(y.api_key, 'test-api-key') + self.assertEqual(y.secret, 'test-secret') + + def test_get_two_legged_request_keys(self): + y = yql.TwoLegged('test-api-key', 'test-secret') + # Accessed this was because it's private + request = y._TwoLegged__two_legged_request('http://google.com') + self.assertEqual(set(['oauth_nonce', 'oauth_version', 'oauth_timestamp', + 'oauth_consumer_key', 'oauth_signature_method', 'oauth_body_hash', + 'oauth_version', 'oauth_signature']), set(request.keys())) + + def test_get_two_legged_request_values(self): + y = yql.TwoLegged('test-api-key', 'test-secret') + # Accessed this was because it's private + request = y._TwoLegged__two_legged_request('http://google.com') + self.assertEqual(request['oauth_consumer_key'], 'test-api-key') + self.assertEqual(request['oauth_signature_method'], 'HMAC-SHA1') + self.assertEqual(request['oauth_version'], '1.0') + + def test_get_two_legged_request_param(self): + y = yql.TwoLegged('test-api-key', 'test-secret') + # Accessed 
this way because it's private + request = y._TwoLegged__two_legged_request('http://google.com', + {"test-param": "test"}) + self.assertEqual(request.get('test-param'), 'test') + + +class TwoLeggedStubbedRequestTest(StubbedHttpTestCase): + stub = RequestDataHttpReplacement + + def test_request_for_two_legged(self): + query = 'SELECT * from foo' + y = TestTwoLegged('test-api-key', 'test-secret', httplib2_inst=httplib2.Http()) + signed_url = y.execute(query) + qs = dict(parse_qsl(signed_url.split('?')[1])) + self.assertEqual(qs['q'], query) + self.assertEqual(qs['format'], 'json') + + +class TwoLeggedStubbedFromFileTest(StubbedHttpTestCase): + stub = FileDataHttpReplacement + + def test_get_two_legged_from_file(self): + query = 'SELECT * from foo' + y = yql.TwoLegged('test-api-key', 'test-secret', httplib2_inst=httplib2.Http()) + # Accessed this was because it's private + self.assertTrue(y.execute(query) is not None) + + +class ThreeLeggedTest(TestCase): + @raises(TypeError) + def test_yql_with_3leg_auth_raises_typerror(self): + TestThreeLegged() + + def test_api_key_and_secret_attrs2(self): + y = yql.ThreeLegged('test-api-key', 'test-secret') + self.assertEqual(y.api_key, 'test-api-key') + self.assertEqual(y.secret, 'test-secret') + + def test_get_base_params(self): + y = yql.ThreeLegged('test-api-key', 'test-secret') + result = y.get_base_params() + self.assertEqual(set(['oauth_nonce', 'oauth_version', 'oauth_timestamp']), + set(result.keys())) + + @raises(ValueError) + def test_raises_for_three_legged_with_no_token(self): + query = 'SELECT * from foo' + y = TestThreeLegged('test-api-key', 'test-secret', httplib2_inst=httplib2.Http()) + y.execute(query) + + +class ThreeLeggedStubbedRequestTest(StubbedHttpTestCase): + stub = RequestDataHttpReplacement + + def test_request_for_three_legged(self): + query = 'SELECT * from foo' + y = TestThreeLegged('test-api-key', 'test-secret', + httplib2_inst=httplib2.Http()) + token = oauth.Token.from_string( + 'oauth_token=foo&oauth_token_secret=bar') + signed_url = y.execute(query, token=token) + qs = dict(parse_qsl(signed_url.split('?')[1])) + self.assertEqual(qs['q'], query) + self.assertEqual(qs['format'], 'json') + + +class ThreeLeggedStubbedFromFileTest(StubbedHttpTestCase): + stub = FileDataHttpReplacement + + def test_three_legged_execution(self): + query = 'SELECT * from foo WHERE dog=@dog' + y = yql.ThreeLegged('test','test2', httplib2_inst=httplib2.Http()) + token = yql.YahooToken('test', 'test2') + content = y.execute(query, {"dog": "fifi"}, token=token) + self.assertEqual(content.count, 3) + + @raises(ValueError) + def test_three_legged_execution_raises_value_error_with_invalid_uri(self): + y = yql.ThreeLegged('test','test2', httplib2_inst=httplib2.Http()) + y.uri = "fail" + token = yql.YahooToken('tes1t', 'test2') + y.execute("SELECT foo meh meh ", token=token) + + def test_get_access_token_request3(self): + y = yql.ThreeLegged('test', 'test-does-not-exist', + httplib2_inst=httplib2.Http()) + new_token = yql.YahooToken('test', 'test2') + new_token.session_handle = 'sess_handle_test' + token = y.refresh_token(token=new_token) + self.assertTrue(hasattr(token, 'key')) + self.assertTrue(hasattr(token, 'secret')) diff --git a/lib/yql/tests/test_services.py b/lib/yql/tests/test_services.py new file mode 100644 index 0000000..b56a503 --- /dev/null +++ b/lib/yql/tests/test_services.py @@ -0,0 +1,12 @@ +from unittest import TestCase + +from nose.tools import raises + +import yql + + +class PublicTest(TestCase): + @raises(ValueError) + def 
diff --git a/lib/yql/tests/test_services.py b/lib/yql/tests/test_services.py
new file mode 100644
index 0000000..b56a503
--- /dev/null
+++ b/lib/yql/tests/test_services.py
@@ -0,0 +1,12 @@
+from unittest import TestCase
+
+from nose.tools import raises
+
+import yql
+
+
+class PublicTest(TestCase):
+    @raises(ValueError)
+    def test_cannot_use_unrecognizable_endpoint(self):
+        y = yql.Public()
+        y.endpoint = 'some-strange-endpoint'
diff --git a/lib/yql/tests/test_storage.py b/lib/yql/tests/test_storage.py
new file mode 100644
index 0000000..ccff159
--- /dev/null
+++ b/lib/yql/tests/test_storage.py
@@ -0,0 +1,61 @@
+import os
+import tempfile
+from unittest import TestCase
+
+from nose.tools import raises
+
+from yql import YahooToken
+from yql.storage import BaseTokenStore, FileTokenStore, TokenStoreError
+
+
+class BaseTokenStoreTest(TestCase):
+    @raises(NotImplementedError)
+    def test_must_implement_set(self):
+        class FooStore(BaseTokenStore):
+            pass
+        store = FooStore()
+        store.set('some name', 'some token')
+
+    @raises(NotImplementedError)
+    def test_must_implement_get(self):
+        class FooStore(BaseTokenStore):
+            pass
+        store = FooStore()
+        store.get('some name')
+
+
+class FileTokenStoreTest(TestCase):
+    @raises(TokenStoreError)
+    def test_must_be_instanced_with_an_existant_path(self):
+        FileTokenStore('/some/inexistant/path')
+
+    def test_saves_token_string_to_filesystem(self):
+        directory = tempfile.mkdtemp()
+        store = FileTokenStore(directory)
+        store.set('foo', '?key=some-token')
+        with open(store.get_filepath('foo')) as stored_file:
+            self.assertTrue('some-token' in stored_file.read())
+
+    def test_retrieves_token_from_filesystem(self):
+        directory = tempfile.mkdtemp()
+        store = FileTokenStore(directory)
+        store.set('foo', '?key=%s&oauth_token=some-oauth-token&'\
+                  'oauth_token_secret=some-token-secret' % 'some-token')
+        token = store.get('foo')
+        self.assertTrue('some-token' in token.to_string())
+
+    def test_cannot_retrieve_token_if_path_doesnt_exist(self):
+        directory = tempfile.mkdtemp()
+        store = FileTokenStore(directory)
+        store.set('foo', '?key=%s&oauth_token=some-oauth-token&'\
+                  'oauth_token_secret=some-token-secret' % 'some-token')
+        os.remove(store.get_filepath('foo'))
+        self.assertTrue(store.get('foo') is None)
+
+    def test_saves_token_to_filesystem(self):
+        directory = tempfile.mkdtemp()
+        store = FileTokenStore(directory)
+        token = YahooToken('some-token', 'some-secret')
+        store.set('foo', token)
+        with open(store.get_filepath('foo')) as stored_file:
+            self.assertTrue('some-token' in stored_file.read())
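FileTokenStore, covered above, keeps serialized tokens on disk keyed by a name. A small usage sketch based only on behaviour these tests assert; the directory and the key name are arbitrary.

    import tempfile

    from yql import YahooToken
    from yql.storage import FileTokenStore

    # The directory must already exist; a missing path raises TokenStoreError.
    cache_dir = tempfile.mkdtemp()
    store = FileTokenStore(cache_dir)

    # Either a raw token string or a YahooToken can be stored under a name.
    store.set("alice", YahooToken("some-token", "some-secret"))

    # get() rebuilds a token from the stored string, or returns None if the
    # backing file has since been removed.
    token = store.get("alice")
    assert token is None or "some-token" in token.to_string()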
diff --git a/lib/yql/tests/test_utilities.py b/lib/yql/tests/test_utilities.py
new file mode 100644
index 0000000..ccb5289
--- /dev/null
+++ b/lib/yql/tests/test_utilities.py
@@ -0,0 +1,40 @@
+from unittest import TestCase
+
+from yql.utils import get_http_method
+
+
+class UtilitiesTest(TestCase):
+    def test_finds_get_method_for_select_query(self):
+        self.assertEqual(get_http_method("SELECT foo"), "GET")
+
+    def test_finds_get_method_for_select_query_with_leading_space(self):
+        self.assertEqual(get_http_method(" SELECT foo"), "GET")
+
+    def test_finds_get_method_for_lowercase_select_query(self):
+        self.assertEqual(get_http_method("select foo"), "GET")
+
+    def test_finds_post_method_for_insert_query(self):
+        self.assertEqual(get_http_method("INSERT into"), "POST")
+
+    def test_finds_post_method_for_multiline_insert_query(self):
+        query = """
+        INSERT INTO yql.queries.query (name, query)
+        VALUES ("weather", "SELECT * FROM weather.forecast
+        WHERE location=90210")
+        """
+        self.assertEqual(get_http_method(query), "POST")
+
+    def test_finds_put_method_for_update_query(self):
+        self.assertEqual(get_http_method("update foo"), "PUT")
+
+    def test_finds_post_method_for_delete_query(self):
+        self.assertEqual(get_http_method("DELETE from"), "POST")
+
+    def test_finds_post_method_for_lowercase_delete_query(self):
+        self.assertEqual(get_http_method("delete from"), "POST")
+
+    def test_finds_get_method_for_show_query(self):
+        self.assertEqual(get_http_method("SHOW tables"), "GET")
+
+    def test_finds_get_method_for_describe_query(self):
+        self.assertEqual(get_http_method("DESC tablename"), "GET")
diff --git a/lib/yql/tests/test_yahoo_token.py b/lib/yql/tests/test_yahoo_token.py
new file mode 100644
index 0000000..f4f0162
--- /dev/null
+++ b/lib/yql/tests/test_yahoo_token.py
@@ -0,0 +1,81 @@
+from unittest import TestCase
+
+from nose.tools import raises
+try:
+    from urlparse import parse_qs, parse_qsl
+except ImportError:
+    from cgi import parse_qs, parse_qsl
+
+import yql
+
+
+class YahooTokenTest(TestCase):
+    def test_create_yahoo_token(self):
+        token = yql.YahooToken('test-key', 'test-secret')
+        self.assertEqual(token.key, 'test-key')
+        self.assertEqual(token.secret, 'test-secret')
+
+    def test_y_token_to_string(self):
+        token = yql.YahooToken('test-key', 'test-secret')
+        token_to_string = token.to_string()
+        string_data = dict(parse_qsl(token_to_string))
+        self.assertEqual(string_data.get('oauth_token'), 'test-key')
+        self.assertEqual(string_data.get('oauth_token_secret'), 'test-secret')
+
+    def test_y_token_to_string2(self):
+        token = yql.YahooToken('test-key', 'test-secret')
+
+        token.timestamp = '1111'
+        token.session_handle = 'poop'
+        token.callback_confirmed = 'basilfawlty'
+
+        token_to_string = token.to_string()
+        string_data = dict(parse_qsl(token_to_string))
+        self.assertEqual(string_data.get('oauth_token'), 'test-key')
+        self.assertEqual(string_data.get('oauth_token_secret'), 'test-secret')
+        self.assertEqual(string_data.get('token_creation_timestamp'), '1111')
+        self.assertEqual(string_data.get('oauth_callback_confirmed'), 'basilfawlty')
+        self.assertEqual(string_data.get('oauth_session_handle'), 'poop')
+
+    def test_y_token_from_string(self):
+        token_string = "oauth_token=foo&oauth_token_secret=bar&"\
+                       "oauth_session_handle=baz&token_creation_timestamp=1111"
+        token_from_string = yql.YahooToken.from_string(token_string)
+        self.assertEqual(token_from_string.key, 'foo')
+        self.assertEqual(token_from_string.secret, 'bar')
+        self.assertEqual(token_from_string.session_handle, 'baz')
+        self.assertEqual(token_from_string.timestamp, '1111')
+
+    @raises(ValueError)
+    def test_y_token_raises_value_error(self):
+        yql.YahooToken.from_string('')
+
+    @raises(ValueError)
+    def test_y_token_raises_value_error2(self):
+        yql.YahooToken.from_string('foo')
+
+    @raises(ValueError)
+    def test_y_token_raises_value_error3(self):
+        yql.YahooToken.from_string('oauth_token=bar')
+
+    @raises(ValueError)
+    def test_y_token_raises_value_error4(self):
+        yql.YahooToken.from_string('oauth_token_secret=bar')
+
+    @raises(AttributeError)
+    def test_y_token_without_timestamp_raises(self):
+        token = yql.YahooToken('test', 'test2')
+        y = yql.ThreeLegged('test', 'test2')
+        y.check_token(token)
+
+    def test_y_token_without_timestamp_raises2(self):
+
+        def refresh_token_replacement(token):
+            return 'replaced'
+
+        y = yql.ThreeLegged('test', 'test2')
+        y.refresh_token = refresh_token_replacement
+
+        token = yql.YahooToken('test', 'test2')
+        token.timestamp = 11111
+        self.assertEqual(y.check_token(token), 'replaced')
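YahooToken round-trips through a query-string style serialization, which is what makes the file-based store in test_storage.py possible. A brief sketch using only behaviour asserted above; the field values are placeholders.

    import yql

    token = yql.YahooToken("test-key", "test-secret")
    token.session_handle = "some-session-handle"
    token.timestamp = "1111"

    # to_string() emits oauth_token/oauth_token_secret plus the optional fields.
    serialized = token.to_string()

    # from_string() rebuilds the token and raises ValueError when the string is
    # empty or is missing oauth_token/oauth_token_secret.
    restored = yql.YahooToken.from_string(serialized)
    assert restored.key == "test-key"
    assert restored.secret == "test-secret"
    assert restored.session_handle == "some-session-handle"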
diff --git a/lib/yql/tests/test_yql_object.py b/lib/yql/tests/test_yql_object.py
new file mode 100644
index 0000000..1e6743c
--- /dev/null
+++ b/lib/yql/tests/test_yql_object.py
@@ -0,0 +1,105 @@
+"""Tests for the YQL object"""
+
+import json
+from unittest import TestCase
+
+from nose.tools import raises
+
+from yql import YQLObj, NotOneError
+
+
+data_dict = json.loads("""{"query":{"count":"3","created":"2009-11-20T12:11:56Z","lang":"en-US","updated":"2009-11-20T12:11:56Z","uri":"http://query.yahooapis.com/v1/yql?q=select+*+from+flickr.photos.search+where+text%3D%22panda%22+limit+3","diagnostics":{"publiclyCallable":"true","url":{"execution-time":"742","content":"http://api.flickr.com/services/rest/?method=flickr.photos.search&text=panda&page=1&per_page=10"},"user-time":"745","service-time":"742","build-version":"3805"},"results":{"photo":[{"farm":"3","id":"4117944207","isfamily":"0","isfriend":"0","ispublic":"1","owner":"12346075@N00","secret":"ce1f6092de","server":"2510","title":"Pandas"},{"farm":"3","id":"4118710292","isfamily":"0","isfriend":"0","ispublic":"1","owner":"12346075@N00","secret":"649632a3e2","server":"2754","title":"Pandas"},{"farm":"3","id":"4118698318","isfamily":"0","isfriend":"0","ispublic":"1","owner":"28451051@N02","secret":"ec0b508684","server":"2586","title":"fuzzy flowers (Kalanchoe tomentosa)"}]}}}""")
+data_dict2 = json.loads("""{"query":{"count":"1","created":"2009-11-20T12:11:56Z","lang":"en-US","updated":"2009-11-20T12:11:56Z","uri":"http://query.yahooapis.com/v1/yql?q=select+*+from+flickr.photos.search+where+text%3D%22panda%22+limit+3","diagnostics":{"publiclyCallable":"true","url":{"execution-time":"742","content":"http://api.flickr.com/services/rest/?method=flickr.photos.search&text=panda&page=1&per_page=10"},"user-time":"745","service-time":"742","build-version":"3805"},"results":{"photo":{"farm":"3","id":"4117944207","isfamily":"0","isfriend":"0","ispublic":"1","owner":"12346075@N00","secret":"ce1f6092de","server":"2510","title":"Pandas"}}}}""")
+
+
+yqlobj = YQLObj(data_dict)
+yqlobj2 = YQLObj({})
+yqlobj3 = YQLObj(data_dict2)
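The module-level fixtures above wrap raw YQL JSON responses in YQLObj, which exposes the "query" section of the response through read-only attributes. A pared-down sketch of the accessors exercised by the tests that follow; the miniature response here is illustrative and not taken from the patch.

    import json

    from yql import NotOneError, YQLObj

    raw = json.loads(
        '{"query": {"count": "2", "lang": "en-US",'
        ' "results": {"photo": [{"id": "1", "title": "Pandas"},'
        ' {"id": "2", "title": "More pandas"}]}}}')
    obj = YQLObj(raw)

    assert obj.count == 2                       # "count" comes back as an int
    assert obj.lang == u"en-US"
    assert [row["id"] for row in obj.rows] == ["1", "2"]

    try:
        obj.one()                               # more than one row available
    except NotOneError:
        pass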
+
+
+class YQLObjTest(TestCase):
+    @raises(AttributeError)
+    def test_yql_object_one(self):
+        """Test that assigning to the query attribute raises AttributeError"""
+        yqlobj.query = 1
+
+    def test_yqlobj_uri(self):
+        """Test that the query uri is as expected."""
+        self.assertEqual(yqlobj.uri, u"http://query.yahooapis.com/v1/yql?q=select+*+"\
+                         "from+flickr.photos.search+where+text%3D%22panda%22+limit+3")
+
+    def test_yqlobj_query(self):
+        """Test retrieval of the actual query"""
+        self.assertEqual(yqlobj.query, u'select * from flickr.photos.search '\
+                         'where text="panda" limit 3')
+
+    def test_yqlobj_count(self):
+        """Check we have 3 records"""
+        self.assertEqual(yqlobj.count, 3)
+
+    def test_yqlobj_lang(self):
+        """Check the lang attr."""
+        self.assertEqual(yqlobj.lang, u"en-US")
+
+    def test_yqlobj_results(self):
+        """Check the results."""
+        expected_results = {u'photo': [
+            {u'isfamily': u'0',
+             u'title': u'Pandas',
+             u'farm': u'3',
+             u'ispublic': u'1',
+             u'server': u'2510',
+             u'isfriend': u'0',
+             u'secret': u'ce1f6092de',
+             u'owner': u'12346075@N00',
+             u'id': u'4117944207'},
+            {u'isfamily': u'0',
+             u'title': u'Pandas',
+             u'farm': u'3',
+             u'ispublic': u'1',
+             u'server': u'2754',
+             u'isfriend': u'0',
+             u'secret': u'649632a3e2',
+             u'owner': u'12346075@N00',
+             u'id': u'4118710292'},
+            {u'isfamily': u'0',
+             u'title': u'fuzzy flowers (Kalanchoe tomentosa)',
+             u'farm': u'3',
+             u'ispublic': u'1',
+             u'server': u'2586',
+             u'isfriend': u'0',
+             u'secret': u'ec0b508684',
+             u'owner': u'28451051@N02',
+             u'id': u'4118698318'}
+        ]}
+        self.assertEqual(yqlobj.results, expected_results)
+
+    def test_yqlobj_raw(self):
+        """Check the raw attr."""
+        self.assertEqual(yqlobj.raw, data_dict.get('query'))
+
+    def test_yqlobj_diagnostics(self):
+        """Check the diagnostics"""
+        self.assertEqual(yqlobj.diagnostics, data_dict.get('query').get('diagnostics'))
+
+    def test_query_is_none(self):
+        """Check query is None with no data."""
+        self.assertTrue(yqlobj2.query is None)
+
+    def test_rows(self):
+        """Test we can iterate over the rows."""
+        stuff = []
+        for row in yqlobj.rows:
+            stuff.append(row.get('server'))
+
+        self.assertEqual(stuff, [u'2510', u'2754', u'2586'])
+
+    @raises(NotOneError)
+    def test_one(self):
+        """Test that one() raises NotOneError when there is more than one result"""
+        yqlobj.one()
+
+    def test_one_with_one_result(self):
+        """Test accessing data with one result."""
+        res = yqlobj3.one()
+        self.assertEqual(res.get("title"), "Pandas")
diff --git a/lib/yql/utils.py b/lib/yql/utils.py
new file mode 100644
index 0000000..bcf1147
--- /dev/null
+++ b/lib/yql/utils.py
@@ -0,0 +1,38 @@
+"""Utility functions"""
+import re
+
+
+METHOD_MAP = (
+    ("insert", "POST"),
+    ("update", "PUT"),
+    ("delete", "POST"),
+)
+MULTI_PLUS = re.compile(r"\+{2,}")
+MULTI_SPACE = re.compile(r" {2,}")
+
+
+def get_http_method(query):
+    """Work out whether the query should be sent as GET, POST or PUT
+
+    Note that YQL DELETE statements are sent over POST.
+    """
+    lower_query = query.strip().lower()
+
+    http_method = "GET"
+    for method in METHOD_MAP:
+        if method[0] in lower_query:
+            http_method = method[1]
+            break
+
+    return http_method
+
+
+def clean_url(url):
+    """Cleans up a uri/url"""
+    url = url.replace("\n", "")
+    url = MULTI_PLUS.sub("+", url)
+    return url
+
+
+def clean_query(query):
+    """Cleans up a query"""
+    query = query.replace("\n", "")
+    query = MULTI_SPACE.sub(" ", query)
+    return query
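A quick illustration of the helpers defined in utils.py: get_http_method picks the HTTP verb from the query text (YQL DELETE statements go over POST), while clean_query and clean_url strip newlines and collapse runs of spaces or plus signs. The query strings below are arbitrary.

    from yql.utils import clean_query, clean_url, get_http_method

    assert get_http_method("SELECT * FROM weather.forecast") == "GET"
    assert get_http_method("INSERT INTO yql.queries.query (name) VALUES ('x')") == "POST"
    assert get_http_method("UPDATE foo SET bar='baz'") == "PUT"
    assert get_http_method("DELETE FROM foo WHERE bar='baz'") == "POST"

    assert clean_query("SELECT *\n FROM   foo") == "SELECT * FROM foo"
    assert clean_url("http://example.com/a+++b") == "http://example.com/a+b"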