@@ -1578,7 +1531,7 @@ class TestSoupSelector(TreeTest):
         self.assertEqual(len(self.soup.select('del')), 0)

     def test_invalid_tag(self):
-        self.assertRaises(ValueError, self.soup.select, 'tag%t')
+        self.assertEqual(len(self.soup.select('tag%t')), 0)

     def test_header_tags(self):
         self.assertSelectMultiple(
@@ -1611,7 +1564,7 @@ class TestSoupSelector(TreeTest):
         for el in els:
             self.assertEqual(el.name, 'p')
         self.assertEqual(els[1]['class'], ['onep'])
-        self.assertFalse(els[0].has_attr('class'))
+        self.assertFalse(els[0].has_key('class'))

     def test_a_bunch_of_emptys(self):
         for selector in ('div#main del', 'div#main div.oops', 'div div#main'):
@@ -1631,9 +1584,6 @@ class TestSoupSelector(TreeTest):
         self.assertSelects('.s1 > a', ['s1a1', 's1a2'])
         self.assertSelects('.s1 > a span', ['s1a2s1'])

-    def test_child_selector_id(self):
-        self.assertSelects('.s1 > a#s1a2 span', ['s1a2s1'])
-
     def test_attribute_equals(self):
         self.assertSelectMultiple(
             ('p[class="onep"]', ['p1']),
@@ -1740,33 +1690,6 @@ class TestSoupSelector(TreeTest):
             ('p[blah]', []),
         )

-    def test_nth_of_type(self):
-        # Try to select first paragraph
-        els = self.soup.select('div#inner p:nth-of-type(1)')
-        self.assertEqual(len(els), 1)
-        self.assertEqual(els[0].string, u'Some text')
-
-        # Try to select third paragraph
-        els = self.soup.select('div#inner p:nth-of-type(3)')
-        self.assertEqual(len(els), 1)
-        self.assertEqual(els[0].string, u'Another')
-
-        # Try to select (non-existent!) fourth paragraph
-        els = self.soup.select('div#inner p:nth-of-type(4)')
-        self.assertEqual(len(els), 0)
-
-        # Pass in an invalid value.
-        self.assertRaises(
-            ValueError, self.soup.select, 'div p:nth-of-type(0)')
-
-    def test_nth_of_type_direct_descendant(self):
-        els = self.soup.select('div#inner > p:nth-of-type(1)')
-        self.assertEqual(len(els), 1)
-        self.assertEqual(els[0].string, u'Some text')
-
-    def test_id_child_selector_nth_of_type(self):
-        self.assertSelects('#inner > p:nth-of-type(2)', ['p1'])
-
@@ -1775,26 +1698,3 @@ class TestSoupSelector(TreeTest):
     def test_select_on_element(self):
         # Other tests operate on the tree; this operates on an element
         # within the tree.
         el = self.soup.find('div', id='main')
         selected = el.select('div')
         # The <div id="inner"> tag was selected. The <div id="footer">
         # tag was not.
         self.assertSelectsIDs(selected, ['inner'])
-
-    def test_overspecified_child_id(self):
-        self.assertSelects(".fancy #inner", ['inner'])
-        self.assertSelects(".normal #inner", [])
-
-    def test_adjacent_sibling_selector(self):
-        self.assertSelects('#p1 + h2', ['header2'])
-        self.assertSelects('#p1 + h2 + p', ['pmulti'])
-        self.assertSelects('#p1 + #header2 + .class1', ['pmulti'])
-        self.assertEqual([], self.soup.select('#p1 + p'))
-
-    def test_general_sibling_selector(self):
-        self.assertSelects('#p1 ~ h2', ['header2', 'header3'])
-        self.assertSelects('#p1 ~ #header2', ['header2'])
-        self.assertSelects('#p1 ~ h2 + a', ['me'])
-        self.assertSelects('#p1 ~ h2 + [rel="me"]', ['me'])
-        self.assertEqual([], self.soup.select('#inner ~ h2'))
-
-    def test_dangling_combinator(self):
-        self.assertRaises(ValueError, self.soup.select, 'h1 >')
-
-    def test_sibling_combinator_wont_select_same_tag_twice(self):
-        self.assertSelects('p[lang] ~ p', ['lang-en-gb', 'lang-en-us', 'lang-fr'])
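
Taken together, the hunks above roll the vendored Beautiful Soup selector tests back to older semantics: an unparseable selector is expected to match nothing instead of raising ValueError, attribute presence is checked with the older has_key() method rather than has_attr(), and the nth-of-type and sibling-combinator tests are dropped. A minimal sketch of the post-patch behaviour these tests encode, assuming the vendored bs4 version from this tree (the markup is illustrative):

```python
from bs4 import BeautifulSoup

soup = BeautifulSoup('<div id="main"><p class="onep">One</p><p>Two</p></div>')

# An invalid selector no longer raises ValueError; it simply matches nothing.
assert soup.select('tag%t') == []

# Attribute presence is queried with the older has_key() method
# rather than has_attr().
first, second = soup.select('p')
assert first.has_key('class')
assert not second.has_key('class')
```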
diff --git a/lib/pygeoip/COPYING b/lib/pygeoip/COPYING
deleted file mode 100644
index 02bbb60..0000000
--- a/lib/pygeoip/COPYING
+++ /dev/null
@@ -1,165 +0,0 @@
- GNU LESSER GENERAL PUBLIC LICENSE
- Version 3, 29 June 2007
-
- Copyright (C) 2007 Free Software Foundation, Inc. <http://fsf.org/>
- Everyone is permitted to copy and distribute verbatim copies
- of this license document, but changing it is not allowed.
-
-
- This version of the GNU Lesser General Public License incorporates
-the terms and conditions of version 3 of the GNU General Public
-License, supplemented by the additional permissions listed below.
-
- 0. Additional Definitions.
-
- As used herein, "this License" refers to version 3 of the GNU Lesser
-General Public License, and the "GNU GPL" refers to version 3 of the GNU
-General Public License.
-
- "The Library" refers to a covered work governed by this License,
-other than an Application or a Combined Work as defined below.
-
- An "Application" is any work that makes use of an interface provided
-by the Library, but which is not otherwise based on the Library.
-Defining a subclass of a class defined by the Library is deemed a mode
-of using an interface provided by the Library.
-
- A "Combined Work" is a work produced by combining or linking an
-Application with the Library. The particular version of the Library
-with which the Combined Work was made is also called the "Linked
-Version".
-
- The "Minimal Corresponding Source" for a Combined Work means the
-Corresponding Source for the Combined Work, excluding any source code
-for portions of the Combined Work that, considered in isolation, are
-based on the Application, and not on the Linked Version.
-
- The "Corresponding Application Code" for a Combined Work means the
-object code and/or source code for the Application, including any data
-and utility programs needed for reproducing the Combined Work from the
-Application, but excluding the System Libraries of the Combined Work.
-
- 1. Exception to Section 3 of the GNU GPL.
-
- You may convey a covered work under sections 3 and 4 of this License
-without being bound by section 3 of the GNU GPL.
-
- 2. Conveying Modified Versions.
-
- If you modify a copy of the Library, and, in your modifications, a
-facility refers to a function or data to be supplied by an Application
-that uses the facility (other than as an argument passed when the
-facility is invoked), then you may convey a copy of the modified
-version:
-
- a) under this License, provided that you make a good faith effort to
- ensure that, in the event an Application does not supply the
- function or data, the facility still operates, and performs
- whatever part of its purpose remains meaningful, or
-
- b) under the GNU GPL, with none of the additional permissions of
- this License applicable to that copy.
-
- 3. Object Code Incorporating Material from Library Header Files.
-
- The object code form of an Application may incorporate material from
-a header file that is part of the Library. You may convey such object
-code under terms of your choice, provided that, if the incorporated
-material is not limited to numerical parameters, data structure
-layouts and accessors, or small macros, inline functions and templates
-(ten or fewer lines in length), you do both of the following:
-
- a) Give prominent notice with each copy of the object code that the
- Library is used in it and that the Library and its use are
- covered by this License.
-
- b) Accompany the object code with a copy of the GNU GPL and this license
- document.
-
- 4. Combined Works.
-
- You may convey a Combined Work under terms of your choice that,
-taken together, effectively do not restrict modification of the
-portions of the Library contained in the Combined Work and reverse
-engineering for debugging such modifications, if you also do each of
-the following:
-
- a) Give prominent notice with each copy of the Combined Work that
- the Library is used in it and that the Library and its use are
- covered by this License.
-
- b) Accompany the Combined Work with a copy of the GNU GPL and this license
- document.
-
- c) For a Combined Work that displays copyright notices during
- execution, include the copyright notice for the Library among
- these notices, as well as a reference directing the user to the
- copies of the GNU GPL and this license document.
-
- d) Do one of the following:
-
- 0) Convey the Minimal Corresponding Source under the terms of this
- License, and the Corresponding Application Code in a form
- suitable for, and under terms that permit, the user to
- recombine or relink the Application with a modified version of
- the Linked Version to produce a modified Combined Work, in the
- manner specified by section 6 of the GNU GPL for conveying
- Corresponding Source.
-
- 1) Use a suitable shared library mechanism for linking with the
- Library. A suitable mechanism is one that (a) uses at run time
- a copy of the Library already present on the user's computer
- system, and (b) will operate properly with a modified version
- of the Library that is interface-compatible with the Linked
- Version.
-
- e) Provide Installation Information, but only if you would otherwise
- be required to provide such information under section 6 of the
- GNU GPL, and only to the extent that such information is
- necessary to install and execute a modified version of the
- Combined Work produced by recombining or relinking the
- Application with a modified version of the Linked Version. (If
- you use option 4d0, the Installation Information must accompany
- the Minimal Corresponding Source and Corresponding Application
- Code. If you use option 4d1, you must provide the Installation
- Information in the manner specified by section 6 of the GNU GPL
- for conveying Corresponding Source.)
-
- 5. Combined Libraries.
-
- You may place library facilities that are a work based on the
-Library side by side in a single library together with other library
-facilities that are not Applications and are not covered by this
-License, and convey such a combined library under terms of your
-choice, if you do both of the following:
-
- a) Accompany the combined library with a copy of the same work based
- on the Library, uncombined with any other library facilities,
- conveyed under the terms of this License.
-
- b) Give prominent notice with the combined library that part of it
- is a work based on the Library, and explaining where to find the
- accompanying uncombined form of the same work.
-
- 6. Revised Versions of the GNU Lesser General Public License.
-
- The Free Software Foundation may publish revised and/or new versions
-of the GNU Lesser General Public License from time to time. Such new
-versions will be similar in spirit to the present version, but may
-differ in detail to address new problems or concerns.
-
- Each version is given a distinguishing version number. If the
-Library as you received it specifies that a certain numbered version
-of the GNU Lesser General Public License "or any later version"
-applies to it, you have the option of following the terms and
-conditions either of that published version or of any later version
-published by the Free Software Foundation. If the Library as you
-received it does not specify a version number of the GNU Lesser
-General Public License, you may choose any version of the GNU Lesser
-General Public License ever published by the Free Software Foundation.
-
- If the Library as you received it specifies that a proxy can decide
-whether future versions of the GNU Lesser General Public License shall
-apply, that proxy's public statement of acceptance of any version is
-permanent authorization for you to choose that version for the
-Library.
\ No newline at end of file
diff --git a/lib/pygeoip/DEVELOPER b/lib/pygeoip/DEVELOPER
deleted file mode 100644
index 6f81d45..0000000
--- a/lib/pygeoip/DEVELOPER
+++ /dev/null
@@ -1,21 +0,0 @@
-Bootstrap manual for developers of pygeoip
-
-Dependencies: tox, nose, epydoc
-
-For testing we are using tox virtualenv-based Python version testing
-and nose as test framework.
-
-Tox will create virtualenvs for all Python version pygeoip supports
-and installs the current working tree using the setup.py install script.
-Running the tests requires a couple of sample databases found on the
-link below.
-
-Maxmind sample databases for testing can be downloaded here:
-http://www.defunct.cc/maxmind-geoip-samples.tar.gz (58 MB)
-
-Extract the tarball in the tests directory and run tox from the root directory.
-
-Please make sure your code passes all tests before opening pull requests.
-
-All the best,
-William Tisäter
diff --git a/lib/pygeoip/__init__.py b/lib/pygeoip/__init__.py
index b073088..3e89422 100644
--- a/lib/pygeoip/__init__.py
+++ b/lib/pygeoip/__init__.py
@@ -1,13 +1,17 @@
-# -*- coding: utf-8 -*-
"""
-Pure Python GeoIP API
+Pure Python GeoIP API. The API is based off of U{MaxMind's C-based Python API},
+but the code itself is based on the U{pure PHP5 API }
+by Jim Winstead and Hans Lellelid.
-The API is based on MaxMind's C-based Python API, but the code itself is
-ported from the Pure PHP GeoIP API by Jim Winstead and Hans Lellelid.
+It is mostly a drop-in replacement, except the
+C{new} and C{open} methods are gone. You should instantiate the L{GeoIP} class yourself:
-@author: Jennifer Ennis
+C{gi = GeoIP('/path/to/GeoIP.dat', pygeoip.MEMORY_CACHE)}
-@license: Copyright(C) 2004 MaxMind LLC
+@author: Jennifer Ennis
+
+@license:
+Copyright(C) 2004 MaxMind LLC
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU Lesser General Public License as published by
@@ -23,43 +27,39 @@ You should have received a copy of the GNU Lesser General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
+from __future__ import with_statement, absolute_import, division
import os
import math
import socket
import mmap
+import gzip
import codecs
-from threading import Lock
+from StringIO import StringIO
-try:
- from StringIO import StringIO
-except ImportError:
- from io import StringIO, BytesIO
+from . import const
+from .util import ip2long
+from .timezone import time_zone_by_country_and_region
-from pygeoip import util, const
-from pygeoip.const import PY2, PY3
-from pygeoip.timezone import time_zone_by_country_and_region
+import six
-
-STANDARD = const.STANDARD
MMAP_CACHE = const.MMAP_CACHE
MEMORY_CACHE = const.MEMORY_CACHE
-
-ENCODING = const.ENCODING
-
+STANDARD = const.STANDARD
class GeoIPError(Exception):
pass
-
class GeoIPMetaclass(type):
+
def __new__(cls, *args, **kwargs):
"""
Singleton method to gets an instance without reparsing the db. Unique
instances are instantiated based on the filename of the db. Flags are
- ignored for this, i.e. if you initialize one with STANDARD
- flag (default) and then try later to initialize with MEMORY_CACHE, it
- will still return the STANDARD one.
+ ignored for this, i.e. if you initialize one with STANDARD flag (default)
+ and then try later to initialize with MEMORY_CACHE, it will still
+ return the STANDARD one.
"""
+
if not hasattr(cls, '_instances'):
cls._instances = {}
@@ -68,25 +68,25 @@ class GeoIPMetaclass(type):
elif 'filename' in kwargs:
filename = kwargs['filename']
- if filename not in cls._instances:
+ if not filename in cls._instances:
cls._instances[filename] = type.__new__(cls, *args, **kwargs)
return cls._instances[filename]
-
GeoIPBase = GeoIPMetaclass('GeoIPBase', (object,), {})
-
class GeoIP(GeoIPBase):
+
def __init__(self, filename, flags=0):
"""
Initialize the class.
- @param filename: Path to a geoip database.
+ @param filename: path to a geoip database. If MEMORY_CACHE is used,
+ the file can be gzipped.
@type filename: str
- @param flags: Flags that affect how the database is processed.
- Currently supported flags are STANDARD (the default),
- MEMORY_CACHE (preload the whole file into memory) and
+ @param flags: flags that affect how the database is processed.
+ Currently the only supported flags are STANDARD (the default),
+ MEMORY_CACHE (preload the whole file into memory), and
MMAP_CACHE (access the file via mmap).
@type flags: int
"""
@@ -94,71 +94,42 @@ class GeoIP(GeoIPBase):
self._flags = flags
if self._flags & const.MMAP_CACHE:
- f = open(filename, 'rb')
- access = mmap.ACCESS_READ
- self._filehandle = mmap.mmap(f.fileno(), 0, access=access)
- f.close()
+ with open(filename, 'rb') as f:
+ self._filehandle = mmap.mmap(f.fileno(), 0, access=mmap.ACCESS_READ)
elif self._flags & const.MEMORY_CACHE:
- f = open(filename, 'rb')
- self._memoryBuffer = f.read()
- iohandle = BytesIO if PY3 else StringIO
- self._filehandle = iohandle(self._memoryBuffer)
- f.close()
+ if filename.endswith('.gz'):
+ opener = gzip.open
+ else:
+ opener = open
+ with opener(filename, 'rb') as f:
+ self._memoryBuffer = f.read()
+ self._filehandle = StringIO(self._memoryBuffer)
else:
- self._filehandle = codecs.open(filename, 'rb', ENCODING)
+ self._filehandle = codecs.open(filename, 'rb','latin_1')
- self._lock = Lock()
self._setup_segments()
def _setup_segments(self):
"""
- Parses the database file to determine what kind of database is
- being used and setup segment sizes and start points that will
- be used by the seek*() methods later.
-
- Supported databases:
-
- * COUNTRY_EDITION
- * COUNTRY_EDITION_V6
- * REGION_EDITION_REV0
- * REGION_EDITION_REV1
- * CITY_EDITION_REV0
- * CITY_EDITION_REV1
- * CITY_EDITION_REV1_V6
- * ORG_EDITION
- * ISP_EDITION
- * ASNUM_EDITION
- * ASNUM_EDITION_V6
-
+ Parses the database file to determine what kind of database is being used and setup
+ segment sizes and start points that will be used by the seek*() methods later.
"""
self._databaseType = const.COUNTRY_EDITION
self._recordLength = const.STANDARD_RECORD_LENGTH
- self._databaseSegments = const.COUNTRY_BEGIN
- self._lock.acquire()
filepos = self._filehandle.tell()
self._filehandle.seek(-3, os.SEEK_END)
for i in range(const.STRUCTURE_INFO_MAX_SIZE):
- chars = chr(255) * 3
delim = self._filehandle.read(3)
- if PY3 and type(delim) is bytes:
- delim = delim.decode(ENCODING)
+ if delim == six.u(chr(255) * 3):
+ self._databaseType = ord(self._filehandle.read(1))
- if PY2:
- chars = chars.decode(ENCODING)
- if type(delim) is str:
- delim = delim.decode(ENCODING)
-
- if delim == chars:
- byte = self._filehandle.read(1)
- self._databaseType = ord(byte)
-
- # Compatibility with databases from April 2003 and earlier
if (self._databaseType >= 106):
+ # backwards compatibility with databases from April 2003 and earlier
self._databaseType -= 105
if self._databaseType == const.REGION_EDITION_REV0:
@@ -169,29 +140,51 @@ class GeoIP(GeoIPBase):
elif self._databaseType in (const.CITY_EDITION_REV0,
const.CITY_EDITION_REV1,
- const.CITY_EDITION_REV1_V6,
const.ORG_EDITION,
const.ISP_EDITION,
- const.ASNUM_EDITION,
- const.ASNUM_EDITION_V6):
+ const.ASNUM_EDITION):
self._databaseSegments = 0
buf = self._filehandle.read(const.SEGMENT_RECORD_LENGTH)
- if PY3 and type(buf) is bytes:
- buf = buf.decode(ENCODING)
-
for j in range(const.SEGMENT_RECORD_LENGTH):
self._databaseSegments += (ord(buf[j]) << (j * 8))
- LONG_RECORDS = (const.ORG_EDITION, const.ISP_EDITION)
- if self._databaseType in LONG_RECORDS:
+ if self._databaseType in (const.ORG_EDITION, const.ISP_EDITION):
self._recordLength = const.ORG_RECORD_LENGTH
+
break
else:
self._filehandle.seek(-4, os.SEEK_CUR)
+ if self._databaseType == const.COUNTRY_EDITION:
+ self._databaseSegments = const.COUNTRY_BEGIN
+
self._filehandle.seek(filepos, os.SEEK_SET)
- self._lock.release()
+
+ def _lookup_country_id(self, addr):
+ """
+ Get the country index.
+
+ This method is called by the _lookupCountryCode and _lookupCountryName
+ methods. It looks up the index ('id') for the country which is the key
+ for the code and name.
+
+ @param addr: The IP address
+ @type addr: str
+ @return: network byte order 32-bit integer
+ @rtype: int
+ """
+
+ ipnum = ip2long(addr)
+
+ if not ipnum:
+ raise ValueError("Invalid IP address: %s" % addr)
+
+ if self._databaseType != const.COUNTRY_EDITION:
+ raise GeoIPError('Invalid database type; country_* methods expect '\
+ 'Country database')
+
+ return self._seek_country(ipnum) - const.COUNTRY_BEGIN
def _seek_country(self, ipnum):
"""
@@ -203,119 +196,117 @@ class GeoIP(GeoIPBase):
@return: offset of start of record
@rtype: int
"""
- try:
- offset = 0
- seek_depth = 127 if len(str(ipnum)) > 10 else 31
+ offset = 0
- for depth in range(seek_depth, -1, -1):
- if self._flags & const.MEMORY_CACHE:
- startIndex = 2 * self._recordLength * offset
- endIndex = startIndex + (2 * self._recordLength)
- buf = self._memoryBuffer[startIndex:endIndex]
- else:
- startIndex = 2 * self._recordLength * offset
- readLength = 2 * self._recordLength
- self._lock.acquire()
- self._filehandle.seek(startIndex, os.SEEK_SET)
- buf = self._filehandle.read(readLength)
- self._lock.release()
+ for depth in range(31, -1, -1):
- if PY3 and type(buf) is bytes:
- buf = buf.decode(ENCODING)
+ if self._flags & const.MEMORY_CACHE:
+ startIndex = 2 * self._recordLength * offset
+ length = 2 * self._recordLength
+ endIndex = startIndex + length
+ buf = self._memoryBuffer[startIndex:endIndex]
+ else:
+ self._filehandle.seek(2 * self._recordLength * offset, os.SEEK_SET)
+ buf = self._filehandle.read(2 * self._recordLength)
- x = [0, 0]
- for i in range(2):
- for j in range(self._recordLength):
- byte = buf[self._recordLength * i + j]
- x[i] += ord(byte) << (j * 8)
- if ipnum & (1 << depth):
- if x[1] >= self._databaseSegments:
- return x[1]
- offset = x[1]
- else:
- if x[0] >= self._databaseSegments:
- return x[0]
- offset = x[0]
- except:
- pass
+ x = [0,0]
- raise GeoIPError('Corrupt database')
+ for i in range(2):
+ for j in range(self._recordLength):
+ x[i] += ord(buf[self._recordLength * i + j]) << (j * 8)
+
+ if ipnum & (1 << depth):
+
+ if x[1] >= self._databaseSegments:
+ return x[1]
+
+ offset = x[1]
+
+ else:
+
+ if x[0] >= self._databaseSegments:
+ return x[0]
+
+ offset = x[0]
+
+
+ raise Exception('Error traversing database - perhaps it is corrupt?')
def _get_org(self, ipnum):
"""
- Seek and return organization or ISP name for ipnum.
+ Seek and return organization (or ISP) name for converted IP addr.
@param ipnum: Converted IP address
@type ipnum: int
@return: org/isp name
@rtype: str
"""
+
seek_org = self._seek_country(ipnum)
if seek_org == self._databaseSegments:
return None
- read_length = (2 * self._recordLength - 1) * self._databaseSegments
- self._lock.acquire()
- self._filehandle.seek(seek_org + read_length, os.SEEK_SET)
- buf = self._filehandle.read(const.MAX_ORG_RECORD_LENGTH)
- self._lock.release()
+ record_pointer = seek_org + (2 * self._recordLength - 1) * self._databaseSegments
- if PY3 and type(buf) is bytes:
- buf = buf.decode(ENCODING)
+ self._filehandle.seek(record_pointer, os.SEEK_SET)
- return buf[:buf.index(chr(0))]
+ org_buf = self._filehandle.read(const.MAX_ORG_RECORD_LENGTH)
+
+ return org_buf[:org_buf.index(chr(0))]
def _get_region(self, ipnum):
"""
- Seek and return the region info (dict containing country_code
- and region_name).
+ Seek and return the region info (dict containing country_code and region_name).
- @param ipnum: Converted IP address
+ @param ipnum: converted IP address
@type ipnum: int
@return: dict containing country_code and region_name
@rtype: dict
"""
- region = ''
country_code = ''
- seek_country = self._seek_country(ipnum)
-
- def get_region_name(offset):
- region1 = chr(offset // 26 + 65)
- region2 = chr(offset % 26 + 65)
- return ''.join([region1, region2])
+ region = ''
if self._databaseType == const.REGION_EDITION_REV0:
+ seek_country = self._seek_country(ipnum)
seek_region = seek_country - const.STATE_BEGIN_REV0
if seek_region >= 1000:
country_code = 'US'
- region = get_region_name(seek_region - 1000)
+ region = ''.join([chr((seek_region // 1000) // 26 + 65), chr((seek_region // 1000) % 26 + 65)])
else:
country_code = const.COUNTRY_CODES[seek_region]
+ region = ''
elif self._databaseType == const.REGION_EDITION_REV1:
+ seek_country = self._seek_country(ipnum)
seek_region = seek_country - const.STATE_BEGIN_REV1
if seek_region < const.US_OFFSET:
- pass
+ country_code = '';
+ region = ''
elif seek_region < const.CANADA_OFFSET:
country_code = 'US'
- region = get_region_name(seek_region - const.US_OFFSET)
- elif seek_region < const.WORLD_OFFSET:
+ region = ''.join([chr((seek_region - const.US_OFFSET) // 26 + 65), chr((seek_region - const.US_OFFSET) % 26 + 65)])
+ elif seek_region < const.WORLD_OFFSET:
country_code = 'CA'
- region = get_region_name(seek_region - const.CANADA_OFFSET)
+ region = ''.join([chr((seek_region - const.CANADA_OFFSET) // 26 + 65), chr((seek_region - const.CANADA_OFFSET) % 26 + 65)])
else:
- index = (seek_region - const.WORLD_OFFSET) // const.FIPS_RANGE
- if index in const.COUNTRY_CODES:
- country_code = const.COUNTRY_CODES[index]
- elif self._databaseType in const.CITY_EDITIONS:
- rec = self._get_record(ipnum)
- region = rec.get('region_name', '')
- country_code = rec.get('country_code', '')
+ i = (seek_region - const.WORLD_OFFSET) // const.FIPS_RANGE
+ if i < len(const.COUNTRY_CODES):
+ #country_code = const.COUNTRY_CODES[(seek_region - const.WORLD_OFFSET) // const.FIPS_RANGE]
+ country_code = const.COUNTRY_CODES[i]
+ else:
+ country_code = ''
+ region = ''
- return {'country_code': country_code, 'region_name': region}
+ elif self._databaseType in (const.CITY_EDITION_REV0, const.CITY_EDITION_REV1):
+ rec = self._get_record(ipnum)
+ country_code = rec['country_code'] if 'country_code' in rec else ''
+ region = rec['region_name'] if 'region_name' in rec else ''
+
+ return {'country_code' : country_code, 'region_name' : region }
def _get_record(self, ipnum):
"""
Populate location dict for converted IP.
- @param ipnum: Converted IP address
+ @param ipnum: converted IP address
@type ipnum: int
@return: dict with country_code, country_code3, country_name,
region, city, postal_code, latitude, longitude,
@@ -324,115 +315,107 @@ class GeoIP(GeoIPBase):
"""
seek_country = self._seek_country(ipnum)
if seek_country == self._databaseSegments:
- return {}
+ return None
- read_length = (2 * self._recordLength - 1) * self._databaseSegments
- self._lock.acquire()
- self._filehandle.seek(seek_country + read_length, os.SEEK_SET)
- buf = self._filehandle.read(const.FULL_RECORD_LENGTH)
- self._lock.release()
+ record_pointer = seek_country + (2 * self._recordLength - 1) * self._databaseSegments
- if PY3 and type(buf) is bytes:
- buf = buf.decode(ENCODING)
+ self._filehandle.seek(record_pointer, os.SEEK_SET)
+ record_buf = self._filehandle.read(const.FULL_RECORD_LENGTH)
- record = {
- 'dma_code': 0,
- 'area_code': 0,
- 'metro_code': '',
- 'postal_code': ''
- }
+ record = {}
- latitude = 0
- longitude = 0
- buf_pos = 0
-
- # Get country
- char = ord(buf[buf_pos])
+ record_buf_pos = 0
+ char = ord(record_buf[record_buf_pos])
+ #char = record_buf[record_buf_pos] if six.PY3 else ord(record_buf[record_buf_pos])
record['country_code'] = const.COUNTRY_CODES[char]
record['country_code3'] = const.COUNTRY_CODES3[char]
record['country_name'] = const.COUNTRY_NAMES[char]
- record['continent'] = const.CONTINENT_NAMES[char]
+ record_buf_pos += 1
+ str_length = 0
- buf_pos += 1
- def get_data(buf, buf_pos):
- offset = buf_pos
- char = ord(buf[offset])
- while (char != 0):
- offset += 1
- char = ord(buf[offset])
- if offset > buf_pos:
- return (offset, buf[buf_pos:offset])
- return (offset, '')
+ # get region
+ char = ord(record_buf[record_buf_pos+str_length])
+ while (char != 0):
+ str_length += 1
+ char = ord(record_buf[record_buf_pos+str_length])
- offset, record['region_name'] = get_data(buf, buf_pos)
- offset, record['city'] = get_data(buf, offset + 1)
- offset, record['postal_code'] = get_data(buf, offset + 1)
- buf_pos = offset + 1
+ if str_length > 0:
+ record['region_name'] = record_buf[record_buf_pos:record_buf_pos+str_length]
+ record_buf_pos += str_length + 1
+ str_length = 0
+
+ # get city
+ char = ord(record_buf[record_buf_pos+str_length])
+ while (char != 0):
+ str_length += 1
+ char = ord(record_buf[record_buf_pos+str_length])
+
+ if str_length > 0:
+ record['city'] = record_buf[record_buf_pos:record_buf_pos+str_length]
+ else:
+ record['city'] = ''
+
+ record_buf_pos += str_length + 1
+ str_length = 0
+
+ # get the postal code
+ char = ord(record_buf[record_buf_pos+str_length])
+ while (char != 0):
+ str_length += 1
+ char = ord(record_buf[record_buf_pos+str_length])
+
+ if str_length > 0:
+ record['postal_code'] = record_buf[record_buf_pos:record_buf_pos+str_length]
+ else:
+ record['postal_code'] = None
+
+ record_buf_pos += str_length + 1
+ str_length = 0
+
+ latitude = 0
+ longitude = 0
for j in range(3):
- char = ord(buf[buf_pos])
- buf_pos += 1
+ char = ord(record_buf[record_buf_pos])
+ record_buf_pos += 1
latitude += (char << (j * 8))
+ record['latitude'] = (latitude/10000.0) - 180.0
+
for j in range(3):
- char = ord(buf[buf_pos])
- buf_pos += 1
+ char = ord(record_buf[record_buf_pos])
+ record_buf_pos += 1
longitude += (char << (j * 8))
- record['latitude'] = (latitude / 10000.0) - 180.0
- record['longitude'] = (longitude / 10000.0) - 180.0
+ record['longitude'] = (longitude/10000.0) - 180.0
- if self._databaseType in (const.CITY_EDITION_REV1, const.CITY_EDITION_REV1_V6):
+ if self._databaseType == const.CITY_EDITION_REV1:
dmaarea_combo = 0
if record['country_code'] == 'US':
for j in range(3):
- char = ord(buf[buf_pos])
- dmaarea_combo += (char << (j * 8))
- buf_pos += 1
+ char = ord(record_buf[record_buf_pos])
+ record_buf_pos += 1
+ dmaarea_combo += (char << (j*8))
- record['dma_code'] = int(math.floor(dmaarea_combo / 1000))
- record['area_code'] = dmaarea_combo % 1000
+ record['dma_code'] = int(math.floor(dmaarea_combo/1000))
+ record['area_code'] = dmaarea_combo%1000
+ else:
+ record['dma_code'] = 0
+ record['area_code'] = 0
- record['metro_code'] = const.DMA_MAP.get(record['dma_code'])
- params = (record['country_code'], record['region_name'])
- record['time_zone'] = time_zone_by_country_and_region(*params)
+ if 'dma_code' in record and record['dma_code'] in const.DMA_MAP:
+ record['metro_code'] = const.DMA_MAP[record['dma_code']]
+ else:
+ record['metro_code'] = ''
+
+ if 'country_code' in record:
+ record['time_zone'] = time_zone_by_country_and_region(
+ record['country_code'], record.get('region_name')) or ''
+ else:
+ record['time_zone'] = ''
return record
- def _gethostbyname(self, hostname):
- if self._databaseType in const.IPV6_EDITIONS:
- try:
- response = socket.getaddrinfo(hostname, 0, socket.AF_INET6)
- family, socktype, proto, canonname, sockaddr = response[0]
- address, port, flow, scope = sockaddr
- return address
- except socket.gaierror:
- return ''
- else:
- return socket.gethostbyname(hostname)
-
- def id_by_addr(self, addr):
- """
- Get the country index.
- Looks up the index for the country which is the key for
- the code and name.
-
- @param addr: The IP address
- @type addr: str
- @return: network byte order 32-bit integer
- @rtype: int
- """
- ipnum = util.ip2long(addr)
- if not ipnum:
- raise ValueError("Invalid IP address: %s" % addr)
-
- COUNTY_EDITIONS = (const.COUNTRY_EDITION, const.COUNTRY_EDITION_V6)
- if self._databaseType not in COUNTY_EDITIONS:
- message = 'Invalid database type, expected Country'
- raise GeoIPError(message)
-
- return self._seek_country(ipnum) - const.COUNTRY_BEGIN
-
def country_code_by_addr(self, addr):
"""
Returns 2-letter country code (e.g. 'US') for specified IP address.
@@ -444,38 +427,31 @@ class GeoIP(GeoIPBase):
@rtype: str
"""
try:
- VALID_EDITIONS = (const.COUNTRY_EDITION, const.COUNTRY_EDITION_V6)
- if self._databaseType in VALID_EDITIONS:
- ipv = 6 if addr.find(':') >= 0 else 4
-
- if ipv == 4 and self._databaseType != const.COUNTRY_EDITION:
- message = 'Invalid database type; expected IPv6 address'
- raise ValueError(message)
- if ipv == 6 and self._databaseType != const.COUNTRY_EDITION_V6:
- message = 'Invalid database type; expected IPv4 address'
- raise ValueError(message)
-
- country_id = self.id_by_addr(addr)
+ if self._databaseType == const.COUNTRY_EDITION:
+ country_id = self._lookup_country_id(addr)
return const.COUNTRY_CODES[country_id]
- elif self._databaseType in const.REGION_CITY_EDITIONS:
- return self.region_by_addr(addr).get('country_code')
+ elif self._databaseType in (const.REGION_EDITION_REV0, const.REGION_EDITION_REV1,
+ const.CITY_EDITION_REV0, const.CITY_EDITION_REV1):
+ return self.region_by_addr(addr)['country_code']
+ else:
+ raise GeoIPError('Invalid database type; country_* methods expect '\
+ 'Country, City, or Region database')
- message = 'Invalid database type, expected Country, City or Region'
- raise GeoIPError(message)
except ValueError:
- raise GeoIPError('Failed to lookup address %s' % addr)
+ raise GeoIPError('*_by_addr methods only accept IP addresses. Use *_by_name for hostnames. (Address: %s)' % addr)
def country_code_by_name(self, hostname):
"""
Returns 2-letter country code (e.g. 'US') for specified hostname.
Use this method if you have a Country, Region, or City database.
- @param hostname: Hostname
+ @param hostname: host name
@type hostname: str
@return: 2-letter country code
@rtype: str
"""
- addr = self._gethostbyname(hostname)
+ addr = socket.gethostbyname(hostname)
+
return self.country_code_by_addr(addr)
def country_name_by_addr(self, addr):
@@ -489,35 +465,34 @@ class GeoIP(GeoIPBase):
@rtype: str
"""
try:
- VALID_EDITIONS = (const.COUNTRY_EDITION, const.COUNTRY_EDITION_V6)
- if self._databaseType in VALID_EDITIONS:
- country_id = self.id_by_addr(addr)
+ if self._databaseType == const.COUNTRY_EDITION:
+ country_id = self._lookup_country_id(addr)
return const.COUNTRY_NAMES[country_id]
- elif self._databaseType in const.CITY_EDITIONS:
- return self.record_by_addr(addr).get('country_name')
+ elif self._databaseType in (const.CITY_EDITION_REV0, const.CITY_EDITION_REV1):
+ return self.record_by_addr(addr)['country_name']
else:
- message = 'Invalid database type, expected Country or City'
- raise GeoIPError(message)
+ raise GeoIPError('Invalid database type; country_* methods expect '\
+ 'Country or City database')
except ValueError:
- raise GeoIPError('Failed to lookup address %s' % addr)
+ raise GeoIPError('*_by_addr methods only accept IP addresses. Use *_by_name for hostnames. (Address: %s)' % addr)
def country_name_by_name(self, hostname):
"""
Returns full country name for specified hostname.
Use this method if you have a Country database.
- @param hostname: Hostname
+ @param hostname: host name
@type hostname: str
@return: country name
@rtype: str
"""
- addr = self._gethostbyname(hostname)
+ addr = socket.gethostbyname(hostname)
return self.country_name_by_addr(addr)
def org_by_addr(self, addr):
"""
- Lookup Organization, ISP or ASNum for given IP address.
- Use this method if you have an Organization, ISP or ASNum database.
+ Lookup the organization (or ISP) for given IP address.
+ Use this method if you have an Organization/ISP database.
@param addr: IP address
@type addr: str
@@ -525,30 +500,31 @@ class GeoIP(GeoIPBase):
@rtype: str
"""
try:
- ipnum = util.ip2long(addr)
- if not ipnum:
- raise ValueError('Invalid IP address')
+ ipnum = ip2long(addr)
- valid = (const.ORG_EDITION, const.ISP_EDITION, const.ASNUM_EDITION, const.ASNUM_EDITION_V6)
- if self._databaseType not in valid:
- message = 'Invalid database type, expected Org, ISP or ASNum'
- raise GeoIPError(message)
+ if not ipnum:
+ raise ValueError("Invalid IP address: %s" % addr)
+
+ if self._databaseType not in (const.ORG_EDITION, const.ISP_EDITION, const.ASNUM_EDITION):
+ raise GeoIPError('Invalid database type; org_* methods expect '\
+ 'Org/ISP database')
return self._get_org(ipnum)
except ValueError:
- raise GeoIPError('Failed to lookup address %s' % addr)
+ raise GeoIPError('*_by_addr methods only accept IP addresses. Use *_by_name for hostnames. (Address: %s)' % addr)
def org_by_name(self, hostname):
"""
Lookup the organization (or ISP) for hostname.
Use this method if you have an Organization/ISP database.
- @param hostname: Hostname
+ @param hostname: host name
@type hostname: str
- @return: Organization or ISP name
+ @return: organization or ISP name
@rtype: str
"""
- addr = self._gethostbyname(hostname)
+ addr = socket.gethostbyname(hostname)
+
return self.org_by_addr(addr)
def record_by_addr(self, addr):
@@ -558,41 +534,38 @@ class GeoIP(GeoIPBase):
@param addr: IP address
@type addr: str
- @return: Dictionary with country_code, country_code3, country_name,
- region, city, postal_code, latitude, longitude, dma_code,
- metro_code, area_code, region_name, time_zone
+ @return: dict with country_code, country_code3, country_name,
+ region, city, postal_code, latitude, longitude,
+ dma_code, metro_code, area_code, region_name, time_zone
@rtype: dict
"""
try:
- ipnum = util.ip2long(addr)
+ ipnum = ip2long(addr)
+
if not ipnum:
- raise ValueError('Invalid IP address')
+ raise ValueError("Invalid IP address: %s" % addr)
- if self._databaseType not in const.CITY_EDITIONS:
- message = 'Invalid database type, expected City'
- raise GeoIPError(message)
+ if not self._databaseType in (const.CITY_EDITION_REV0, const.CITY_EDITION_REV1):
+ raise GeoIPError('Invalid database type; record_* methods expect City database')
- rec = self._get_record(ipnum)
- if not rec:
- return None
-
- return rec
+ return self._get_record(ipnum)
except ValueError:
- raise GeoIPError('Failed to lookup address %s' % addr)
+ raise GeoIPError('*_by_addr methods only accept IP addresses. Use *_by_name for hostnames. (Address: %s)' % addr)
def record_by_name(self, hostname):
"""
Look up the record for a given hostname.
Use this method if you have a City database.
- @param hostname: Hostname
+ @param hostname: host name
@type hostname: str
- @return: Dictionary with country_code, country_code3, country_name,
- region, city, postal_code, latitude, longitude, dma_code,
- metro_code, area_code, region_name, time_zone
+ @return: dict with country_code, country_code3, country_name,
+ region, city, postal_code, latitude, longitude,
+ dma_code, metro_code, area_code, region_name, time_zone
@rtype: dict
"""
- addr = self._gethostbyname(hostname)
+ addr = socket.gethostbyname(hostname)
+
return self.record_by_addr(addr)
def region_by_addr(self, addr):
@@ -602,33 +575,37 @@ class GeoIP(GeoIPBase):
@param addr: IP address
@type addr: str
- @return: Dictionary containing country_code, region and region_name
+ @return: dict containing country_code, region,
+ and region_name
@rtype: dict
"""
try:
- ipnum = util.ip2long(addr)
- if not ipnum:
- raise ValueError('Invalid IP address')
+ ipnum = ip2long(addr)
- if self._databaseType not in const.REGION_CITY_EDITIONS:
- message = 'Invalid database type, expected Region or City'
- raise GeoIPError(message)
+ if not ipnum:
+ raise ValueError("Invalid IP address: %s" % addr)
+
+ if not self._databaseType in (const.REGION_EDITION_REV0, const.REGION_EDITION_REV1,
+ const.CITY_EDITION_REV0, const.CITY_EDITION_REV1):
+ raise GeoIPError('Invalid database type; region_* methods expect '\
+ 'Region or City database')
return self._get_region(ipnum)
except ValueError:
- raise GeoIPError('Failed to lookup address %s' % addr)
+ raise GeoIPError('*_by_addr methods only accept IP addresses. Use *_by_name for hostnames. (Address: %s)' % addr)
def region_by_name(self, hostname):
"""
Lookup the region for given hostname.
Use this method if you have a Region database.
- @param hostname: Hostname
+ @param hostname: host name
@type hostname: str
- @return: Dictionary containing country_code, region, and region_name
+ @return: dict containing country_code, region,
+ and region_name
@rtype: dict
"""
- addr = self._gethostbyname(hostname)
+ addr = socket.gethostbyname(hostname)
return self.region_by_addr(addr)
def time_zone_by_addr(self, addr):
@@ -636,33 +613,35 @@ class GeoIP(GeoIPBase):
Look up the time zone for a given IP address.
Use this method if you have a Region or City database.
- @param addr: IP address
- @type addr: str
+ @param hostname: IP address
+ @type hostname: str
@return: Time zone
@rtype: str
"""
try:
- ipnum = util.ip2long(addr)
+ ipnum = ip2long(addr)
+
if not ipnum:
- raise ValueError('Invalid IP address')
+ raise ValueError("Invalid IP address: %s" % addr)
- if self._databaseType not in const.CITY_EDITIONS:
- message = 'Invalid database type, expected City'
- raise GeoIPError(message)
+ if not self._databaseType in (const.REGION_EDITION_REV0, const.REGION_EDITION_REV1,
+ const.CITY_EDITION_REV0, const.CITY_EDITION_REV1):
+ raise GeoIPError('Invalid database type; region_* methods expect '\
+ 'Region or City database')
- return self._get_record(ipnum).get('time_zone')
+ return self._get_record(ipnum)['time_zone']
except ValueError:
- raise GeoIPError('Failed to lookup address %s' % addr)
+ raise GeoIPError('*_by_addr methods only accept IP addresses. Use *_by_name for hostnames. (Address: %s)' % addr)
def time_zone_by_name(self, hostname):
"""
Look up the time zone for a given hostname.
Use this method if you have a Region or City database.
- @param hostname: Hostname
+ @param hostname: host name
@type hostname: str
@return: Time zone
@rtype: str
"""
- addr = self._gethostbyname(hostname)
+ addr = socket.gethostbyname(hostname)
return self.time_zone_by_addr(addr)
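
For orientation, here is a minimal usage sketch of the GeoIP class as it exists after this patch. It follows the instantiation example from the module docstring above; the database path and lookup values are illustrative, and a GeoIP.dat country database must be obtained from MaxMind separately:

```python
import pygeoip

# MEMORY_CACHE preloads the whole file; per the patched __init__ docstring,
# a gzipped database (*.gz) is also accepted in this mode.
gi = pygeoip.GeoIP('/path/to/GeoIP.dat', pygeoip.MEMORY_CACHE)

print(gi.country_code_by_addr('64.233.161.99'))  # 2-letter code, e.g. 'US'
print(gi.country_name_by_name('example.com'))    # resolves the hostname first
```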
diff --git a/lib/pygeoip/const.py b/lib/pygeoip/const.py
index b37b045..a215226 100644
--- a/lib/pygeoip/const.py
+++ b/lib/pygeoip/const.py
@@ -1,431 +1,382 @@
-# -*- coding: utf-8 -*-
-"""
-Constants needed for the binary parser. Part of the pygeoip package.
-
-@author: Jennifer Ennis
-
-@license: Copyright(C) 2004 MaxMind LLC
-
-This program is free software: you can redistribute it and/or modify
-it under the terms of the GNU Lesser General Public License as published by
-the Free Software Foundation, either version 3 of the License, or
-(at your option) any later version.
-
-This program is distributed in the hope that it will be useful,
-but WITHOUT ANY WARRANTY; without even the implied warranty of
-MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-GNU General Public License for more details.
-
-You should have received a copy of the GNU Lesser General Public License
-along with this program. If not, see .
-"""
-
-from platform import python_version_tuple
-
-PY2 = python_version_tuple()[0] == '2'
-PY3 = python_version_tuple()[0] == '3'
-
-GEOIP_STANDARD = 0
-GEOIP_MEMORY_CACHE = 1
-
-DMA_MAP = {
- 500: 'Portland-Auburn, ME',
- 501: 'New York, NY',
- 502: 'Binghamton, NY',
- 503: 'Macon, GA',
- 504: 'Philadelphia, PA',
- 505: 'Detroit, MI',
- 506: 'Boston, MA',
- 507: 'Savannah, GA',
- 508: 'Pittsburgh, PA',
- 509: 'Ft Wayne, IN',
- 510: 'Cleveland, OH',
- 511: 'Washington, DC',
- 512: 'Baltimore, MD',
- 513: 'Flint, MI',
- 514: 'Buffalo, NY',
- 515: 'Cincinnati, OH',
- 516: 'Erie, PA',
- 517: 'Charlotte, NC',
- 518: 'Greensboro, NC',
- 519: 'Charleston, SC',
- 520: 'Augusta, GA',
- 521: 'Providence, RI',
- 522: 'Columbus, GA',
- 523: 'Burlington, VT',
- 524: 'Atlanta, GA',
- 525: 'Albany, GA',
- 526: 'Utica-Rome, NY',
- 527: 'Indianapolis, IN',
- 528: 'Miami, FL',
- 529: 'Louisville, KY',
- 530: 'Tallahassee, FL',
- 531: 'Tri-Cities, TN',
- 532: 'Albany-Schenectady-Troy, NY',
- 533: 'Hartford, CT',
- 534: 'Orlando, FL',
- 535: 'Columbus, OH',
- 536: 'Youngstown-Warren, OH',
- 537: 'Bangor, ME',
- 538: 'Rochester, NY',
- 539: 'Tampa, FL',
- 540: 'Traverse City-Cadillac, MI',
- 541: 'Lexington, KY',
- 542: 'Dayton, OH',
- 543: 'Springfield-Holyoke, MA',
- 544: 'Norfolk-Portsmouth, VA',
- 545: 'Greenville-New Bern-Washington, NC',
- 546: 'Columbia, SC',
- 547: 'Toledo, OH',
- 548: 'West Palm Beach, FL',
- 549: 'Watertown, NY',
- 550: 'Wilmington, NC',
- 551: 'Lansing, MI',
- 552: 'Presque Isle, ME',
- 553: 'Marquette, MI',
- 554: 'Wheeling, WV',
- 555: 'Syracuse, NY',
- 556: 'Richmond-Petersburg, VA',
- 557: 'Knoxville, TN',
- 558: 'Lima, OH',
- 559: 'Bluefield-Beckley-Oak Hill, WV',
- 560: 'Raleigh-Durham, NC',
- 561: 'Jacksonville, FL',
- 563: 'Grand Rapids, MI',
- 564: 'Charleston-Huntington, WV',
- 565: 'Elmira, NY',
- 566: 'Harrisburg-Lancaster-Lebanon-York, PA',
- 567: 'Greenville-Spartenburg, SC',
- 569: 'Harrisonburg, VA',
- 570: 'Florence-Myrtle Beach, SC',
- 571: 'Ft Myers, FL',
- 573: 'Roanoke-Lynchburg, VA',
- 574: 'Johnstown-Altoona, PA',
- 575: 'Chattanooga, TN',
- 576: 'Salisbury, MD',
- 577: 'Wilkes Barre-Scranton, PA',
- 581: 'Terre Haute, IN',
- 582: 'Lafayette, IN',
- 583: 'Alpena, MI',
- 584: 'Charlottesville, VA',
- 588: 'South Bend, IN',
- 592: 'Gainesville, FL',
- 596: 'Zanesville, OH',
- 597: 'Parkersburg, WV',
- 598: 'Clarksburg-Weston, WV',
- 600: 'Corpus Christi, TX',
- 602: 'Chicago, IL',
- 603: 'Joplin-Pittsburg, MO',
- 604: 'Columbia-Jefferson City, MO',
- 605: 'Topeka, KS',
- 606: 'Dothan, AL',
- 609: 'St Louis, MO',
- 610: 'Rockford, IL',
- 611: 'Rochester-Mason City-Austin, MN',
- 612: 'Shreveport, LA',
- 613: 'Minneapolis-St Paul, MN',
- 616: 'Kansas City, MO',
- 617: 'Milwaukee, WI',
- 618: 'Houston, TX',
- 619: 'Springfield, MO',
- 620: 'Tuscaloosa, AL',
- 622: 'New Orleans, LA',
- 623: 'Dallas-Fort Worth, TX',
- 624: 'Sioux City, IA',
- 625: 'Waco-Temple-Bryan, TX',
- 626: 'Victoria, TX',
- 627: 'Wichita Falls, TX',
- 628: 'Monroe, LA',
- 630: 'Birmingham, AL',
- 631: 'Ottumwa-Kirksville, IA',
- 632: 'Paducah, KY',
- 633: 'Odessa-Midland, TX',
- 634: 'Amarillo, TX',
- 635: 'Austin, TX',
- 636: 'Harlingen, TX',
- 637: 'Cedar Rapids-Waterloo, IA',
- 638: 'St Joseph, MO',
- 639: 'Jackson, TN',
- 640: 'Memphis, TN',
- 641: 'San Antonio, TX',
- 642: 'Lafayette, LA',
- 643: 'Lake Charles, LA',
- 644: 'Alexandria, LA',
- 646: 'Anniston, AL',
- 647: 'Greenwood-Greenville, MS',
- 648: 'Champaign-Springfield-Decatur, IL',
- 649: 'Evansville, IN',
- 650: 'Oklahoma City, OK',
- 651: 'Lubbock, TX',
- 652: 'Omaha, NE',
- 656: 'Panama City, FL',
- 657: 'Sherman, TX',
- 658: 'Green Bay-Appleton, WI',
- 659: 'Nashville, TN',
- 661: 'San Angelo, TX',
- 662: 'Abilene-Sweetwater, TX',
- 669: 'Madison, WI',
- 670: 'Ft Smith-Fay-Springfield, AR',
- 671: 'Tulsa, OK',
- 673: 'Columbus-Tupelo-West Point, MS',
- 675: 'Peoria-Bloomington, IL',
- 676: 'Duluth, MN',
- 678: 'Wichita, KS',
- 679: 'Des Moines, IA',
- 682: 'Davenport-Rock Island-Moline, IL',
- 686: 'Mobile, AL',
- 687: 'Minot-Bismarck-Dickinson, ND',
- 691: 'Huntsville, AL',
- 692: 'Beaumont-Port Author, TX',
- 693: 'Little Rock-Pine Bluff, AR',
- 698: 'Montgomery, AL',
- 702: 'La Crosse-Eau Claire, WI',
- 705: 'Wausau-Rhinelander, WI',
- 709: 'Tyler-Longview, TX',
- 710: 'Hattiesburg-Laurel, MS',
- 711: 'Meridian, MS',
- 716: 'Baton Rouge, LA',
- 717: 'Quincy, IL',
- 718: 'Jackson, MS',
- 722: 'Lincoln-Hastings, NE',
- 724: 'Fargo-Valley City, ND',
- 725: 'Sioux Falls, SD',
- 734: 'Jonesboro, AR',
- 736: 'Bowling Green, KY',
- 737: 'Mankato, MN',
- 740: 'North Platte, NE',
- 743: 'Anchorage, AK',
- 744: 'Honolulu, HI',
- 745: 'Fairbanks, AK',
- 746: 'Biloxi-Gulfport, MS',
- 747: 'Juneau, AK',
- 749: 'Laredo, TX',
- 751: 'Denver, CO',
- 752: 'Colorado Springs, CO',
- 753: 'Phoenix, AZ',
- 754: 'Butte-Bozeman, MT',
- 755: 'Great Falls, MT',
- 756: 'Billings, MT',
- 757: 'Boise, ID',
- 758: 'Idaho Falls-Pocatello, ID',
- 759: 'Cheyenne, WY',
- 760: 'Twin Falls, ID',
- 762: 'Missoula, MT',
- 764: 'Rapid City, SD',
- 765: 'El Paso, TX',
- 766: 'Helena, MT',
- 767: 'Casper-Riverton, WY',
- 770: 'Salt Lake City, UT',
- 771: 'Yuma, AZ',
- 773: 'Grand Junction, CO',
- 789: 'Tucson, AZ',
- 790: 'Albuquerque, NM',
- 798: 'Glendive, MT',
- 800: 'Bakersfield, CA',
- 801: 'Eugene, OR',
- 802: 'Eureka, CA',
- 803: 'Los Angeles, CA',
- 804: 'Palm Springs, CA',
- 807: 'San Francisco, CA',
- 810: 'Yakima-Pasco, WA',
- 811: 'Reno, NV',
- 813: 'Medford-Klamath Falls, OR',
- 819: 'Seattle-Tacoma, WA',
- 820: 'Portland, OR',
- 821: 'Bend, OR',
- 825: 'San Diego, CA',
- 828: 'Monterey-Salinas, CA',
- 839: 'Las Vegas, NV',
- 855: 'Santa Barbara, CA',
- 862: 'Sacramento, CA',
- 866: 'Fresno, CA',
- 868: 'Chico-Redding, CA',
- 881: 'Spokane, WA'
-}
-
-COUNTRY_CODES = (
- '',
- 'AP', 'EU', 'AD', 'AE', 'AF', 'AG', 'AI', 'AL', 'AM', 'AN', 'AO', 'AQ',
- 'AR', 'AS', 'AT', 'AU', 'AW', 'AZ', 'BA', 'BB', 'BD', 'BE', 'BF', 'BG',
- 'BH', 'BI', 'BJ', 'BM', 'BN', 'BO', 'BR', 'BS', 'BT', 'BV', 'BW', 'BY',
- 'BZ', 'CA', 'CC', 'CD', 'CF', 'CG', 'CH', 'CI', 'CK', 'CL', 'CM', 'CN',
- 'CO', 'CR', 'CU', 'CV', 'CX', 'CY', 'CZ', 'DE', 'DJ', 'DK', 'DM', 'DO',
- 'DZ', 'EC', 'EE', 'EG', 'EH', 'ER', 'ES', 'ET', 'FI', 'FJ', 'FK', 'FM',
- 'FO', 'FR', 'FX', 'GA', 'GB', 'GD', 'GE', 'GF', 'GH', 'GI', 'GL', 'GM',
- 'GN', 'GP', 'GQ', 'GR', 'GS', 'GT', 'GU', 'GW', 'GY', 'HK', 'HM', 'HN',
- 'HR', 'HT', 'HU', 'ID', 'IE', 'IL', 'IN', 'IO', 'IQ', 'IR', 'IS', 'IT',
- 'JM', 'JO', 'JP', 'KE', 'KG', 'KH', 'KI', 'KM', 'KN', 'KP', 'KR', 'KW',
- 'KY', 'KZ', 'LA', 'LB', 'LC', 'LI', 'LK', 'LR', 'LS', 'LT', 'LU', 'LV',
- 'LY', 'MA', 'MC', 'MD', 'MG', 'MH', 'MK', 'ML', 'MM', 'MN', 'MO', 'MP',
- 'MQ', 'MR', 'MS', 'MT', 'MU', 'MV', 'MW', 'MX', 'MY', 'MZ', 'NA', 'NC',
- 'NE', 'NF', 'NG', 'NI', 'NL', 'NO', 'NP', 'NR', 'NU', 'NZ', 'OM', 'PA',
- 'PE', 'PF', 'PG', 'PH', 'PK', 'PL', 'PM', 'PN', 'PR', 'PS', 'PT', 'PW',
- 'PY', 'QA', 'RE', 'RO', 'RU', 'RW', 'SA', 'SB', 'SC', 'SD', 'SE', 'SG',
- 'SH', 'SI', 'SJ', 'SK', 'SL', 'SM', 'SN', 'SO', 'SR', 'ST', 'SV', 'SY',
- 'SZ', 'TC', 'TD', 'TF', 'TG', 'TH', 'TJ', 'TK', 'TM', 'TN', 'TO', 'TL',
- 'TR', 'TT', 'TV', 'TW', 'TZ', 'UA', 'UG', 'UM', 'US', 'UY', 'UZ', 'VA',
- 'VC', 'VE', 'VG', 'VI', 'VN', 'VU', 'WF', 'WS', 'YE', 'YT', 'RS', 'ZA',
- 'ZM', 'ME', 'ZW', 'A1', 'A2', 'O1', 'AX', 'GG', 'IM', 'JE', 'BL', 'MF',
- 'BQ', 'SS'
-)
-
-COUNTRY_CODES3 = (
- '', 'AP', 'EU', 'AND', 'ARE', 'AFG', 'ATG', 'AIA', 'ALB', 'ARM', 'ANT',
- 'AGO', 'AQ', 'ARG', 'ASM', 'AUT', 'AUS', 'ABW', 'AZE', 'BIH', 'BRB', 'BGD',
- 'BEL', 'BFA', 'BGR', 'BHR', 'BDI', 'BEN', 'BMU', 'BRN', 'BOL', 'BRA',
- 'BHS', 'BTN', 'BV', 'BWA', 'BLR', 'BLZ', 'CAN', 'CC', 'COD', 'CAF', 'COG',
- 'CHE', 'CIV', 'COK', 'CHL', 'CMR', 'CHN', 'COL', 'CRI', 'CUB', 'CPV', 'CX',
- 'CYP', 'CZE', 'DEU', 'DJI', 'DNK', 'DMA', 'DOM', 'DZA', 'ECU', 'EST',
- 'EGY', 'ESH', 'ERI', 'ESP', 'ETH', 'FIN', 'FJI', 'FLK', 'FSM', 'FRO',
- 'FRA', 'FX', 'GAB', 'GBR', 'GRD', 'GEO', 'GUF', 'GHA', 'GIB', 'GRL', 'GMB',
- 'GIN', 'GLP', 'GNQ', 'GRC', 'GS', 'GTM', 'GUM', 'GNB', 'GUY', 'HKG', 'HM',
- 'HND', 'HRV', 'HTI', 'HUN', 'IDN', 'IRL', 'ISR', 'IND', 'IO', 'IRQ', 'IRN',
- 'ISL', 'ITA', 'JAM', 'JOR', 'JPN', 'KEN', 'KGZ', 'KHM', 'KIR', 'COM',
- 'KNA', 'PRK', 'KOR', 'KWT', 'CYM', 'KAZ', 'LAO', 'LBN', 'LCA', 'LIE',
- 'LKA', 'LBR', 'LSO', 'LTU', 'LUX', 'LVA', 'LBY', 'MAR', 'MCO', 'MDA',
- 'MDG', 'MHL', 'MKD', 'MLI', 'MMR', 'MNG', 'MAC', 'MNP', 'MTQ', 'MRT',
- 'MSR', 'MLT', 'MUS', 'MDV', 'MWI', 'MEX', 'MYS', 'MOZ', 'NAM', 'NCL',
- 'NER', 'NFK', 'NGA', 'NIC', 'NLD', 'NOR', 'NPL', 'NRU', 'NIU', 'NZL',
- 'OMN', 'PAN', 'PER', 'PYF', 'PNG', 'PHL', 'PAK', 'POL', 'SPM', 'PCN',
- 'PRI', 'PSE', 'PRT', 'PLW', 'PRY', 'QAT', 'REU', 'ROU', 'RUS', 'RWA',
- 'SAU', 'SLB', 'SYC', 'SDN', 'SWE', 'SGP', 'SHN', 'SVN', 'SJM', 'SVK',
- 'SLE', 'SMR', 'SEN', 'SOM', 'SUR', 'STP', 'SLV', 'SYR', 'SWZ', 'TCA',
- 'TCD', 'TF', 'TGO', 'THA', 'TJK', 'TKL', 'TLS', 'TKM', 'TUN', 'TON', 'TUR',
- 'TTO', 'TUV', 'TWN', 'TZA', 'UKR', 'UGA', 'UM', 'USA', 'URY', 'UZB', 'VAT',
- 'VCT', 'VEN', 'VGB', 'VIR', 'VNM', 'VUT', 'WLF', 'WSM', 'YEM', 'YT', 'SRB',
- 'ZAF', 'ZMB', 'MNE', 'ZWE', 'A1', 'A2', 'O1', 'ALA', 'GGY', 'IMN', 'JEY',
- 'BLM', 'MAF', 'BES', 'SSD'
-)
-
-COUNTRY_NAMES = (
- '', 'Asia/Pacific Region', 'Europe', 'Andorra', 'United Arab Emirates',
- 'Afghanistan', 'Antigua and Barbuda', 'Anguilla', 'Albania', 'Armenia',
- 'Netherlands Antilles', 'Angola', 'Antarctica', 'Argentina',
- 'American Samoa', 'Austria', 'Australia', 'Aruba', 'Azerbaijan',
- 'Bosnia and Herzegovina', 'Barbados', 'Bangladesh', 'Belgium',
- 'Burkina Faso', 'Bulgaria', 'Bahrain', 'Burundi', 'Benin', 'Bermuda',
- 'Brunei Darussalam', 'Bolivia', 'Brazil', 'Bahamas', 'Bhutan',
- 'Bouvet Island', 'Botswana', 'Belarus', 'Belize', 'Canada',
- 'Cocos (Keeling) Islands', 'Congo, The Democratic Republic of the',
- 'Central African Republic', 'Congo', 'Switzerland', 'Cote D\'Ivoire',
- 'Cook Islands', 'Chile', 'Cameroon', 'China', 'Colombia', 'Costa Rica',
- 'Cuba', 'Cape Verde', 'Christmas Island', 'Cyprus', 'Czech Republic',
- 'Germany', 'Djibouti', 'Denmark', 'Dominica', 'Dominican Republic',
- 'Algeria', 'Ecuador', 'Estonia', 'Egypt', 'Western Sahara', 'Eritrea',
- 'Spain', 'Ethiopia', 'Finland', 'Fiji', 'Falkland Islands (Malvinas)',
- 'Micronesia, Federated States of', 'Faroe Islands', 'France',
- 'France, Metropolitan', 'Gabon', 'United Kingdom', 'Grenada', 'Georgia',
- 'French Guiana', 'Ghana', 'Gibraltar', 'Greenland', 'Gambia', 'Guinea',
- 'Guadeloupe', 'Equatorial Guinea', 'Greece',
- 'South Georgia and the South Sandwich Islands', 'Guatemala', 'Guam',
- 'Guinea-Bissau', 'Guyana', 'Hong Kong',
- 'Heard Island and McDonald Islands', 'Honduras', 'Croatia', 'Haiti',
- 'Hungary', 'Indonesia', 'Ireland', 'Israel', 'India',
- 'British Indian Ocean Territory', 'Iraq', 'Iran, Islamic Republic of',
- 'Iceland', 'Italy', 'Jamaica', 'Jordan', 'Japan', 'Kenya', 'Kyrgyzstan',
- 'Cambodia', 'Kiribati', 'Comoros', 'Saint Kitts and Nevis',
- 'Korea, Democratic People\'s Republic of', 'Korea, Republic of', 'Kuwait',
- 'Cayman Islands', 'Kazakhstan', 'Lao People\'s Democratic Republic',
- 'Lebanon', 'Saint Lucia', 'Liechtenstein', 'Sri Lanka', 'Liberia',
- 'Lesotho', 'Lithuania', 'Luxembourg', 'Latvia', 'Libya', 'Morocco',
- 'Monaco', 'Moldova, Republic of', 'Madagascar', 'Marshall Islands',
- 'Macedonia', 'Mali', 'Myanmar', 'Mongolia', 'Macau',
- 'Northern Mariana Islands', 'Martinique', 'Mauritania', 'Montserrat',
- 'Malta', 'Mauritius', 'Maldives', 'Malawi', 'Mexico', 'Malaysia',
- 'Mozambique', 'Namibia', 'New Caledonia', 'Niger', 'Norfolk Island',
- 'Nigeria', 'Nicaragua', 'Netherlands', 'Norway', 'Nepal', 'Nauru', 'Niue',
- 'New Zealand', 'Oman', 'Panama', 'Peru', 'French Polynesia',
- 'Papua New Guinea', 'Philippines', 'Pakistan', 'Poland',
- 'Saint Pierre and Miquelon', 'Pitcairn Islands', 'Puerto Rico',
- 'Palestinian Territory', 'Portugal', 'Palau', 'Paraguay', 'Qatar',
- 'Reunion', 'Romania', 'Russian Federation', 'Rwanda', 'Saudi Arabia',
- 'Solomon Islands', 'Seychelles', 'Sudan', 'Sweden', 'Singapore',
- 'Saint Helena', 'Slovenia', 'Svalbard and Jan Mayen', 'Slovakia',
- 'Sierra Leone', 'San Marino', 'Senegal', 'Somalia', 'Suriname',
- 'Sao Tome and Principe', 'El Salvador', 'Syrian Arab Republic',
- 'Swaziland', 'Turks and Caicos Islands', 'Chad',
- 'French Southern Territories', 'Togo', 'Thailand', 'Tajikistan', 'Tokelau',
- 'Turkmenistan', 'Tunisia', 'Tonga', 'Timor-Leste', 'Turkey',
- 'Trinidad and Tobago', 'Tuvalu', 'Taiwan', 'Tanzania, United Republic of',
- 'Ukraine', 'Uganda', 'United States Minor Outlying Islands',
- 'United States', 'Uruguay', 'Uzbekistan', 'Holy See (Vatican City State)',
- 'Saint Vincent and the Grenadines', 'Venezuela', 'Virgin Islands, British',
- 'Virgin Islands, U.S.', 'Vietnam', 'Vanuatu', 'Wallis and Futuna', 'Samoa',
- 'Yemen', 'Mayotte', 'Serbia', 'South Africa', 'Zambia', 'Montenegro',
- 'Zimbabwe', 'Anonymous Proxy', 'Satellite Provider', 'Other',
- 'Aland Islands', 'Guernsey', 'Isle of Man', 'Jersey', 'Saint Barthelemy',
- 'Saint Martin', 'Bonaire, Sint Eustatius and Saba', 'South Sudan'
-)
-
-CONTINENT_NAMES = (
- '--', 'AS', 'EU', 'EU', 'AS', 'AS', 'NA', 'NA', 'EU', 'AS', 'NA', 'AF',
- 'AN', 'SA', 'OC', 'EU', 'OC', 'NA', 'AS', 'EU', 'NA', 'AS', 'EU', 'AF',
- 'EU', 'AS', 'AF', 'AF', 'NA', 'AS', 'SA', 'SA', 'NA', 'AS', 'AN', 'AF',
- 'EU', 'NA', 'NA', 'AS', 'AF', 'AF', 'AF', 'EU', 'AF', 'OC', 'SA', 'AF',
- 'AS', 'SA', 'NA', 'NA', 'AF', 'AS', 'AS', 'EU', 'EU', 'AF', 'EU', 'NA',
- 'NA', 'AF', 'SA', 'EU', 'AF', 'AF', 'AF', 'EU', 'AF', 'EU', 'OC', 'SA',
- 'OC', 'EU', 'EU', 'NA', 'AF', 'EU', 'NA', 'AS', 'SA', 'AF', 'EU', 'NA',
- 'AF', 'AF', 'NA', 'AF', 'EU', 'AN', 'NA', 'OC', 'AF', 'SA', 'AS', 'AN',
- 'NA', 'EU', 'NA', 'EU', 'AS', 'EU', 'AS', 'AS', 'AS', 'AS', 'AS', 'EU',
- 'EU', 'NA', 'AS', 'AS', 'AF', 'AS', 'AS', 'OC', 'AF', 'NA', 'AS', 'AS',
- 'AS', 'NA', 'AS', 'AS', 'AS', 'NA', 'EU', 'AS', 'AF', 'AF', 'EU', 'EU',
- 'EU', 'AF', 'AF', 'EU', 'EU', 'AF', 'OC', 'EU', 'AF', 'AS', 'AS', 'AS',
- 'OC', 'NA', 'AF', 'NA', 'EU', 'AF', 'AS', 'AF', 'NA', 'AS', 'AF', 'AF',
- 'OC', 'AF', 'OC', 'AF', 'NA', 'EU', 'EU', 'AS', 'OC', 'OC', 'OC', 'AS',
- 'NA', 'SA', 'OC', 'OC', 'AS', 'AS', 'EU', 'NA', 'OC', 'NA', 'AS', 'EU',
- 'OC', 'SA', 'AS', 'AF', 'EU', 'EU', 'AF', 'AS', 'OC', 'AF', 'AF', 'EU',
- 'AS', 'AF', 'EU', 'EU', 'EU', 'AF', 'EU', 'AF', 'AF', 'SA', 'AF', 'NA',
- 'AS', 'AF', 'NA', 'AF', 'AN', 'AF', 'AS', 'AS', 'OC', 'AS', 'AF', 'OC',
- 'AS', 'EU', 'NA', 'OC', 'AS', 'AF', 'EU', 'AF', 'OC', 'NA', 'SA', 'AS',
- 'EU', 'NA', 'SA', 'NA', 'NA', 'AS', 'OC', 'OC', 'OC', 'AS', 'AF', 'EU',
- 'AF', 'AF', 'EU', 'AF', '--', '--', '--', 'EU', 'EU', 'EU', 'EU', 'NA',
- 'NA', 'NA', 'AF'
-)
-
-# storage / caching flags
-STANDARD = 0
-MEMORY_CACHE = 1
-MMAP_CACHE = 8
-
-# Database structure constants
-COUNTRY_BEGIN = 16776960
-STATE_BEGIN_REV0 = 16700000
-STATE_BEGIN_REV1 = 16000000
-
-STRUCTURE_INFO_MAX_SIZE = 20
-DATABASE_INFO_MAX_SIZE = 100
-
-# Database editions
-COUNTRY_EDITION = 1
-COUNTRY_EDITION_V6 = 12
-REGION_EDITION_REV0 = 7
-REGION_EDITION_REV1 = 3
-CITY_EDITION_REV0 = 6
-CITY_EDITION_REV1 = 2
-CITY_EDITION_REV1_V6 = 30
-ORG_EDITION = 5
-ISP_EDITION = 4
-ASNUM_EDITION = 9
-ASNUM_EDITION_V6 = 21
-# Not yet supported databases
-PROXY_EDITION = 8
-NETSPEED_EDITION = 11
-
-# Collection of databases
-IPV6_EDITIONS = (COUNTRY_EDITION_V6, ASNUM_EDITION_V6, CITY_EDITION_REV1_V6)
-CITY_EDITIONS = (CITY_EDITION_REV0, CITY_EDITION_REV1, CITY_EDITION_REV1_V6)
-REGION_EDITIONS = (REGION_EDITION_REV0, REGION_EDITION_REV1)
-REGION_CITY_EDITIONS = REGION_EDITIONS + CITY_EDITIONS
-
-SEGMENT_RECORD_LENGTH = 3
-STANDARD_RECORD_LENGTH = 3
-ORG_RECORD_LENGTH = 4
-MAX_RECORD_LENGTH = 4
-MAX_ORG_RECORD_LENGTH = 300
-FULL_RECORD_LENGTH = 50
-
-US_OFFSET = 1
-CANADA_OFFSET = 677
-WORLD_OFFSET = 1353
-FIPS_RANGE = 360
-ENCODING = 'iso-8859-1'
+"""
+Constants needed for parsing binary GeoIP databases. Part of the
+pygeoip package.
+
+@author: Jennifer Ennis
+
+@license:
+Copyright(C) 2004 MaxMind LLC
+
+This program is free software: you can redistribute it and/or modify
+it under the terms of the GNU Lesser General Public License as published by
+the Free Software Foundation, either version 3 of the License, or
+(at your option) any later version.
+
+This program is distributed in the hope that it will be useful,
+but WITHOUT ANY WARRANTY; without even the implied warranty of
+MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+GNU General Public License for more details.
+
+You should have received a copy of the GNU Lesser General Public License
+along with this program. If not, see <http://www.gnu.org/licenses/lgpl.txt>.
+"""
+
+GEOIP_STANDARD = 0
+GEOIP_MEMORY_CACHE = 1
+
+DMA_MAP = {
+ 500 : 'Portland-Auburn, ME',
+ 501 : 'New York, NY',
+ 502 : 'Binghamton, NY',
+ 503 : 'Macon, GA',
+ 504 : 'Philadelphia, PA',
+ 505 : 'Detroit, MI',
+ 506 : 'Boston, MA',
+ 507 : 'Savannah, GA',
+ 508 : 'Pittsburgh, PA',
+ 509 : 'Ft Wayne, IN',
+ 510 : 'Cleveland, OH',
+ 511 : 'Washington, DC',
+ 512 : 'Baltimore, MD',
+ 513 : 'Flint, MI',
+ 514 : 'Buffalo, NY',
+ 515 : 'Cincinnati, OH',
+ 516 : 'Erie, PA',
+ 517 : 'Charlotte, NC',
+ 518 : 'Greensboro, NC',
+ 519 : 'Charleston, SC',
+ 520 : 'Augusta, GA',
+ 521 : 'Providence, RI',
+ 522 : 'Columbus, GA',
+ 523 : 'Burlington, VT',
+ 524 : 'Atlanta, GA',
+ 525 : 'Albany, GA',
+ 526 : 'Utica-Rome, NY',
+ 527 : 'Indianapolis, IN',
+ 528 : 'Miami, FL',
+ 529 : 'Louisville, KY',
+ 530 : 'Tallahassee, FL',
+ 531 : 'Tri-Cities, TN',
+ 532 : 'Albany-Schenectady-Troy, NY',
+ 533 : 'Hartford, CT',
+ 534 : 'Orlando, FL',
+ 535 : 'Columbus, OH',
+ 536 : 'Youngstown-Warren, OH',
+ 537 : 'Bangor, ME',
+ 538 : 'Rochester, NY',
+ 539 : 'Tampa, FL',
+ 540 : 'Traverse City-Cadillac, MI',
+ 541 : 'Lexington, KY',
+ 542 : 'Dayton, OH',
+ 543 : 'Springfield-Holyoke, MA',
+ 544 : 'Norfolk-Portsmouth, VA',
+ 545 : 'Greenville-New Bern-Washington, NC',
+ 546 : 'Columbia, SC',
+ 547 : 'Toledo, OH',
+ 548 : 'West Palm Beach, FL',
+ 549 : 'Watertown, NY',
+ 550 : 'Wilmington, NC',
+ 551 : 'Lansing, MI',
+ 552 : 'Presque Isle, ME',
+ 553 : 'Marquette, MI',
+ 554 : 'Wheeling, WV',
+ 555 : 'Syracuse, NY',
+ 556 : 'Richmond-Petersburg, VA',
+ 557 : 'Knoxville, TN',
+ 558 : 'Lima, OH',
+ 559 : 'Bluefield-Beckley-Oak Hill, WV',
+ 560 : 'Raleigh-Durham, NC',
+ 561 : 'Jacksonville, FL',
+ 563 : 'Grand Rapids, MI',
+ 564 : 'Charleston-Huntington, WV',
+ 565 : 'Elmira, NY',
+ 566 : 'Harrisburg-Lancaster-Lebanon-York, PA',
+ 567 : 'Greenville-Spartanburg, SC',
+ 569 : 'Harrisonburg, VA',
+ 570 : 'Florence-Myrtle Beach, SC',
+ 571 : 'Ft Myers, FL',
+ 573 : 'Roanoke-Lynchburg, VA',
+ 574 : 'Johnstown-Altoona, PA',
+ 575 : 'Chattanooga, TN',
+ 576 : 'Salisbury, MD',
+ 577 : 'Wilkes Barre-Scranton, PA',
+ 581 : 'Terre Haute, IN',
+ 582 : 'Lafayette, IN',
+ 583 : 'Alpena, MI',
+ 584 : 'Charlottesville, VA',
+ 588 : 'South Bend, IN',
+ 592 : 'Gainesville, FL',
+ 596 : 'Zanesville, OH',
+ 597 : 'Parkersburg, WV',
+ 598 : 'Clarksburg-Weston, WV',
+ 600 : 'Corpus Christi, TX',
+ 602 : 'Chicago, IL',
+ 603 : 'Joplin-Pittsburg, MO',
+ 604 : 'Columbia-Jefferson City, MO',
+ 605 : 'Topeka, KS',
+ 606 : 'Dothan, AL',
+ 609 : 'St Louis, MO',
+ 610 : 'Rockford, IL',
+ 611 : 'Rochester-Mason City-Austin, MN',
+ 612 : 'Shreveport, LA',
+ 613 : 'Minneapolis-St Paul, MN',
+ 616 : 'Kansas City, MO',
+ 617 : 'Milwaukee, WI',
+ 618 : 'Houston, TX',
+ 619 : 'Springfield, MO',
+ 620 : 'Tuscaloosa, AL',
+ 622 : 'New Orleans, LA',
+ 623 : 'Dallas-Fort Worth, TX',
+ 624 : 'Sioux City, IA',
+ 625 : 'Waco-Temple-Bryan, TX',
+ 626 : 'Victoria, TX',
+ 627 : 'Wichita Falls, TX',
+ 628 : 'Monroe, LA',
+ 630 : 'Birmingham, AL',
+ 631 : 'Ottumwa-Kirksville, IA',
+ 632 : 'Paducah, KY',
+ 633 : 'Odessa-Midland, TX',
+ 634 : 'Amarillo, TX',
+ 635 : 'Austin, TX',
+ 636 : 'Harlingen, TX',
+ 637 : 'Cedar Rapids-Waterloo, IA',
+ 638 : 'St Joseph, MO',
+ 639 : 'Jackson, TN',
+ 640 : 'Memphis, TN',
+ 641 : 'San Antonio, TX',
+ 642 : 'Lafayette, LA',
+ 643 : 'Lake Charles, LA',
+ 644 : 'Alexandria, LA',
+ 646 : 'Anniston, AL',
+ 647 : 'Greenwood-Greenville, MS',
+ 648 : 'Champaign-Springfield-Decatur, IL',
+ 649 : 'Evansville, IN',
+ 650 : 'Oklahoma City, OK',
+ 651 : 'Lubbock, TX',
+ 652 : 'Omaha, NE',
+ 656 : 'Panama City, FL',
+ 657 : 'Sherman, TX',
+ 658 : 'Green Bay-Appleton, WI',
+ 659 : 'Nashville, TN',
+ 661 : 'San Angelo, TX',
+ 662 : 'Abilene-Sweetwater, TX',
+ 669 : 'Madison, WI',
+ 670 : 'Ft Smith-Fay-Springfield, AR',
+ 671 : 'Tulsa, OK',
+ 673 : 'Columbus-Tupelo-West Point, MS',
+ 675 : 'Peoria-Bloomington, IL',
+ 676 : 'Duluth, MN',
+ 678 : 'Wichita, KS',
+ 679 : 'Des Moines, IA',
+ 682 : 'Davenport-Rock Island-Moline, IL',
+ 686 : 'Mobile, AL',
+ 687 : 'Minot-Bismarck-Dickinson, ND',
+ 691 : 'Huntsville, AL',
+ 692 : 'Beaumont-Port Arthur, TX',
+ 693 : 'Little Rock-Pine Bluff, AR',
+ 698 : 'Montgomery, AL',
+ 702 : 'La Crosse-Eau Claire, WI',
+ 705 : 'Wausau-Rhinelander, WI',
+ 709 : 'Tyler-Longview, TX',
+ 710 : 'Hattiesburg-Laurel, MS',
+ 711 : 'Meridian, MS',
+ 716 : 'Baton Rouge, LA',
+ 717 : 'Quincy, IL',
+ 718 : 'Jackson, MS',
+ 722 : 'Lincoln-Hastings, NE',
+ 724 : 'Fargo-Valley City, ND',
+ 725 : 'Sioux Falls, SD',
+ 734 : 'Jonesboro, AR',
+ 736 : 'Bowling Green, KY',
+ 737 : 'Mankato, MN',
+ 740 : 'North Platte, NE',
+ 743 : 'Anchorage, AK',
+ 744 : 'Honolulu, HI',
+ 745 : 'Fairbanks, AK',
+ 746 : 'Biloxi-Gulfport, MS',
+ 747 : 'Juneau, AK',
+ 749 : 'Laredo, TX',
+ 751 : 'Denver, CO',
+ 752 : 'Colorado Springs, CO',
+ 753 : 'Phoenix, AZ',
+ 754 : 'Butte-Bozeman, MT',
+ 755 : 'Great Falls, MT',
+ 756 : 'Billings, MT',
+ 757 : 'Boise, ID',
+ 758 : 'Idaho Falls-Pocatello, ID',
+ 759 : 'Cheyenne, WY',
+ 760 : 'Twin Falls, ID',
+ 762 : 'Missoula, MT',
+ 764 : 'Rapid City, SD',
+ 765 : 'El Paso, TX',
+ 766 : 'Helena, MT',
+ 767 : 'Casper-Riverton, WY',
+ 770 : 'Salt Lake City, UT',
+ 771 : 'Yuma, AZ',
+ 773 : 'Grand Junction, CO',
+ 789 : 'Tucson, AZ',
+ 790 : 'Albuquerque, NM',
+ 798 : 'Glendive, MT',
+ 800 : 'Bakersfield, CA',
+ 801 : 'Eugene, OR',
+ 802 : 'Eureka, CA',
+ 803 : 'Los Angeles, CA',
+ 804 : 'Palm Springs, CA',
+ 807 : 'San Francisco, CA',
+ 810 : 'Yakima-Pasco, WA',
+ 811 : 'Reno, NV',
+ 813 : 'Medford-Klamath Falls, OR',
+ 819 : 'Seattle-Tacoma, WA',
+ 820 : 'Portland, OR',
+ 821 : 'Bend, OR',
+ 825 : 'San Diego, CA',
+ 828 : 'Monterey-Salinas, CA',
+ 839 : 'Las Vegas, NV',
+ 855 : 'Santa Barbara, CA',
+ 862 : 'Sacramento, CA',
+ 866 : 'Fresno, CA',
+ 868 : 'Chico-Redding, CA',
+ 881 : 'Spokane, WA'
+ }
+
+COUNTRY_CODES = (
+ '', 'AP', 'EU', 'AD', 'AE', 'AF', 'AG', 'AI', 'AL', 'AM', 'AN', 'AO', 'AQ',
+ 'AR', 'AS', 'AT', 'AU', 'AW', 'AZ', 'BA', 'BB', 'BD', 'BE', 'BF', 'BG', 'BH',
+ 'BI', 'BJ', 'BM', 'BN', 'BO', 'BR', 'BS', 'BT', 'BV', 'BW', 'BY', 'BZ', 'CA',
+ 'CC', 'CD', 'CF', 'CG', 'CH', 'CI', 'CK', 'CL', 'CM', 'CN', 'CO', 'CR', 'CU',
+ 'CV', 'CX', 'CY', 'CZ', 'DE', 'DJ', 'DK', 'DM', 'DO', 'DZ', 'EC', 'EE', 'EG',
+ 'EH', 'ER', 'ES', 'ET', 'FI', 'FJ', 'FK', 'FM', 'FO', 'FR', 'FX', 'GA', 'GB',
+ 'GD', 'GE', 'GF', 'GH', 'GI', 'GL', 'GM', 'GN', 'GP', 'GQ', 'GR', 'GS', 'GT',
+ 'GU', 'GW', 'GY', 'HK', 'HM', 'HN', 'HR', 'HT', 'HU', 'ID', 'IE', 'IL', 'IN',
+ 'IO', 'IQ', 'IR', 'IS', 'IT', 'JM', 'JO', 'JP', 'KE', 'KG', 'KH', 'KI', 'KM',
+ 'KN', 'KP', 'KR', 'KW', 'KY', 'KZ', 'LA', 'LB', 'LC', 'LI', 'LK', 'LR', 'LS',
+ 'LT', 'LU', 'LV', 'LY', 'MA', 'MC', 'MD', 'MG', 'MH', 'MK', 'ML', 'MM', 'MN',
+ 'MO', 'MP', 'MQ', 'MR', 'MS', 'MT', 'MU', 'MV', 'MW', 'MX', 'MY', 'MZ', 'NA',
+ 'NC', 'NE', 'NF', 'NG', 'NI', 'NL', 'NO', 'NP', 'NR', 'NU', 'NZ', 'OM', 'PA',
+ 'PE', 'PF', 'PG', 'PH', 'PK', 'PL', 'PM', 'PN', 'PR', 'PS', 'PT', 'PW', 'PY',
+ 'QA', 'RE', 'RO', 'RU', 'RW', 'SA', 'SB', 'SC', 'SD', 'SE', 'SG', 'SH', 'SI',
+ 'SJ', 'SK', 'SL', 'SM', 'SN', 'SO', 'SR', 'ST', 'SV', 'SY', 'SZ', 'TC', 'TD',
+ 'TF', 'TG', 'TH', 'TJ', 'TK', 'TM', 'TN', 'TO', 'TL', 'TR', 'TT', 'TV', 'TW',
+ 'TZ', 'UA', 'UG', 'UM', 'US', 'UY', 'UZ', 'VA', 'VC', 'VE', 'VG', 'VI', 'VN',
+ 'VU', 'WF', 'WS', 'YE', 'YT', 'RS', 'ZA', 'ZM', 'ME', 'ZW', 'A1', 'A2', 'O1',
+ 'AX', 'GG', 'IM', 'JE', 'BL', 'MF'
+ )
+
+COUNTRY_CODES3 = (
+ '','AP','EU','AND','ARE','AFG','ATG','AIA','ALB','ARM','ANT','AGO','AQ','ARG',
+ 'ASM','AUT','AUS','ABW','AZE','BIH','BRB','BGD','BEL','BFA','BGR','BHR','BDI',
+ 'BEN','BMU','BRN','BOL','BRA','BHS','BTN','BV','BWA','BLR','BLZ','CAN','CC',
+ 'COD','CAF','COG','CHE','CIV','COK','CHL','CMR','CHN','COL','CRI','CUB','CPV',
+ 'CX','CYP','CZE','DEU','DJI','DNK','DMA','DOM','DZA','ECU','EST','EGY','ESH',
+ 'ERI','ESP','ETH','FIN','FJI','FLK','FSM','FRO','FRA','FX','GAB','GBR','GRD',
+ 'GEO','GUF','GHA','GIB','GRL','GMB','GIN','GLP','GNQ','GRC','GS','GTM','GUM',
+ 'GNB','GUY','HKG','HM','HND','HRV','HTI','HUN','IDN','IRL','ISR','IND','IO',
+ 'IRQ','IRN','ISL','ITA','JAM','JOR','JPN','KEN','KGZ','KHM','KIR','COM','KNA',
+ 'PRK','KOR','KWT','CYM','KAZ','LAO','LBN','LCA','LIE','LKA','LBR','LSO','LTU',
+ 'LUX','LVA','LBY','MAR','MCO','MDA','MDG','MHL','MKD','MLI','MMR','MNG','MAC',
+ 'MNP','MTQ','MRT','MSR','MLT','MUS','MDV','MWI','MEX','MYS','MOZ','NAM','NCL',
+ 'NER','NFK','NGA','NIC','NLD','NOR','NPL','NRU','NIU','NZL','OMN','PAN','PER',
+ 'PYF','PNG','PHL','PAK','POL','SPM','PCN','PRI','PSE','PRT','PLW','PRY','QAT',
+ 'REU','ROU','RUS','RWA','SAU','SLB','SYC','SDN','SWE','SGP','SHN','SVN','SJM',
+ 'SVK','SLE','SMR','SEN','SOM','SUR','STP','SLV','SYR','SWZ','TCA','TCD','TF',
+ 'TGO','THA','TJK','TKL','TLS','TKM','TUN','TON','TUR','TTO','TUV','TWN','TZA',
+ 'UKR','UGA','UM','USA','URY','UZB','VAT','VCT','VEN','VGB','VIR','VNM','VUT',
+ 'WLF','WSM','YEM','YT','SRB','ZAF','ZMB','MNE','ZWE','A1','A2','O1',
+ 'ALA','GGY','IMN','JEY','BLM','MAF'
+ )
+
+COUNTRY_NAMES = (
+ "", "Asia/Pacific Region", "Europe", "Andorra", "United Arab Emirates",
+ "Afghanistan", "Antigua and Barbuda", "Anguilla", "Albania", "Armenia",
+ "Netherlands Antilles", "Angola", "Antarctica", "Argentina", "American Samoa",
+ "Austria", "Australia", "Aruba", "Azerbaijan", "Bosnia and Herzegovina",
+ "Barbados", "Bangladesh", "Belgium", "Burkina Faso", "Bulgaria", "Bahrain",
+ "Burundi", "Benin", "Bermuda", "Brunei Darussalam", "Bolivia", "Brazil",
+ "Bahamas", "Bhutan", "Bouvet Island", "Botswana", "Belarus", "Belize",
+ "Canada", "Cocos (Keeling) Islands", "Congo, The Democratic Republic of the",
+ "Central African Republic", "Congo", "Switzerland", "Cote D'Ivoire", "Cook Islands",
+ "Chile", "Cameroon", "China", "Colombia", "Costa Rica", "Cuba", "Cape Verde",
+ "Christmas Island", "Cyprus", "Czech Republic", "Germany", "Djibouti",
+ "Denmark", "Dominica", "Dominican Republic", "Algeria", "Ecuador", "Estonia",
+ "Egypt", "Western Sahara", "Eritrea", "Spain", "Ethiopia", "Finland", "Fiji",
+ "Falkland Islands (Malvinas)", "Micronesia, Federated States of", "Faroe Islands",
+ "France", "France, Metropolitan", "Gabon", "United Kingdom",
+ "Grenada", "Georgia", "French Guiana", "Ghana", "Gibraltar", "Greenland",
+ "Gambia", "Guinea", "Guadeloupe", "Equatorial Guinea", "Greece",
+ "South Georgia and the South Sandwich Islands",
+ "Guatemala", "Guam", "Guinea-Bissau",
+ "Guyana", "Hong Kong", "Heard Island and McDonald Islands", "Honduras",
+ "Croatia", "Haiti", "Hungary", "Indonesia", "Ireland", "Israel", "India",
+ "British Indian Ocean Territory", "Iraq", "Iran, Islamic Republic of",
+ "Iceland", "Italy", "Jamaica", "Jordan", "Japan", "Kenya", "Kyrgyzstan",
+ "Cambodia", "Kiribati", "Comoros", "Saint Kitts and Nevis",
+ "Korea, Democratic People's Republic of",
+ "Korea, Republic of", "Kuwait", "Cayman Islands",
+ "Kazakstan", "Lao People's Democratic Republic", "Lebanon", "Saint Lucia",
+ "Liechtenstein", "Sri Lanka", "Liberia", "Lesotho", "Lithuania", "Luxembourg",
+ "Latvia", "Libyan Arab Jamahiriya", "Morocco", "Monaco", "Moldova, Republic of",
+ "Madagascar", "Marshall Islands", "Macedonia",
+ "Mali", "Myanmar", "Mongolia", "Macau", "Northern Mariana Islands",
+ "Martinique", "Mauritania", "Montserrat", "Malta", "Mauritius", "Maldives",
+ "Malawi", "Mexico", "Malaysia", "Mozambique", "Namibia", "New Caledonia",
+ "Niger", "Norfolk Island", "Nigeria", "Nicaragua", "Netherlands", "Norway",
+ "Nepal", "Nauru", "Niue", "New Zealand", "Oman", "Panama", "Peru", "French Polynesia",
+ "Papua New Guinea", "Philippines", "Pakistan", "Poland", "Saint Pierre and Miquelon",
+ "Pitcairn Islands", "Puerto Rico", "Palestinian Territory",
+ "Portugal", "Palau", "Paraguay", "Qatar", "Reunion", "Romania",
+ "Russian Federation", "Rwanda", "Saudi Arabia", "Solomon Islands",
+ "Seychelles", "Sudan", "Sweden", "Singapore", "Saint Helena", "Slovenia",
+ "Svalbard and Jan Mayen", "Slovakia", "Sierra Leone", "San Marino", "Senegal",
+ "Somalia", "Suriname", "Sao Tome and Principe", "El Salvador", "Syrian Arab Republic",
+ "Swaziland", "Turks and Caicos Islands", "Chad", "French Southern Territories",
+ "Togo", "Thailand", "Tajikistan", "Tokelau", "Turkmenistan",
+ "Tunisia", "Tonga", "Timor-Leste", "Turkey", "Trinidad and Tobago", "Tuvalu",
+ "Taiwan", "Tanzania, United Republic of", "Ukraine",
+ "Uganda", "United States Minor Outlying Islands", "United States", "Uruguay",
+ "Uzbekistan", "Holy See (Vatican City State)", "Saint Vincent and the Grenadines",
+ "Venezuela", "Virgin Islands, British", "Virgin Islands, U.S.",
+ "Vietnam", "Vanuatu", "Wallis and Futuna", "Samoa", "Yemen", "Mayotte",
+ "Serbia", "South Africa", "Zambia", "Montenegro", "Zimbabwe",
+ "Anonymous Proxy","Satellite Provider","Other",
+ "Aland Islands","Guernsey","Isle of Man","Jersey","Saint Barthelemy","Saint Martin"
+ )
+
+# storage / caching flags
+STANDARD = 0
+MEMORY_CACHE = 1
+MMAP_CACHE = 8
+
+# Database structure constants
+COUNTRY_BEGIN = 16776960
+STATE_BEGIN_REV0 = 16700000
+STATE_BEGIN_REV1 = 16000000
+
+STRUCTURE_INFO_MAX_SIZE = 20
+DATABASE_INFO_MAX_SIZE = 100
+
+# Database editions
+COUNTRY_EDITION = 1
+REGION_EDITION_REV0 = 7
+REGION_EDITION_REV1 = 3
+CITY_EDITION_REV0 = 6
+CITY_EDITION_REV1 = 2
+ORG_EDITION = 5
+ISP_EDITION = 4
+PROXY_EDITION = 8
+ASNUM_EDITION = 9
+NETSPEED_EDITION = 11
+COUNTRY_EDITION_V6 = 12
+
+SEGMENT_RECORD_LENGTH = 3
+STANDARD_RECORD_LENGTH = 3
+ORG_RECORD_LENGTH = 4
+MAX_RECORD_LENGTH = 4
+MAX_ORG_RECORD_LENGTH = 300
+FULL_RECORD_LENGTH = 50
+
+US_OFFSET = 1
+CANADA_OFFSET = 677
+WORLD_OFFSET = 1353
+FIPS_RANGE = 360
+
+
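
For context, and not part of the patch itself: a minimal sketch of how the caching flags defined above and the region-to-timezone table in the following file fit together. It assumes a city-edition database file (here called GeoLiteCity.dat, a hypothetical local path) and uses the package's public entry points (pygeoip.GeoIP, record_by_addr, time_zone_by_country_and_region).

    import pygeoip
    from pygeoip.timezone import time_zone_by_country_and_region

    # MEMORY_CACHE (= 1, defined above) loads the whole database into RAM
    # for faster repeated lookups; STANDARD (= 0) reads records from disk.
    gi = pygeoip.GeoIP('GeoLiteCity.dat', pygeoip.MEMORY_CACHE)

    # Look up a city-level record for an address (hypothetical example IP).
    record = gi.record_by_addr('64.233.161.99')

    # timezone.py maps (country code, region code) pairs to tz names.
    print(time_zone_by_country_and_region('US', 'CA'))  # 'America/Los_Angeles'
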
diff --git a/lib/pygeoip/timezone.py b/lib/pygeoip/timezone.py
index 6346709..033c8d6 100644
--- a/lib/pygeoip/timezone.py
+++ b/lib/pygeoip/timezone.py
@@ -1,760 +1,714 @@
-# -*- coding: utf-8 -*-
-"""
-Time zone functions. Part of the pygeoip package.
-
-@author: Jennifer Ennis
-
-@license: Copyright(C) 2004 MaxMind LLC
-
-This program is free software: you can redistribute it and/or modify
-it under the terms of the GNU Lesser General Public License as published by
-the Free Software Foundation, either version 3 of the License, or
-(at your option) any later version.
-
-This program is distributed in the hope that it will be useful,
-but WITHOUT ANY WARRANTY; without even the implied warranty of
-MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-GNU General Public License for more details.
-
-You should have received a copy of the GNU Lesser General Public License
-along with this program. If not, see <http://www.gnu.org/licenses/lgpl.txt>.
-"""
-
__all__ = ['time_zone_by_country_and_region']
-_country = {
- 'AD': 'Europe/Andorra',
- 'AE': 'Asia/Dubai',
- 'AF': 'Asia/Kabul',
- 'AG': 'America/Antigua',
- 'AI': 'America/Anguilla',
- 'AL': 'Europe/Tirane',
- 'AM': 'Asia/Yerevan',
- 'AN': 'America/Curacao',
- 'AO': 'Africa/Luanda',
- 'AR': {
- '01': 'America/Argentina/Buenos_Aires',
- '02': 'America/Argentina/Catamarca',
- '03': 'America/Argentina/Tucuman',
- '04': 'America/Argentina/Rio_Gallegos',
- '05': 'America/Argentina/Cordoba',
- '06': 'America/Argentina/Tucuman',
- '07': 'America/Argentina/Buenos_Aires',
- '08': 'America/Argentina/Buenos_Aires',
- '09': 'America/Argentina/Tucuman',
- '10': 'America/Argentina/Jujuy',
- '11': 'America/Argentina/San_Luis',
- '12': 'America/Argentina/La_Rioja',
- '13': 'America/Argentina/Mendoza',
- '14': 'America/Argentina/Buenos_Aires',
- '15': 'America/Argentina/San_Luis',
- '16': 'America/Argentina/Buenos_Aires',
- '17': 'America/Argentina/Salta',
- '18': 'America/Argentina/San_Juan',
- '19': 'America/Argentina/San_Luis',
- '20': 'America/Argentina/Rio_Gallegos',
- '21': 'America/Argentina/Buenos_Aires',
- '22': 'America/Argentina/Catamarca',
- '23': 'America/Argentina/Ushuaia',
- '24': 'America/Argentina/Tucuman'
- },
- 'AS': 'US/Samoa',
- 'AT': 'Europe/Vienna',
- 'AU': {
- '01': 'Australia/Canberra',
- '02': 'Australia/NSW',
- '03': 'Australia/North',
- '04': 'Australia/Queensland',
- '05': 'Australia/South',
- '06': 'Australia/Tasmania',
- '07': 'Australia/Victoria',
- '08': 'Australia/West'
- },
- 'AW': 'America/Aruba',
- 'AX': 'Europe/Mariehamn',
- 'AZ': 'Asia/Baku',
- 'BA': 'Europe/Sarajevo',
- 'BB': 'America/Barbados',
- 'BD': 'Asia/Dhaka',
- 'BE': 'Europe/Brussels',
- 'BF': 'Africa/Ouagadougou',
- 'BG': 'Europe/Sofia',
- 'BH': 'Asia/Bahrain',
- 'BI': 'Africa/Bujumbura',
- 'BJ': 'Africa/Porto-Novo',
- 'BL': 'America/St_Barthelemy',
- 'BM': 'Atlantic/Bermuda',
- 'BN': 'Asia/Brunei',
- 'BO': 'America/La_Paz',
- 'BQ': 'America/Curacao',
- 'BR': {
- '01': 'America/Rio_Branco',
- '02': 'America/Maceio',
- '03': 'America/Sao_Paulo',
- '04': 'America/Manaus',
- '05': 'America/Bahia',
- '06': 'America/Fortaleza',
- '07': 'America/Sao_Paulo',
- '08': 'America/Sao_Paulo',
- '11': 'America/Campo_Grande',
- '13': 'America/Belem',
- '14': 'America/Cuiaba',
- '15': 'America/Sao_Paulo',
- '16': 'America/Belem',
- '17': 'America/Recife',
- '18': 'America/Sao_Paulo',
- '20': 'America/Fortaleza',
- '21': 'America/Sao_Paulo',
- '22': 'America/Recife',
- '23': 'America/Sao_Paulo',
- '24': 'America/Porto_Velho',
- '25': 'America/Boa_Vista',
- '26': 'America/Sao_Paulo',
- '27': 'America/Sao_Paulo',
- '28': 'America/Maceio',
- '29': 'America/Sao_Paulo',
- '30': 'America/Recife',
- '31': 'America/Araguaina'
- },
- 'BS': 'America/Nassau',
- 'BT': 'Asia/Thimphu',
- 'BW': 'Africa/Gaborone',
- 'BY': 'Europe/Minsk',
- 'BZ': 'America/Belize',
- 'CA': {
- 'AB': 'America/Edmonton',
- 'BC': 'America/Vancouver',
- 'MB': 'America/Winnipeg',
- 'NB': 'America/Halifax',
- 'NL': 'America/St_Johns',
- 'NS': 'America/Halifax',
- 'NT': 'America/Yellowknife',
- 'NU': 'America/Rankin_Inlet',
- 'ON': 'America/Toronto',
- 'PE': 'America/Halifax',
- 'QC': 'America/Montreal',
- 'SK': 'America/Regina',
- 'YT': 'America/Whitehorse'
- },
- 'CC': 'Indian/Cocos',
- 'CD': {
- '02': 'Africa/Kinshasa',
- '05': 'Africa/Lubumbashi',
- '06': 'Africa/Kinshasa',
- '08': 'Africa/Kinshasa',
- '10': 'Africa/Lubumbashi',
- '11': 'Africa/Lubumbashi',
- '12': 'Africa/Lubumbashi'
- },
- 'CF': 'Africa/Bangui',
- 'CG': 'Africa/Brazzaville',
- 'CH': 'Europe/Zurich',
- 'CI': 'Africa/Abidjan',
- 'CK': 'Pacific/Rarotonga',
- 'CL': 'Chile/Continental',
- 'CM': 'Africa/Lagos',
- 'CN': {
- '01': 'Asia/Shanghai',
- '02': 'Asia/Shanghai',
- '03': 'Asia/Shanghai',
- '04': 'Asia/Shanghai',
- '05': 'Asia/Harbin',
- '06': 'Asia/Chongqing',
- '07': 'Asia/Shanghai',
- '08': 'Asia/Harbin',
- '09': 'Asia/Shanghai',
- '10': 'Asia/Shanghai',
- '11': 'Asia/Chongqing',
- '12': 'Asia/Shanghai',
- '13': 'Asia/Urumqi',
- '14': 'Asia/Chongqing',
- '15': 'Asia/Chongqing',
- '16': 'Asia/Chongqing',
- '18': 'Asia/Chongqing',
- '19': 'Asia/Harbin',
- '20': 'Asia/Harbin',
- '21': 'Asia/Chongqing',
- '22': 'Asia/Harbin',
- '23': 'Asia/Shanghai',
- '24': 'Asia/Chongqing',
- '25': 'Asia/Shanghai',
- '26': 'Asia/Chongqing',
- '28': 'Asia/Shanghai',
- '29': 'Asia/Chongqing',
- '30': 'Asia/Chongqing',
- '31': 'Asia/Chongqing',
- '32': 'Asia/Chongqing',
- '33': 'Asia/Chongqing'
- },
- 'CO': 'America/Bogota',
- 'CR': 'America/Costa_Rica',
- 'CU': 'America/Havana',
- 'CV': 'Atlantic/Cape_Verde',
- 'CW': 'America/Curacao',
- 'CX': 'Indian/Christmas',
- 'CY': 'Asia/Nicosia',
- 'CZ': 'Europe/Prague',
- 'DE': 'Europe/Berlin',
- 'DJ': 'Africa/Djibouti',
- 'DK': 'Europe/Copenhagen',
- 'DM': 'America/Dominica',
- 'DO': 'America/Santo_Domingo',
- 'DZ': 'Africa/Algiers',
- 'EC': {
- '01': 'Pacific/Galapagos',
- '02': 'America/Guayaquil',
- '03': 'America/Guayaquil',
- '04': 'America/Guayaquil',
- '05': 'America/Guayaquil',
- '06': 'America/Guayaquil',
- '07': 'America/Guayaquil',
- '08': 'America/Guayaquil',
- '09': 'America/Guayaquil',
- '10': 'America/Guayaquil',
- '11': 'America/Guayaquil',
- '12': 'America/Guayaquil',
- '13': 'America/Guayaquil',
- '14': 'America/Guayaquil',
- '15': 'America/Guayaquil',
- '17': 'America/Guayaquil',
- '18': 'America/Guayaquil',
- '19': 'America/Guayaquil',
- '20': 'America/Guayaquil',
- '22': 'America/Guayaquil'
- },
- 'EE': 'Europe/Tallinn',
- 'EG': 'Africa/Cairo',
- 'EH': 'Africa/El_Aaiun',
- 'ER': 'Africa/Asmera',
- 'ES': {
- '07': 'Europe/Madrid',
- '27': 'Europe/Madrid',
- '29': 'Europe/Madrid',
- '31': 'Europe/Madrid',
- '32': 'Europe/Madrid',
- '34': 'Europe/Madrid',
- '39': 'Europe/Madrid',
- '51': 'Africa/Ceuta',
- '52': 'Europe/Madrid',
- '53': 'Atlantic/Canary',
- '54': 'Europe/Madrid',
- '55': 'Europe/Madrid',
- '56': 'Europe/Madrid',
- '57': 'Europe/Madrid',
- '58': 'Europe/Madrid',
- '59': 'Europe/Madrid',
- '60': 'Europe/Madrid'
- },
- 'ET': 'Africa/Addis_Ababa',
- 'FI': 'Europe/Helsinki',
- 'FJ': 'Pacific/Fiji',
- 'FK': 'Atlantic/Stanley',
- 'FO': 'Atlantic/Faeroe',
- 'FR': 'Europe/Paris',
- 'FX': 'Europe/Paris',
- 'GA': 'Africa/Libreville',
- 'GB': 'Europe/London',
- 'GD': 'America/Grenada',
- 'GE': 'Asia/Tbilisi',
- 'GF': 'America/Cayenne',
- 'GG': 'Europe/Guernsey',
- 'GH': 'Africa/Accra',
- 'GI': 'Europe/Gibraltar',
- 'GL': {
- '01': 'America/Thule',
- '02': 'America/Godthab',
- '03': 'America/Godthab'
- },
- 'GM': 'Africa/Banjul',
- 'GN': 'Africa/Conakry',
- 'GP': 'America/Guadeloupe',
- 'GQ': 'Africa/Malabo',
- 'GR': 'Europe/Athens',
- 'GS': 'Atlantic/South_Georgia',
- 'GT': 'America/Guatemala',
- 'GU': 'Pacific/Guam',
- 'GW': 'Africa/Bissau',
- 'GY': 'America/Guyana',
- 'HK': 'Asia/Hong_Kong',
- 'HN': 'America/Tegucigalpa',
- 'HR': 'Europe/Zagreb',
- 'HT': 'America/Port-au-Prince',
- 'HU': 'Europe/Budapest',
- 'ID': {
- '01': 'Asia/Pontianak',
- '02': 'Asia/Makassar',
- '03': 'Asia/Jakarta',
- '04': 'Asia/Jakarta',
- '05': 'Asia/Jakarta',
- '06': 'Asia/Jakarta',
- '07': 'Asia/Jakarta',
- '08': 'Asia/Jakarta',
- '09': 'Asia/Jayapura',
- '10': 'Asia/Jakarta',
- '11': 'Asia/Pontianak',
- '12': 'Asia/Makassar',
- '13': 'Asia/Makassar',
- '14': 'Asia/Makassar',
- '15': 'Asia/Jakarta',
- '16': 'Asia/Makassar',
- '17': 'Asia/Makassar',
- '18': 'Asia/Makassar',
- '19': 'Asia/Pontianak',
- '20': 'Asia/Makassar',
- '21': 'Asia/Makassar',
- '22': 'Asia/Makassar',
- '23': 'Asia/Makassar',
- '24': 'Asia/Jakarta',
- '25': 'Asia/Pontianak',
- '26': 'Asia/Pontianak',
- '30': 'Asia/Jakarta',
- '31': 'Asia/Makassar',
- '33': 'Asia/Jakarta'
- },
- 'IE': 'Europe/Dublin',
- 'IL': 'Asia/Jerusalem',
- 'IM': 'Europe/Isle_of_Man',
- 'IN': 'Asia/Calcutta',
- 'IO': 'Indian/Chagos',
- 'IQ': 'Asia/Baghdad',
- 'IR': 'Asia/Tehran',
- 'IS': 'Atlantic/Reykjavik',
- 'IT': 'Europe/Rome',
- 'JE': 'Europe/Jersey',
- 'JM': 'America/Jamaica',
- 'JO': 'Asia/Amman',
- 'JP': 'Asia/Tokyo',
- 'KE': 'Africa/Nairobi',
- 'KG': 'Asia/Bishkek',
- 'KH': 'Asia/Phnom_Penh',
- 'KI': 'Pacific/Tarawa',
- 'KM': 'Indian/Comoro',
- 'KN': 'America/St_Kitts',
- 'KP': 'Asia/Pyongyang',
- 'KR': 'Asia/Seoul',
- 'KW': 'Asia/Kuwait',
- 'KY': 'America/Cayman',
- 'KZ': {
- '01': 'Asia/Almaty',
- '02': 'Asia/Almaty',
- '03': 'Asia/Qyzylorda',
- '04': 'Asia/Aqtobe',
- '05': 'Asia/Qyzylorda',
- '06': 'Asia/Aqtau',
- '07': 'Asia/Oral',
- '08': 'Asia/Qyzylorda',
- '09': 'Asia/Aqtau',
- '10': 'Asia/Qyzylorda',
- '11': 'Asia/Almaty',
- '12': 'Asia/Qyzylorda',
- '13': 'Asia/Aqtobe',
- '14': 'Asia/Qyzylorda',
- '15': 'Asia/Almaty',
- '16': 'Asia/Aqtobe',
- '17': 'Asia/Almaty'
- },
- 'LA': 'Asia/Vientiane',
- 'LB': 'Asia/Beirut',
- 'LC': 'America/St_Lucia',
- 'LI': 'Europe/Vaduz',
- 'LK': 'Asia/Colombo',
- 'LR': 'Africa/Monrovia',
- 'LS': 'Africa/Maseru',
- 'LT': 'Europe/Vilnius',
- 'LU': 'Europe/Luxembourg',
- 'LV': 'Europe/Riga',
- 'LY': 'Africa/Tripoli',
- 'MA': 'Africa/Casablanca',
- 'MC': 'Europe/Monaco',
- 'MD': 'Europe/Chisinau',
- 'ME': 'Europe/Podgorica',
- 'MF': 'America/Marigot',
- 'MG': 'Indian/Antananarivo',
- 'MK': 'Europe/Skopje',
- 'ML': 'Africa/Bamako',
- 'MM': 'Asia/Rangoon',
- 'MN': 'Asia/Choibalsan',
- 'MO': 'Asia/Macao',
- 'MP': 'Pacific/Saipan',
- 'MQ': 'America/Martinique',
- 'MR': 'Africa/Nouakchott',
- 'MS': 'America/Montserrat',
- 'MT': 'Europe/Malta',
- 'MU': 'Indian/Mauritius',
- 'MV': 'Indian/Maldives',
- 'MW': 'Africa/Blantyre',
- 'MX': {
- '01': 'America/Mexico_City',
- '02': 'America/Tijuana',
- '03': 'America/Hermosillo',
- '04': 'America/Merida',
- '05': 'America/Mexico_City',
- '06': 'America/Chihuahua',
- '07': 'America/Monterrey',
- '08': 'America/Mexico_City',
- '09': 'America/Mexico_City',
- '10': 'America/Mazatlan',
- '11': 'America/Mexico_City',
- '12': 'America/Mexico_City',
- '13': 'America/Mexico_City',
- '14': 'America/Mazatlan',
- '15': 'America/Chihuahua',
- '16': 'America/Mexico_City',
- '17': 'America/Mexico_City',
- '18': 'America/Mazatlan',
- '19': 'America/Monterrey',
- '20': 'America/Mexico_City',
- '21': 'America/Mexico_City',
- '22': 'America/Mexico_City',
- '23': 'America/Cancun',
- '24': 'America/Mexico_City',
- '25': 'America/Mazatlan',
- '26': 'America/Hermosillo',
- '27': 'America/Merida',
- '28': 'America/Monterrey',
- '29': 'America/Mexico_City',
- '30': 'America/Mexico_City',
- '31': 'America/Merida',
- '32': 'America/Monterrey'
- },
- 'MY': {
- '01': 'Asia/Kuala_Lumpur',
- '02': 'Asia/Kuala_Lumpur',
- '03': 'Asia/Kuala_Lumpur',
- '04': 'Asia/Kuala_Lumpur',
- '05': 'Asia/Kuala_Lumpur',
- '06': 'Asia/Kuala_Lumpur',
- '07': 'Asia/Kuala_Lumpur',
- '08': 'Asia/Kuala_Lumpur',
- '09': 'Asia/Kuala_Lumpur',
- '11': 'Asia/Kuching',
- '12': 'Asia/Kuala_Lumpur',
- '13': 'Asia/Kuala_Lumpur',
- '14': 'Asia/Kuala_Lumpur',
- '15': 'Asia/Kuching',
- '16': 'Asia/Kuching'
- },
- 'MZ': 'Africa/Maputo',
- 'NA': 'Africa/Windhoek',
- 'NC': 'Pacific/Noumea',
- 'NE': 'Africa/Niamey',
- 'NF': 'Pacific/Norfolk',
- 'NG': 'Africa/Lagos',
- 'NI': 'America/Managua',
- 'NL': 'Europe/Amsterdam',
- 'NO': 'Europe/Oslo',
- 'NP': 'Asia/Katmandu',
- 'NR': 'Pacific/Nauru',
- 'NU': 'Pacific/Niue',
- 'NZ': {
- '85': 'Pacific/Auckland',
- 'E7': 'Pacific/Auckland',
- 'E8': 'Pacific/Auckland',
- 'E9': 'Pacific/Auckland',
- 'F1': 'Pacific/Auckland',
- 'F2': 'Pacific/Auckland',
- 'F3': 'Pacific/Auckland',
- 'F4': 'Pacific/Auckland',
- 'F5': 'Pacific/Auckland',
- 'F7': 'Pacific/Chatham',
- 'F8': 'Pacific/Auckland',
- 'F9': 'Pacific/Auckland',
- 'G1': 'Pacific/Auckland',
- 'G2': 'Pacific/Auckland',
- 'G3': 'Pacific/Auckland'
- },
- 'OM': 'Asia/Muscat',
- 'PA': 'America/Panama',
- 'PE': 'America/Lima',
- 'PF': 'Pacific/Marquesas',
- 'PG': 'Pacific/Port_Moresby',
- 'PH': 'Asia/Manila',
- 'PK': 'Asia/Karachi',
- 'PL': 'Europe/Warsaw',
- 'PM': 'America/Miquelon',
- 'PN': 'Pacific/Pitcairn',
- 'PR': 'America/Puerto_Rico',
- 'PS': 'Asia/Gaza',
- 'PT': {
- '02': 'Europe/Lisbon',
- '03': 'Europe/Lisbon',
- '04': 'Europe/Lisbon',
- '05': 'Europe/Lisbon',
- '06': 'Europe/Lisbon',
- '07': 'Europe/Lisbon',
- '08': 'Europe/Lisbon',
- '09': 'Europe/Lisbon',
- '10': 'Atlantic/Madeira',
- '11': 'Europe/Lisbon',
- '13': 'Europe/Lisbon',
- '14': 'Europe/Lisbon',
- '16': 'Europe/Lisbon',
- '17': 'Europe/Lisbon',
- '18': 'Europe/Lisbon',
- '19': 'Europe/Lisbon',
- '20': 'Europe/Lisbon',
- '21': 'Europe/Lisbon',
- '22': 'Europe/Lisbon'
- },
- 'PW': 'Pacific/Palau',
- 'PY': 'America/Asuncion',
- 'QA': 'Asia/Qatar',
- 'RE': 'Indian/Reunion',
- 'RO': 'Europe/Bucharest',
- 'RS': 'Europe/Belgrade',
- 'RU': {
- '01': 'Europe/Volgograd',
- '02': 'Asia/Irkutsk',
- '03': 'Asia/Novokuznetsk',
- '04': 'Asia/Novosibirsk',
- '05': 'Asia/Vladivostok',
- '06': 'Europe/Moscow',
- '07': 'Europe/Volgograd',
- '08': 'Europe/Samara',
- '09': 'Europe/Moscow',
- '10': 'Europe/Moscow',
- '11': 'Asia/Irkutsk',
- '13': 'Asia/Yekaterinburg',
- '14': 'Asia/Irkutsk',
- '15': 'Asia/Anadyr',
- '16': 'Europe/Samara',
- '17': 'Europe/Volgograd',
- '18': 'Asia/Krasnoyarsk',
- '20': 'Asia/Irkutsk',
- '21': 'Europe/Moscow',
- '22': 'Europe/Volgograd',
- '23': 'Europe/Kaliningrad',
- '24': 'Europe/Volgograd',
- '25': 'Europe/Moscow',
- '26': 'Asia/Kamchatka',
- '27': 'Europe/Volgograd',
- '28': 'Europe/Moscow',
- '29': 'Asia/Novokuznetsk',
- '30': 'Asia/Vladivostok',
- '31': 'Asia/Krasnoyarsk',
- '32': 'Asia/Omsk',
- '33': 'Asia/Yekaterinburg',
- '34': 'Asia/Yekaterinburg',
- '35': 'Asia/Yekaterinburg',
- '36': 'Asia/Anadyr',
- '37': 'Europe/Moscow',
- '38': 'Europe/Volgograd',
- '39': 'Asia/Krasnoyarsk',
- '40': 'Asia/Yekaterinburg',
- '41': 'Europe/Moscow',
- '42': 'Europe/Moscow',
- '43': 'Europe/Moscow',
- '44': 'Asia/Magadan',
- '45': 'Europe/Samara',
- '46': 'Europe/Samara',
- '47': 'Europe/Moscow',
- '48': 'Europe/Moscow',
- '49': 'Europe/Moscow',
- '50': 'Asia/Yekaterinburg',
- '51': 'Europe/Moscow',
- '52': 'Europe/Moscow',
- '53': 'Asia/Novosibirsk',
- '54': 'Asia/Omsk',
- '55': 'Europe/Samara',
- '56': 'Europe/Moscow',
- '57': 'Europe/Samara',
- '58': 'Asia/Yekaterinburg',
- '59': 'Asia/Vladivostok',
- '60': 'Europe/Kaliningrad',
- '61': 'Europe/Volgograd',
- '62': 'Europe/Moscow',
- '63': 'Asia/Yakutsk',
- '64': 'Asia/Sakhalin',
- '65': 'Europe/Samara',
- '66': 'Europe/Moscow',
- '67': 'Europe/Samara',
- '68': 'Europe/Volgograd',
- '69': 'Europe/Moscow',
- '70': 'Europe/Volgograd',
- '71': 'Asia/Yekaterinburg',
- '72': 'Europe/Moscow',
- '73': 'Europe/Samara',
- '74': 'Asia/Krasnoyarsk',
- '75': 'Asia/Novosibirsk',
- '76': 'Europe/Moscow',
- '77': 'Europe/Moscow',
- '78': 'Asia/Yekaterinburg',
- '79': 'Asia/Irkutsk',
- '80': 'Asia/Yekaterinburg',
- '81': 'Europe/Samara',
- '82': 'Asia/Irkutsk',
- '83': 'Europe/Moscow',
- '84': 'Europe/Volgograd',
- '85': 'Europe/Moscow',
- '86': 'Europe/Moscow',
- '87': 'Asia/Novosibirsk',
- '88': 'Europe/Moscow',
- '89': 'Asia/Vladivostok'
- },
- 'RW': 'Africa/Kigali',
- 'SA': 'Asia/Riyadh',
- 'SB': 'Pacific/Guadalcanal',
- 'SC': 'Indian/Mahe',
- 'SD': 'Africa/Khartoum',
- 'SE': 'Europe/Stockholm',
- 'SG': 'Asia/Singapore',
- 'SH': 'Atlantic/St_Helena',
- 'SI': 'Europe/Ljubljana',
- 'SJ': 'Arctic/Longyearbyen',
- 'SK': 'Europe/Bratislava',
- 'SL': 'Africa/Freetown',
- 'SM': 'Europe/San_Marino',
- 'SN': 'Africa/Dakar',
- 'SO': 'Africa/Mogadishu',
- 'SR': 'America/Paramaribo',
- 'SS': 'Africa/Juba',
- 'ST': 'Africa/Sao_Tome',
- 'SV': 'America/El_Salvador',
- 'SX': 'America/Curacao',
- 'SY': 'Asia/Damascus',
- 'SZ': 'Africa/Mbabane',
- 'TC': 'America/Grand_Turk',
- 'TD': 'Africa/Ndjamena',
- 'TF': 'Indian/Kerguelen',
- 'TG': 'Africa/Lome',
- 'TH': 'Asia/Bangkok',
- 'TJ': 'Asia/Dushanbe',
- 'TK': 'Pacific/Fakaofo',
- 'TL': 'Asia/Dili',
- 'TM': 'Asia/Ashgabat',
- 'TN': 'Africa/Tunis',
- 'TO': 'Pacific/Tongatapu',
- 'TR': 'Asia/Istanbul',
- 'TT': 'America/Port_of_Spain',
- 'TV': 'Pacific/Funafuti',
- 'TW': 'Asia/Taipei',
- 'TZ': 'Africa/Dar_es_Salaam',
- 'UA': {
- '01': 'Europe/Kiev',
- '02': 'Europe/Kiev',
- '03': 'Europe/Uzhgorod',
- '04': 'Europe/Zaporozhye',
- '05': 'Europe/Zaporozhye',
- '06': 'Europe/Uzhgorod',
- '07': 'Europe/Zaporozhye',
- '08': 'Europe/Simferopol',
- '09': 'Europe/Kiev',
- '10': 'Europe/Zaporozhye',
- '11': 'Europe/Simferopol',
- '13': 'Europe/Kiev',
- '14': 'Europe/Zaporozhye',
- '15': 'Europe/Uzhgorod',
- '16': 'Europe/Zaporozhye',
- '17': 'Europe/Simferopol',
- '18': 'Europe/Zaporozhye',
- '19': 'Europe/Kiev',
- '20': 'Europe/Simferopol',
- '21': 'Europe/Kiev',
- '22': 'Europe/Uzhgorod',
- '23': 'Europe/Kiev',
- '24': 'Europe/Uzhgorod',
- '25': 'Europe/Uzhgorod',
- '26': 'Europe/Zaporozhye',
- '27': 'Europe/Kiev'
- },
- 'UG': 'Africa/Kampala',
- 'US': {
- 'AK': 'America/Anchorage',
- 'AL': 'America/Chicago',
- 'AR': 'America/Chicago',
- 'AZ': 'America/Phoenix',
- 'CA': 'America/Los_Angeles',
- 'CO': 'America/Denver',
- 'CT': 'America/New_York',
- 'DC': 'America/New_York',
- 'DE': 'America/New_York',
- 'FL': 'America/New_York',
- 'GA': 'America/New_York',
- 'HI': 'Pacific/Honolulu',
- 'IA': 'America/Chicago',
- 'ID': 'America/Denver',
- 'IL': 'America/Chicago',
- 'IN': 'America/Indianapolis',
- 'KS': 'America/Chicago',
- 'KY': 'America/New_York',
- 'LA': 'America/Chicago',
- 'MA': 'America/New_York',
- 'MD': 'America/New_York',
- 'ME': 'America/New_York',
- 'MI': 'America/New_York',
- 'MN': 'America/Chicago',
- 'MO': 'America/Chicago',
- 'MS': 'America/Chicago',
- 'MT': 'America/Denver',
- 'NC': 'America/New_York',
- 'ND': 'America/Chicago',
- 'NE': 'America/Chicago',
- 'NH': 'America/New_York',
- 'NJ': 'America/New_York',
- 'NM': 'America/Denver',
- 'NV': 'America/Los_Angeles',
- 'NY': 'America/New_York',
- 'OH': 'America/New_York',
- 'OK': 'America/Chicago',
- 'OR': 'America/Los_Angeles',
- 'PA': 'America/New_York',
- 'RI': 'America/New_York',
- 'SC': 'America/New_York',
- 'SD': 'America/Chicago',
- 'TN': 'America/Chicago',
- 'TX': 'America/Chicago',
- 'UT': 'America/Denver',
- 'VA': 'America/New_York',
- 'VT': 'America/New_York',
- 'WA': 'America/Los_Angeles',
- 'WI': 'America/Chicago',
- 'WV': 'America/New_York',
- 'WY': 'America/Denver'
- },
- 'UY': 'America/Montevideo',
- 'UZ': {
- '01': 'Asia/Tashkent',
- '02': 'Asia/Samarkand',
- '03': 'Asia/Tashkent',
- '06': 'Asia/Tashkent',
- '07': 'Asia/Samarkand',
- '08': 'Asia/Samarkand',
- '09': 'Asia/Samarkand',
- '10': 'Asia/Samarkand',
- '12': 'Asia/Samarkand',
- '13': 'Asia/Tashkent',
- '14': 'Asia/Tashkent'
- },
- 'VA': 'Europe/Vatican',
- 'VC': 'America/St_Vincent',
- 'VE': 'America/Caracas',
- 'VG': 'America/Tortola',
- 'VI': 'America/St_Thomas',
- 'VN': 'Asia/Phnom_Penh',
- 'VU': 'Pacific/Efate',
- 'WF': 'Pacific/Wallis',
- 'WS': 'Pacific/Samoa',
- 'YE': 'Asia/Aden',
- 'YT': 'Indian/Mayotte',
- 'YU': 'Europe/Belgrade',
- 'ZA': 'Africa/Johannesburg',
- 'ZM': 'Africa/Lusaka',
- 'ZW': 'Africa/Harare'
- }
-
+_country = {}
+_country["AD"] = "Europe/Andorra"
+_country["AE"] = "Asia/Dubai"
+_country["AF"] = "Asia/Kabul"
+_country["AG"] = "America/Antigua"
+_country["AI"] = "America/Anguilla"
+_country["AL"] = "Europe/Tirane"
+_country["AM"] = "Asia/Yerevan"
+_country["AO"] = "Africa/Luanda"
+_country["AR"] = {}
+_country["AR"]["01"] = "America/Argentina/Buenos_Aires"
+_country["AR"]["02"] = "America/Argentina/Catamarca"
+_country["AR"]["03"] = "America/Argentina/Tucuman"
+_country["AR"]["04"] = "America/Argentina/Rio_Gallegos"
+_country["AR"]["05"] = "America/Argentina/Cordoba"
+_country["AR"]["06"] = "America/Argentina/Tucuman"
+_country["AR"]["07"] = "America/Argentina/Buenos_Aires"
+_country["AR"]["08"] = "America/Argentina/Buenos_Aires"
+_country["AR"]["09"] = "America/Argentina/Tucuman"
+_country["AR"]["10"] = "America/Argentina/Jujuy"
+_country["AR"]["11"] = "America/Argentina/San_Luis"
+_country["AR"]["12"] = "America/Argentina/La_Rioja"
+_country["AR"]["13"] = "America/Argentina/Mendoza"
+_country["AR"]["14"] = "America/Argentina/Buenos_Aires"
+_country["AR"]["15"] = "America/Argentina/San_Luis"
+_country["AR"]["16"] = "America/Argentina/Buenos_Aires"
+_country["AR"]["17"] = "America/Argentina/Salta"
+_country["AR"]["18"] = "America/Argentina/San_Juan"
+_country["AR"]["19"] = "America/Argentina/San_Luis"
+_country["AR"]["20"] = "America/Argentina/Rio_Gallegos"
+_country["AR"]["21"] = "America/Argentina/Buenos_Aires"
+_country["AR"]["22"] = "America/Argentina/Catamarca"
+_country["AR"]["23"] = "America/Argentina/Ushuaia"
+_country["AR"]["24"] = "America/Argentina/Tucuman"
+_country["AS"] = "US/Samoa"
+_country["AT"] = "Europe/Vienna"
+_country["AU"] = {}
+_country["AU"]["01"] = "Australia/Canberra"
+_country["AU"]["02"] = "Australia/NSW"
+_country["AU"]["03"] = "Australia/North"
+_country["AU"]["04"] = "Australia/Queensland"
+_country["AU"]["05"] = "Australia/South"
+_country["AU"]["06"] = "Australia/Tasmania"
+_country["AU"]["07"] = "Australia/Victoria"
+_country["AU"]["08"] = "Australia/West"
+_country["AW"] = "America/Aruba"
+_country["AX"] = "Europe/Mariehamn"
+_country["AZ"] = "Asia/Baku"
+_country["BA"] = "Europe/Sarajevo"
+_country["BB"] = "America/Barbados"
+_country["BD"] = "Asia/Dhaka"
+_country["BE"] = "Europe/Brussels"
+_country["BF"] = "Africa/Ouagadougou"
+_country["BG"] = "Europe/Sofia"
+_country["BH"] = "Asia/Bahrain"
+_country["BI"] = "Africa/Bujumbura"
+_country["BJ"] = "Africa/Porto-Novo"
+_country["BL"] = "America/St_Barthelemy"
+_country["BM"] = "Atlantic/Bermuda"
+_country["BN"] = "Asia/Brunei"
+_country["BO"] = "America/La_Paz"
+_country["BQ"] = "America/Curacao"
+_country["BR"] = {}
+_country["BR"]["01"] = "America/Rio_Branco"
+_country["BR"]["02"] = "America/Maceio"
+_country["BR"]["03"] = "America/Sao_Paulo"
+_country["BR"]["04"] = "America/Manaus"
+_country["BR"]["05"] = "America/Bahia"
+_country["BR"]["06"] = "America/Fortaleza"
+_country["BR"]["07"] = "America/Sao_Paulo"
+_country["BR"]["08"] = "America/Sao_Paulo"
+_country["BR"]["11"] = "America/Campo_Grande"
+_country["BR"]["13"] = "America/Belem"
+_country["BR"]["14"] = "America/Cuiaba"
+_country["BR"]["15"] = "America/Sao_Paulo"
+_country["BR"]["16"] = "America/Belem"
+_country["BR"]["17"] = "America/Recife"
+_country["BR"]["18"] = "America/Sao_Paulo"
+_country["BR"]["20"] = "America/Fortaleza"
+_country["BR"]["21"] = "America/Sao_Paulo"
+_country["BR"]["22"] = "America/Recife"
+_country["BR"]["23"] = "America/Sao_Paulo"
+_country["BR"]["24"] = "America/Porto_Velho"
+_country["BR"]["25"] = "America/Boa_Vista"
+_country["BR"]["26"] = "America/Sao_Paulo"
+_country["BR"]["27"] = "America/Sao_Paulo"
+_country["BR"]["28"] = "America/Maceio"
+_country["BR"]["29"] = "America/Sao_Paulo"
+_country["BR"]["30"] = "America/Recife"
+_country["BR"]["31"] = "America/Araguaina"
+_country["BS"] = "America/Nassau"
+_country["BT"] = "Asia/Thimphu"
+_country["BW"] = "Africa/Gaborone"
+_country["BY"] = "Europe/Minsk"
+_country["BZ"] = "America/Belize"
+_country["CA"] = {}
+_country["CA"]["AB"] = "America/Edmonton"
+_country["CA"]["BC"] = "America/Vancouver"
+_country["CA"]["MB"] = "America/Winnipeg"
+_country["CA"]["NB"] = "America/Halifax"
+_country["CA"]["NL"] = "America/St_Johns"
+_country["CA"]["NS"] = "America/Halifax"
+_country["CA"]["NT"] = "America/Yellowknife"
+_country["CA"]["NU"] = "America/Rankin_Inlet"
+_country["CA"]["ON"] = "America/Rainy_River"
+_country["CA"]["PE"] = "America/Halifax"
+_country["CA"]["QC"] = "America/Montreal"
+_country["CA"]["SK"] = "America/Regina"
+_country["CA"]["YT"] = "America/Whitehorse"
+_country["CC"] = "Indian/Cocos"
+_country["CD"] = {}
+_country["CD"]["02"] = "Africa/Kinshasa"
+_country["CD"]["05"] = "Africa/Lubumbashi"
+_country["CD"]["06"] = "Africa/Kinshasa"
+_country["CD"]["08"] = "Africa/Kinshasa"
+_country["CD"]["10"] = "Africa/Lubumbashi"
+_country["CD"]["11"] = "Africa/Lubumbashi"
+_country["CD"]["12"] = "Africa/Lubumbashi"
+_country["CF"] = "Africa/Bangui"
+_country["CG"] = "Africa/Brazzaville"
+_country["CH"] = "Europe/Zurich"
+_country["CI"] = "Africa/Abidjan"
+_country["CK"] = "Pacific/Rarotonga"
+_country["CL"] = "Chile/Continental"
+_country["CM"] = "Africa/Lagos"
+_country["CN"] = {}
+_country["CN"]["01"] = "Asia/Shanghai"
+_country["CN"]["02"] = "Asia/Shanghai"
+_country["CN"]["03"] = "Asia/Shanghai"
+_country["CN"]["04"] = "Asia/Shanghai"
+_country["CN"]["05"] = "Asia/Harbin"
+_country["CN"]["06"] = "Asia/Chongqing"
+_country["CN"]["07"] = "Asia/Shanghai"
+_country["CN"]["08"] = "Asia/Harbin"
+_country["CN"]["09"] = "Asia/Shanghai"
+_country["CN"]["10"] = "Asia/Shanghai"
+_country["CN"]["11"] = "Asia/Chongqing"
+_country["CN"]["12"] = "Asia/Shanghai"
+_country["CN"]["13"] = "Asia/Urumqi"
+_country["CN"]["14"] = "Asia/Chongqing"
+_country["CN"]["15"] = "Asia/Chongqing"
+_country["CN"]["16"] = "Asia/Chongqing"
+_country["CN"]["18"] = "Asia/Chongqing"
+_country["CN"]["19"] = "Asia/Harbin"
+_country["CN"]["20"] = "Asia/Harbin"
+_country["CN"]["21"] = "Asia/Chongqing"
+_country["CN"]["22"] = "Asia/Harbin"
+_country["CN"]["23"] = "Asia/Shanghai"
+_country["CN"]["24"] = "Asia/Chongqing"
+_country["CN"]["25"] = "Asia/Shanghai"
+_country["CN"]["26"] = "Asia/Chongqing"
+_country["CN"]["28"] = "Asia/Shanghai"
+_country["CN"]["29"] = "Asia/Chongqing"
+_country["CN"]["30"] = "Asia/Chongqing"
+_country["CN"]["31"] = "Asia/Chongqing"
+_country["CN"]["32"] = "Asia/Chongqing"
+_country["CN"]["33"] = "Asia/Chongqing"
+_country["CO"] = "America/Bogota"
+_country["CR"] = "America/Costa_Rica"
+_country["CU"] = "America/Havana"
+_country["CV"] = "Atlantic/Cape_Verde"
+_country["CW"] = "America/Curacao"
+_country["CX"] = "Indian/Christmas"
+_country["CY"] = "Asia/Nicosia"
+_country["CZ"] = "Europe/Prague"
+_country["DE"] = "Europe/Berlin"
+_country["DJ"] = "Africa/Djibouti"
+_country["DK"] = "Europe/Copenhagen"
+_country["DM"] = "America/Dominica"
+_country["DO"] = "America/Santo_Domingo"
+_country["DZ"] = "Africa/Algiers"
+_country["EC"] = {}
+_country["EC"]["01"] = "Pacific/Galapagos"
+_country["EC"]["02"] = "America/Guayaquil"
+_country["EC"]["03"] = "America/Guayaquil"
+_country["EC"]["04"] = "America/Guayaquil"
+_country["EC"]["05"] = "America/Guayaquil"
+_country["EC"]["06"] = "America/Guayaquil"
+_country["EC"]["07"] = "America/Guayaquil"
+_country["EC"]["08"] = "America/Guayaquil"
+_country["EC"]["09"] = "America/Guayaquil"
+_country["EC"]["10"] = "America/Guayaquil"
+_country["EC"]["11"] = "America/Guayaquil"
+_country["EC"]["12"] = "America/Guayaquil"
+_country["EC"]["13"] = "America/Guayaquil"
+_country["EC"]["14"] = "America/Guayaquil"
+_country["EC"]["15"] = "America/Guayaquil"
+_country["EC"]["17"] = "America/Guayaquil"
+_country["EC"]["18"] = "America/Guayaquil"
+_country["EC"]["19"] = "America/Guayaquil"
+_country["EC"]["20"] = "America/Guayaquil"
+_country["EC"]["22"] = "America/Guayaquil"
+_country["EE"] = "Europe/Tallinn"
+_country["EG"] = "Africa/Cairo"
+_country["EH"] = "Africa/El_Aaiun"
+_country["ER"] = "Africa/Asmera"
+_country["ES"] = {}
+_country["ES"]["07"] = "Europe/Madrid"
+_country["ES"]["27"] = "Europe/Madrid"
+_country["ES"]["29"] = "Europe/Madrid"
+_country["ES"]["31"] = "Europe/Madrid"
+_country["ES"]["32"] = "Europe/Madrid"
+_country["ES"]["34"] = "Europe/Madrid"
+_country["ES"]["39"] = "Europe/Madrid"
+_country["ES"]["51"] = "Africa/Ceuta"
+_country["ES"]["52"] = "Europe/Madrid"
+_country["ES"]["53"] = "Atlantic/Canary"
+_country["ES"]["54"] = "Europe/Madrid"
+_country["ES"]["55"] = "Europe/Madrid"
+_country["ES"]["56"] = "Europe/Madrid"
+_country["ES"]["57"] = "Europe/Madrid"
+_country["ES"]["58"] = "Europe/Madrid"
+_country["ES"]["59"] = "Europe/Madrid"
+_country["ES"]["60"] = "Europe/Madrid"
+_country["ET"] = "Africa/Addis_Ababa"
+_country["FI"] = "Europe/Helsinki"
+_country["FJ"] = "Pacific/Fiji"
+_country["FK"] = "Atlantic/Stanley"
+_country["FO"] = "Atlantic/Faeroe"
+_country["FR"] = "Europe/Paris"
+_country["GA"] = "Africa/Libreville"
+_country["GB"] = "Europe/London"
+_country["GD"] = "America/Grenada"
+_country["GE"] = "Asia/Tbilisi"
+_country["GF"] = "America/Cayenne"
+_country["GG"] = "Europe/Guernsey"
+_country["GH"] = "Africa/Accra"
+_country["GI"] = "Europe/Gibraltar"
+_country["GL"] = {}
+_country["GL"]["01"] = "America/Thule"
+_country["GL"]["02"] = "America/Godthab"
+_country["GL"]["03"] = "America/Godthab"
+_country["GM"] = "Africa/Banjul"
+_country["GN"] = "Africa/Conakry"
+_country["GP"] = "America/Guadeloupe"
+_country["GQ"] = "Africa/Malabo"
+_country["GR"] = "Europe/Athens"
+_country["GS"] = "Atlantic/South_Georgia"
+_country["GT"] = "America/Guatemala"
+_country["GU"] = "Pacific/Guam"
+_country["GW"] = "Africa/Bissau"
+_country["GY"] = "America/Guyana"
+_country["HK"] = "Asia/Hong_Kong"
+_country["HN"] = "America/Tegucigalpa"
+_country["HR"] = "Europe/Zagreb"
+_country["HT"] = "America/Port-au-Prince"
+_country["HU"] = "Europe/Budapest"
+_country["ID"] = {}
+_country["ID"]["01"] = "Asia/Pontianak"
+_country["ID"]["02"] = "Asia/Makassar"
+_country["ID"]["03"] = "Asia/Jakarta"
+_country["ID"]["04"] = "Asia/Jakarta"
+_country["ID"]["05"] = "Asia/Jakarta"
+_country["ID"]["06"] = "Asia/Jakarta"
+_country["ID"]["07"] = "Asia/Jakarta"
+_country["ID"]["08"] = "Asia/Jakarta"
+_country["ID"]["09"] = "Asia/Jayapura"
+_country["ID"]["10"] = "Asia/Jakarta"
+_country["ID"]["11"] = "Asia/Pontianak"
+_country["ID"]["12"] = "Asia/Makassar"
+_country["ID"]["13"] = "Asia/Makassar"
+_country["ID"]["14"] = "Asia/Makassar"
+_country["ID"]["15"] = "Asia/Jakarta"
+_country["ID"]["16"] = "Asia/Makassar"
+_country["ID"]["17"] = "Asia/Makassar"
+_country["ID"]["18"] = "Asia/Makassar"
+_country["ID"]["19"] = "Asia/Pontianak"
+_country["ID"]["20"] = "Asia/Makassar"
+_country["ID"]["21"] = "Asia/Makassar"
+_country["ID"]["22"] = "Asia/Makassar"
+_country["ID"]["23"] = "Asia/Makassar"
+_country["ID"]["24"] = "Asia/Jakarta"
+_country["ID"]["25"] = "Asia/Pontianak"
+_country["ID"]["26"] = "Asia/Pontianak"
+_country["ID"]["30"] = "Asia/Jakarta"
+_country["ID"]["31"] = "Asia/Makassar"
+_country["ID"]["33"] = "Asia/Jakarta"
+_country["IE"] = "Europe/Dublin"
+_country["IL"] = "Asia/Jerusalem"
+_country["IM"] = "Europe/Isle_of_Man"
+_country["IN"] = "Asia/Calcutta"
+_country["IO"] = "Indian/Chagos"
+_country["IQ"] = "Asia/Baghdad"
+_country["IR"] = "Asia/Tehran"
+_country["IS"] = "Atlantic/Reykjavik"
+_country["IT"] = "Europe/Rome"
+_country["JE"] = "Europe/Jersey"
+_country["JM"] = "America/Jamaica"
+_country["JO"] = "Asia/Amman"
+_country["JP"] = "Asia/Tokyo"
+_country["KE"] = "Africa/Nairobi"
+_country["KG"] = "Asia/Bishkek"
+_country["KH"] = "Asia/Phnom_Penh"
+_country["KI"] = "Pacific/Tarawa"
+_country["KM"] = "Indian/Comoro"
+_country["KN"] = "America/St_Kitts"
+_country["KP"] = "Asia/Pyongyang"
+_country["KR"] = "Asia/Seoul"
+_country["KW"] = "Asia/Kuwait"
+_country["KY"] = "America/Cayman"
+_country["KZ"] = {}
+_country["KZ"]["01"] = "Asia/Almaty"
+_country["KZ"]["02"] = "Asia/Almaty"
+_country["KZ"]["03"] = "Asia/Qyzylorda"
+_country["KZ"]["04"] = "Asia/Aqtobe"
+_country["KZ"]["05"] = "Asia/Qyzylorda"
+_country["KZ"]["06"] = "Asia/Aqtau"
+_country["KZ"]["07"] = "Asia/Oral"
+_country["KZ"]["08"] = "Asia/Qyzylorda"
+_country["KZ"]["09"] = "Asia/Aqtau"
+_country["KZ"]["10"] = "Asia/Qyzylorda"
+_country["KZ"]["11"] = "Asia/Almaty"
+_country["KZ"]["12"] = "Asia/Qyzylorda"
+_country["KZ"]["13"] = "Asia/Aqtobe"
+_country["KZ"]["14"] = "Asia/Qyzylorda"
+_country["KZ"]["15"] = "Asia/Almaty"
+_country["KZ"]["16"] = "Asia/Aqtobe"
+_country["KZ"]["17"] = "Asia/Almaty"
+_country["LA"] = "Asia/Vientiane"
+_country["LB"] = "Asia/Beirut"
+_country["LC"] = "America/St_Lucia"
+_country["LI"] = "Europe/Vaduz"
+_country["LK"] = "Asia/Colombo"
+_country["LR"] = "Africa/Monrovia"
+_country["LS"] = "Africa/Maseru"
+_country["LT"] = "Europe/Vilnius"
+_country["LU"] = "Europe/Luxembourg"
+_country["LV"] = "Europe/Riga"
+_country["LY"] = "Africa/Tripoli"
+_country["MA"] = "Africa/Casablanca"
+_country["MC"] = "Europe/Monaco"
+_country["MD"] = "Europe/Chisinau"
+_country["ME"] = "Europe/Podgorica"
+_country["MF"] = "America/Marigot"
+_country["MG"] = "Indian/Antananarivo"
+_country["MK"] = "Europe/Skopje"
+_country["ML"] = "Africa/Bamako"
+_country["MM"] = "Asia/Rangoon"
+_country["MN"] = "Asia/Choibalsan"
+_country["MO"] = "Asia/Macao"
+_country["MP"] = "Pacific/Saipan"
+_country["MQ"] = "America/Martinique"
+_country["MR"] = "Africa/Nouakchott"
+_country["MS"] = "America/Montserrat"
+_country["MT"] = "Europe/Malta"
+_country["MU"] = "Indian/Mauritius"
+_country["MV"] = "Indian/Maldives"
+_country["MW"] = "Africa/Blantyre"
+_country["MX"] = {}
+_country["MX"]["01"] = "America/Mexico_City"
+_country["MX"]["02"] = "America/Tijuana"
+_country["MX"]["03"] = "America/Hermosillo"
+_country["MX"]["04"] = "America/Merida"
+_country["MX"]["05"] = "America/Mexico_City"
+_country["MX"]["06"] = "America/Chihuahua"
+_country["MX"]["07"] = "America/Monterrey"
+_country["MX"]["08"] = "America/Mexico_City"
+_country["MX"]["09"] = "America/Mexico_City"
+_country["MX"]["10"] = "America/Mazatlan"
+_country["MX"]["11"] = "America/Mexico_City"
+_country["MX"]["12"] = "America/Mexico_City"
+_country["MX"]["13"] = "America/Mexico_City"
+_country["MX"]["14"] = "America/Mazatlan"
+_country["MX"]["15"] = "America/Chihuahua"
+_country["MX"]["16"] = "America/Mexico_City"
+_country["MX"]["17"] = "America/Mexico_City"
+_country["MX"]["18"] = "America/Mazatlan"
+_country["MX"]["19"] = "America/Monterrey"
+_country["MX"]["20"] = "America/Mexico_City"
+_country["MX"]["21"] = "America/Mexico_City"
+_country["MX"]["22"] = "America/Mexico_City"
+_country["MX"]["23"] = "America/Cancun"
+_country["MX"]["24"] = "America/Mexico_City"
+_country["MX"]["25"] = "America/Mazatlan"
+_country["MX"]["26"] = "America/Hermosillo"
+_country["MX"]["27"] = "America/Merida"
+_country["MX"]["28"] = "America/Monterrey"
+_country["MX"]["29"] = "America/Mexico_City"
+_country["MX"]["30"] = "America/Mexico_City"
+_country["MX"]["31"] = "America/Merida"
+_country["MX"]["32"] = "America/Monterrey"
+_country["MY"] = {}
+_country["MY"]["01"] = "Asia/Kuala_Lumpur"
+_country["MY"]["02"] = "Asia/Kuala_Lumpur"
+_country["MY"]["03"] = "Asia/Kuala_Lumpur"
+_country["MY"]["04"] = "Asia/Kuala_Lumpur"
+_country["MY"]["05"] = "Asia/Kuala_Lumpur"
+_country["MY"]["06"] = "Asia/Kuala_Lumpur"
+_country["MY"]["07"] = "Asia/Kuala_Lumpur"
+_country["MY"]["08"] = "Asia/Kuala_Lumpur"
+_country["MY"]["09"] = "Asia/Kuala_Lumpur"
+_country["MY"]["11"] = "Asia/Kuching"
+_country["MY"]["12"] = "Asia/Kuala_Lumpur"
+_country["MY"]["13"] = "Asia/Kuala_Lumpur"
+_country["MY"]["14"] = "Asia/Kuala_Lumpur"
+_country["MY"]["15"] = "Asia/Kuching"
+_country["MY"]["16"] = "Asia/Kuching"
+_country["MZ"] = "Africa/Maputo"
+_country["NA"] = "Africa/Windhoek"
+_country["NC"] = "Pacific/Noumea"
+_country["NE"] = "Africa/Niamey"
+_country["NF"] = "Pacific/Norfolk"
+_country["NG"] = "Africa/Lagos"
+_country["NI"] = "America/Managua"
+_country["NL"] = "Europe/Amsterdam"
+_country["NO"] = "Europe/Oslo"
+_country["NP"] = "Asia/Katmandu"
+_country["NR"] = "Pacific/Nauru"
+_country["NU"] = "Pacific/Niue"
+_country["NZ"] = {}
+_country["NZ"]["85"] = "Pacific/Auckland"
+_country["NZ"]["E7"] = "Pacific/Auckland"
+_country["NZ"]["E8"] = "Pacific/Auckland"
+_country["NZ"]["E9"] = "Pacific/Auckland"
+_country["NZ"]["F1"] = "Pacific/Auckland"
+_country["NZ"]["F2"] = "Pacific/Auckland"
+_country["NZ"]["F3"] = "Pacific/Auckland"
+_country["NZ"]["F4"] = "Pacific/Auckland"
+_country["NZ"]["F5"] = "Pacific/Auckland"
+_country["NZ"]["F7"] = "Pacific/Chatham"
+_country["NZ"]["F8"] = "Pacific/Auckland"
+_country["NZ"]["F9"] = "Pacific/Auckland"
+_country["NZ"]["G1"] = "Pacific/Auckland"
+_country["NZ"]["G2"] = "Pacific/Auckland"
+_country["NZ"]["G3"] = "Pacific/Auckland"
+_country["OM"] = "Asia/Muscat"
+_country["PA"] = "America/Panama"
+_country["PE"] = "America/Lima"
+_country["PF"] = "Pacific/Marquesas"
+_country["PG"] = "Pacific/Port_Moresby"
+_country["PH"] = "Asia/Manila"
+_country["PK"] = "Asia/Karachi"
+_country["PL"] = "Europe/Warsaw"
+_country["PM"] = "America/Miquelon"
+_country["PN"] = "Pacific/Pitcairn"
+_country["PR"] = "America/Puerto_Rico"
+_country["PS"] = "Asia/Gaza"
+_country["PT"] = {}
+_country["PT"]["02"] = "Europe/Lisbon"
+_country["PT"]["03"] = "Europe/Lisbon"
+_country["PT"]["04"] = "Europe/Lisbon"
+_country["PT"]["05"] = "Europe/Lisbon"
+_country["PT"]["06"] = "Europe/Lisbon"
+_country["PT"]["07"] = "Europe/Lisbon"
+_country["PT"]["08"] = "Europe/Lisbon"
+_country["PT"]["09"] = "Europe/Lisbon"
+_country["PT"]["10"] = "Atlantic/Madeira"
+_country["PT"]["11"] = "Europe/Lisbon"
+_country["PT"]["13"] = "Europe/Lisbon"
+_country["PT"]["14"] = "Europe/Lisbon"
+_country["PT"]["16"] = "Europe/Lisbon"
+_country["PT"]["17"] = "Europe/Lisbon"
+_country["PT"]["18"] = "Europe/Lisbon"
+_country["PT"]["19"] = "Europe/Lisbon"
+_country["PT"]["20"] = "Europe/Lisbon"
+_country["PT"]["21"] = "Europe/Lisbon"
+_country["PT"]["22"] = "Europe/Lisbon"
+_country["PW"] = "Pacific/Palau"
+_country["PY"] = "America/Asuncion"
+_country["QA"] = "Asia/Qatar"
+_country["RE"] = "Indian/Reunion"
+_country["RO"] = "Europe/Bucharest"
+_country["RS"] = "Europe/Belgrade"
+_country["RU"] = {}
+_country["RU"]["01"] = "Europe/Volgograd"
+_country["RU"]["02"] = "Asia/Irkutsk"
+_country["RU"]["03"] = "Asia/Novokuznetsk"
+_country["RU"]["04"] = "Asia/Novosibirsk"
+_country["RU"]["05"] = "Asia/Vladivostok"
+_country["RU"]["06"] = "Europe/Moscow"
+_country["RU"]["07"] = "Europe/Volgograd"
+_country["RU"]["08"] = "Europe/Samara"
+_country["RU"]["09"] = "Europe/Moscow"
+_country["RU"]["10"] = "Europe/Moscow"
+_country["RU"]["11"] = "Asia/Irkutsk"
+_country["RU"]["13"] = "Asia/Yekaterinburg"
+_country["RU"]["14"] = "Asia/Irkutsk"
+_country["RU"]["15"] = "Asia/Anadyr"
+_country["RU"]["16"] = "Europe/Samara"
+_country["RU"]["17"] = "Europe/Volgograd"
+_country["RU"]["18"] = "Asia/Krasnoyarsk"
+_country["RU"]["20"] = "Asia/Irkutsk"
+_country["RU"]["21"] = "Europe/Moscow"
+_country["RU"]["22"] = "Europe/Volgograd"
+_country["RU"]["23"] = "Europe/Kaliningrad"
+_country["RU"]["24"] = "Europe/Volgograd"
+_country["RU"]["25"] = "Europe/Moscow"
+_country["RU"]["26"] = "Asia/Kamchatka"
+_country["RU"]["27"] = "Europe/Volgograd"
+_country["RU"]["28"] = "Europe/Moscow"
+_country["RU"]["29"] = "Asia/Novokuznetsk"
+_country["RU"]["30"] = "Asia/Vladivostok"
+_country["RU"]["31"] = "Asia/Krasnoyarsk"
+_country["RU"]["32"] = "Asia/Omsk"
+_country["RU"]["33"] = "Asia/Yekaterinburg"
+_country["RU"]["34"] = "Asia/Yekaterinburg"
+_country["RU"]["35"] = "Asia/Yekaterinburg"
+_country["RU"]["36"] = "Asia/Anadyr"
+_country["RU"]["37"] = "Europe/Moscow"
+_country["RU"]["38"] = "Europe/Volgograd"
+_country["RU"]["39"] = "Asia/Krasnoyarsk"
+_country["RU"]["40"] = "Asia/Yekaterinburg"
+_country["RU"]["41"] = "Europe/Moscow"
+_country["RU"]["42"] = "Europe/Moscow"
+_country["RU"]["43"] = "Europe/Moscow"
+_country["RU"]["44"] = "Asia/Magadan"
+_country["RU"]["45"] = "Europe/Samara"
+_country["RU"]["46"] = "Europe/Samara"
+_country["RU"]["47"] = "Europe/Moscow"
+_country["RU"]["48"] = "Europe/Moscow"
+_country["RU"]["49"] = "Europe/Moscow"
+_country["RU"]["50"] = "Asia/Yekaterinburg"
+_country["RU"]["51"] = "Europe/Moscow"
+_country["RU"]["52"] = "Europe/Moscow"
+_country["RU"]["53"] = "Asia/Novosibirsk"
+_country["RU"]["54"] = "Asia/Omsk"
+_country["RU"]["55"] = "Europe/Samara"
+_country["RU"]["56"] = "Europe/Moscow"
+_country["RU"]["57"] = "Europe/Samara"
+_country["RU"]["58"] = "Asia/Yekaterinburg"
+_country["RU"]["59"] = "Asia/Vladivostok"
+_country["RU"]["60"] = "Europe/Kaliningrad"
+_country["RU"]["61"] = "Europe/Volgograd"
+_country["RU"]["62"] = "Europe/Moscow"
+_country["RU"]["63"] = "Asia/Yakutsk"
+_country["RU"]["64"] = "Asia/Sakhalin"
+_country["RU"]["65"] = "Europe/Samara"
+_country["RU"]["66"] = "Europe/Moscow"
+_country["RU"]["67"] = "Europe/Samara"
+_country["RU"]["68"] = "Europe/Volgograd"
+_country["RU"]["69"] = "Europe/Moscow"
+_country["RU"]["70"] = "Europe/Volgograd"
+_country["RU"]["71"] = "Asia/Yekaterinburg"
+_country["RU"]["72"] = "Europe/Moscow"
+_country["RU"]["73"] = "Europe/Samara"
+_country["RU"]["74"] = "Asia/Krasnoyarsk"
+_country["RU"]["75"] = "Asia/Novosibirsk"
+_country["RU"]["76"] = "Europe/Moscow"
+_country["RU"]["77"] = "Europe/Moscow"
+_country["RU"]["78"] = "Asia/Yekaterinburg"
+_country["RU"]["79"] = "Asia/Irkutsk"
+_country["RU"]["80"] = "Asia/Yekaterinburg"
+_country["RU"]["81"] = "Europe/Samara"
+_country["RU"]["82"] = "Asia/Irkutsk"
+_country["RU"]["83"] = "Europe/Moscow"
+_country["RU"]["84"] = "Europe/Volgograd"
+_country["RU"]["85"] = "Europe/Moscow"
+_country["RU"]["86"] = "Europe/Moscow"
+_country["RU"]["87"] = "Asia/Novosibirsk"
+_country["RU"]["88"] = "Europe/Moscow"
+_country["RU"]["89"] = "Asia/Vladivostok"
+_country["RW"] = "Africa/Kigali"
+_country["SA"] = "Asia/Riyadh"
+_country["SB"] = "Pacific/Guadalcanal"
+_country["SC"] = "Indian/Mahe"
+_country["SD"] = "Africa/Khartoum"
+_country["SE"] = "Europe/Stockholm"
+_country["SG"] = "Asia/Singapore"
+_country["SH"] = "Atlantic/St_Helena"
+_country["SI"] = "Europe/Ljubljana"
+_country["SJ"] = "Arctic/Longyearbyen"
+_country["SK"] = "Europe/Bratislava"
+_country["SL"] = "Africa/Freetown"
+_country["SM"] = "Europe/San_Marino"
+_country["SN"] = "Africa/Dakar"
+_country["SO"] = "Africa/Mogadishu"
+_country["SR"] = "America/Paramaribo"
+_country["ST"] = "Africa/Sao_Tome"
+_country["SV"] = "America/El_Salvador"
+_country["SX"] = "America/Curacao"
+_country["SY"] = "Asia/Damascus"
+_country["SZ"] = "Africa/Mbabane"
+_country["TC"] = "America/Grand_Turk"
+_country["TD"] = "Africa/Ndjamena"
+_country["TF"] = "Indian/Kerguelen"
+_country["TG"] = "Africa/Lome"
+_country["TH"] = "Asia/Bangkok"
+_country["TJ"] = "Asia/Dushanbe"
+_country["TK"] = "Pacific/Fakaofo"
+_country["TL"] = "Asia/Dili"
+_country["TM"] = "Asia/Ashgabat"
+_country["TN"] = "Africa/Tunis"
+_country["TO"] = "Pacific/Tongatapu"
+_country["TR"] = "Asia/Istanbul"
+_country["TT"] = "America/Port_of_Spain"
+_country["TV"] = "Pacific/Funafuti"
+_country["TW"] = "Asia/Taipei"
+_country["TZ"] = "Africa/Dar_es_Salaam"
+_country["UA"] = {}
+_country["UA"]["01"] = "Europe/Kiev"
+_country["UA"]["02"] = "Europe/Kiev"
+_country["UA"]["03"] = "Europe/Uzhgorod"
+_country["UA"]["04"] = "Europe/Zaporozhye"
+_country["UA"]["05"] = "Europe/Zaporozhye"
+_country["UA"]["06"] = "Europe/Uzhgorod"
+_country["UA"]["07"] = "Europe/Zaporozhye"
+_country["UA"]["08"] = "Europe/Simferopol"
+_country["UA"]["09"] = "Europe/Kiev"
+_country["UA"]["10"] = "Europe/Zaporozhye"
+_country["UA"]["11"] = "Europe/Simferopol"
+_country["UA"]["13"] = "Europe/Kiev"
+_country["UA"]["14"] = "Europe/Zaporozhye"
+_country["UA"]["15"] = "Europe/Uzhgorod"
+_country["UA"]["16"] = "Europe/Zaporozhye"
+_country["UA"]["17"] = "Europe/Simferopol"
+_country["UA"]["18"] = "Europe/Zaporozhye"
+_country["UA"]["19"] = "Europe/Kiev"
+_country["UA"]["20"] = "Europe/Simferopol"
+_country["UA"]["21"] = "Europe/Kiev"
+_country["UA"]["22"] = "Europe/Uzhgorod"
+_country["UA"]["23"] = "Europe/Kiev"
+_country["UA"]["24"] = "Europe/Uzhgorod"
+_country["UA"]["25"] = "Europe/Uzhgorod"
+_country["UA"]["26"] = "Europe/Zaporozhye"
+_country["UA"]["27"] = "Europe/Kiev"
+_country["UG"] = "Africa/Kampala"
+_country["US"] = {}
+_country["US"]["AK"] = "America/Anchorage"
+_country["US"]["AL"] = "America/Chicago"
+_country["US"]["AR"] = "America/Chicago"
+_country["US"]["AZ"] = "America/Phoenix"
+_country["US"]["CA"] = "America/Los_Angeles"
+_country["US"]["CO"] = "America/Denver"
+_country["US"]["CT"] = "America/New_York"
+_country["US"]["DC"] = "America/New_York"
+_country["US"]["DE"] = "America/New_York"
+_country["US"]["FL"] = "America/New_York"
+_country["US"]["GA"] = "America/New_York"
+_country["US"]["HI"] = "Pacific/Honolulu"
+_country["US"]["IA"] = "America/Chicago"
+_country["US"]["ID"] = "America/Denver"
+_country["US"]["IL"] = "America/Chicago"
+_country["US"]["IN"] = "America/Indianapolis"
+_country["US"]["KS"] = "America/Chicago"
+_country["US"]["KY"] = "America/New_York"
+_country["US"]["LA"] = "America/Chicago"
+_country["US"]["MA"] = "America/New_York"
+_country["US"]["MD"] = "America/New_York"
+_country["US"]["ME"] = "America/New_York"
+_country["US"]["MI"] = "America/New_York"
+_country["US"]["MN"] = "America/Chicago"
+_country["US"]["MO"] = "America/Chicago"
+_country["US"]["MS"] = "America/Chicago"
+_country["US"]["MT"] = "America/Denver"
+_country["US"]["NC"] = "America/New_York"
+_country["US"]["ND"] = "America/Chicago"
+_country["US"]["NE"] = "America/Chicago"
+_country["US"]["NH"] = "America/New_York"
+_country["US"]["NJ"] = "America/New_York"
+_country["US"]["NM"] = "America/Denver"
+_country["US"]["NV"] = "America/Los_Angeles"
+_country["US"]["NY"] = "America/New_York"
+_country["US"]["OH"] = "America/New_York"
+_country["US"]["OK"] = "America/Chicago"
+_country["US"]["OR"] = "America/Los_Angeles"
+_country["US"]["PA"] = "America/New_York"
+_country["US"]["RI"] = "America/New_York"
+_country["US"]["SC"] = "America/New_York"
+_country["US"]["SD"] = "America/Chicago"
+_country["US"]["TN"] = "America/Chicago"
+_country["US"]["TX"] = "America/Chicago"
+_country["US"]["UT"] = "America/Denver"
+_country["US"]["VA"] = "America/New_York"
+_country["US"]["VT"] = "America/New_York"
+_country["US"]["WA"] = "America/Los_Angeles"
+_country["US"]["WI"] = "America/Chicago"
+_country["US"]["WV"] = "America/New_York"
+_country["US"]["WY"] = "America/Denver"
+_country["UY"] = "America/Montevideo"
+_country["UZ"] = {}
+_country["UZ"]["01"] = "Asia/Tashkent"
+_country["UZ"]["02"] = "Asia/Samarkand"
+_country["UZ"]["03"] = "Asia/Tashkent"
+_country["UZ"]["06"] = "Asia/Tashkent"
+_country["UZ"]["07"] = "Asia/Samarkand"
+_country["UZ"]["08"] = "Asia/Samarkand"
+_country["UZ"]["09"] = "Asia/Samarkand"
+_country["UZ"]["10"] = "Asia/Samarkand"
+_country["UZ"]["12"] = "Asia/Samarkand"
+_country["UZ"]["13"] = "Asia/Tashkent"
+_country["UZ"]["14"] = "Asia/Tashkent"
+_country["VA"] = "Europe/Vatican"
+_country["VC"] = "America/St_Vincent"
+_country["VE"] = "America/Caracas"
+_country["VG"] = "America/Tortola"
+_country["VI"] = "America/St_Thomas"
+_country["VN"] = "Asia/Phnom_Penh"
+_country["VU"] = "Pacific/Efate"
+_country["WF"] = "Pacific/Wallis"
+_country["WS"] = "Pacific/Samoa"
+_country["YE"] = "Asia/Aden"
+_country["YT"] = "Indian/Mayotte"
+_country["YU"] = "Europe/Belgrade"
+_country["ZA"] = "Africa/Johannesburg"
+_country["ZM"] = "Africa/Lusaka"
+_country["ZW"] = "Africa/Harare"
 def time_zone_by_country_and_region(country_code, region_name=None):
+
     if country_code not in _country:
-        return ''
+        return None
     if not region_name or region_name == '00':
         region_name = None
     timezones = _country[country_code]
+
     if isinstance(timezones, str):
         return timezones
-    if not region_name:
-        return ''
-
-    return timezones.get(region_name)
+    if region_name:
+        return timezones.get(region_name)
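
A minimal sketch of how the patched lookup behaves, assuming the table and
function above live in lib/pygeoip/timezone.py (module path inferred from the
surrounding diff headers):

    # Sketch only; return values follow the patched function above.
    from pygeoip.timezone import time_zone_by_country_and_region

    print(time_zone_by_country_and_region("SE"))        # "Europe/Stockholm": plain string entry
    print(time_zone_by_country_and_region("US", "CA"))  # "America/Los_Angeles": per-region dict entry
    print(time_zone_by_country_and_region("US"))        # None: dict entry but no region given
    print(time_zone_by_country_and_region("XX"))        # None: unknown country code
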
diff --git a/lib/pygeoip/util.py b/lib/pygeoip/util.py
index cca3fec..f2873f3 100644
--- a/lib/pygeoip/util.py
+++ b/lib/pygeoip/util.py
@@ -1,36 +1,42 @@
-# -*- coding: utf-8 -*-
-"""
-Utility functions. Part of the pygeoip package.
-
-@author: Jennifer Ennis
-
-@license: Copyright(C) 2004 MaxMind LLC
-
-This program is free software: you can redistribute it and/or modify
-it under the terms of the GNU Lesser General Public License as published by
-the Free Software Foundation, either version 3 of the License, or
-(at your option) any later version.
-
-This program is distributed in the hope that it will be useful,
-but WITHOUT ANY WARRANTY; without even the implied warranty of
-MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-GNU General Public License for more details.
-
-You should have received a copy of the GNU Lesser General Public License
-along with this program. If not, see <http://www.gnu.org/licenses/>.
-"""
-
-import socket
-import binascii
-
-
-def ip2long(ip):
- """
- Wrapper function for IPv4 and IPv6 converters
- @param ip: IPv4 or IPv6 address
- @type ip: str
- """
- try:
- return int(binascii.hexlify(socket.inet_aton(ip)), 16)
- except socket.error:
- return int(binascii.hexlify(socket.inet_pton(socket.AF_INET6, ip)), 16)
+"""
+Misc. utility functions. It is part of the pygeoip package.
+
+@author: Jennifer Ennis
+
+@license:
+Copyright(C) 2004 MaxMind LLC
+
+This program is free software: you can redistribute it and/or modify
+it under the terms of the GNU Lesser General Public License as published by
+the Free Software Foundation, either version 3 of the License, or
+(at your option) any later version.
+
+This program is distributed in the hope that it will be useful,
+but WITHOUT ANY WARRANTY; without even the implied warranty of
+MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+GNU General Public License for more details.
+
+You should have received a copy of the GNU Lesser General Public License
+along with this program. If not, see <http://www.gnu.org/licenses/>.
+"""
+
+import six
+
+def ip2long(ip):
+    """
+    Convert an IPv4 address into a 32-bit integer.
+
+    @param ip: quad-dotted IPv4 address
+    @type ip: str
+    @return: network byte order 32-bit integer
+    @rtype: int
+    """
+    ip_array = ip.split('.')
+
+    if six.PY3:
+        # int and long are unified in py3
+        ip_long = (int(ip_array[0]) * 16777216 + int(ip_array[1]) * 65536 +
+                   int(ip_array[2]) * 256 + int(ip_array[3]))
+    else:
+        ip_long = (long(ip_array[0]) * 16777216 + long(ip_array[1]) * 65536 +
+                   long(ip_array[2]) * 256 + long(ip_array[3]))
+    return ip_long
+
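
The hand-rolled base-256 arithmetic above can be cross-checked against the
standard library; a sketch (inet_aton/struct are not used by the patch itself):

    # Sketch: verifies the manual multiplications in ip2long against
    # socket/struct. Runs on both python 2 and 3.
    import socket
    import struct

    def ip2long_stdlib(ip):
        # '!I' unpacks the four packed bytes as a big-endian unsigned 32-bit int
        return struct.unpack('!I', socket.inet_aton(ip))[0]

    # 127 * 16777216 + 1 == 2130706433
    assert ip2long_stdlib('127.0.0.1') == 2130706433
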
diff --git a/lib/spotimeta/__init__.py b/lib/spotimeta/__init__.py
new file mode 100644
index 0000000..fe40d73
--- /dev/null
+++ b/lib/spotimeta/__init__.py
@@ -0,0 +1,270 @@
+""""Library for querying the Spotify metadata service"""
+
+__version__ = "0.2"
+__author__ = "Rune Halvorsen "
+__homepage__ = "http://bitbucket.org/runeh/spotimeta/"
+__docformat__ = "restructuredtext"
+
+
+import sys
+import urllib2
+import time
+
+try:
+ from email.utils import parsedate_tz, mktime_tz, formatdate
+except ImportError: # utils module name was lowercased after 2.4
+ from email.Utils import parsedate_tz, mktime_tz, formatdate
+
+
+from urllib import urlencode
+from parser import parse_lookup_doc, parse_search_doc
+
+
+API_VERSION = "1"
+USER_AGENT = "Spotimeta %s" % __version__
+
+
+class SpotimetaError(Exception):
+ """Superclass for all spotimeta exceptions. Adds no functionality. Only
+ there so it's possible to set up try blocks that catch all spotimeta
+ errors, regardless of class"""
+ pass
+
+
+class RequestTimeout(SpotimetaError):
+ """Raised when the timeout flag is in use and a request did not finish
+ within the allotted time."""
+ pass
+
+
+class NotFound(SpotimetaError):
+ """Raised when doing lookup on something that does not exist. Triggered
+ by the 404 http status code"""
+ pass
+
+
+class RateLimiting(SpotimetaError):
+ """Raised when the request was not completed due to rate limiting
+ restrictions"""
+ pass
+
+
+class ServiceUnavailable(SpotimetaError):
+ """Raised when the metadata service is not available (that is, the server
+ is up, but not accepting API requests at this time"""
+ pass
+
+
+class ServerError(SpotimetaError):
+ """Raised when an internal server error occurs. According to the spotify
+ documentation, this "should not happen"."""
+ pass
+
+
+def canonical(url_or_uri):
+ """returns a spotify uri, regardless if a url or uri is passed in"""
+ if url_or_uri.startswith("http"): # assume it's a url
+ parts = url_or_uri.split("/")
+ return "spotify:%s:%s" % (parts[-2], parts[-1])
+ else:
+ return url_or_uri
+
+
+def entrytype(url_or_uri):
+ """Return "album", "artist" or "track" based on the type of entry the uri
+ or url refers to."""
+ uri = canonical(url_or_uri)
+ try:
+ return uri.split(":")[1]
+ except IndexError:
+ return None
+
+
+class Metadata(object):
+
+ def __init__(self, cache=None, rate=10, timeout=None, user_agent=None):
+ self.cache = cache # not implemented yet
+ self.rate = rate # not implemented yet
+ self.timeout = timeout
+ self.user_agent = user_agent or USER_AGENT
+ self._timeout_supported = True
+ self._port = "80"
+ self._host = "ws.spotify.com"
+ self._detailtypes = {
+ "artist": {1: "album", 2: "albumdetail"},
+ "album": {1: "track", 2: "trackdetail"}
+ }
+
+
+ major, minor = sys.version_info[:2]
+ if self.timeout and major == 2 and minor < 6:
+ self._timeout_supported = False
+ import warnings
+ warnings.warn("Timeouts in urllib not supported in this version" +
+ " of python. timeout argument will be ignored!")
+
+
+ def _do_request(self, url, headers):
+ """Perform an actual response. Deal with 200 and 304 responses
+ correctly. If another error occurs, raise the appropriate
+ exception"""
+ try:
+ req = urllib2.Request(url, None, headers)
+ if self.timeout and self._timeout_supported:
+ return urllib2.urlopen(req, timeout=self.timeout)
+ else:
+ return urllib2.urlopen(req)
+
+ except urllib2.HTTPError, e:
+ if e.code == 304:
+ return e # looks wrong but isn't. On non-fatal errors the
+ # exception behaves like the retval from urlopen
+ elif e.code == 404:
+ raise NotFound()
+ elif e.code == 403:
+ raise RateLimiting()
+ elif e.code == 500:
+ raise ServerError()
+ elif e.code == 503:
+ raise ServiceUnavailable()
+ else:
+ raise # this should never happen
+ except urllib2.URLError, e:
+ """Probably timeout. should do a better check. FIXME"""
+ raise RequestTimeout()
+ except:
+ raise
+ # all the exceptions we don't know about yet. Probably
+ # some socket errors will come up here.
+
+ def _get_url(self, url, query, if_modified_since=None):
+ """Perform an http requests and return the open file-like object, if
+ there is one, as well as the expiry time and last-modified-time
+ if they were present in the reply.
+ If the if_modified_since variable is passed in, send it as the value
+ of the If-Modified-Since header."""
+ if query:
+ url = "%s?%s" %(url, urlencode(query))
+
+ headers = {'User-Agent': self.user_agent}
+ if if_modified_since:
+ headers["If-Modified-Since"] = formatdate(if_modified_since, False, True)
+
+ fp = self._do_request(url, headers)
+
+ # at this point we have something file like after the request
+ # finished with a 200 or 304.
+
+ headers = fp.info()
+ if fp.code == 304:
+ fp = None
+
+ expires = None
+ if "Expires" in headers:
+ expires = mktime_tz(parsedate_tz(headers.get("Expires")))
+
+ modified = None
+ if "Last-Modified" in headers:
+ modified = mktime_tz(parsedate_tz(headers.get("Last-Modified")))
+
+ return fp, modified, expires
+
+
+ def lookup(self, uri, detail=0):
+ """Lookup metadata for a URI. Optionally ask for extra details.
+ The details argument is an int: 0 for normal ammount of detauls, 1
+ for extra details, and 2 for most details. For tracks the details
+ argument is ignored, as the Spotify api only has one level of detail
+ for tracks. For the meaning of the detail levels, look at the
+ Spotify api docs"""
+
+ key = "%s:%s" % (uri, detail)
+ res, modified, expires = self._cache_get(key)
+
+ if res and time.time() < expires:
+ return res
+ # else, cache is outdated or entry not in it. Normal request cycle
+
+ url = "http://%s:%s/lookup/%s/" % (self._host, self._port, API_VERSION)
+ uri = canonical(uri)
+ query = {"uri": uri}
+ kind = entrytype(uri)
+
+ if detail in (1, 2) and kind in self._detailtypes:
+ query["extras"] = self._detailtypes[kind][detail]
+
+ fp, new_modified, new_expires = self._get_url(url, query, modified)
+
+ if fp: # We got data, sweet
+ res = parse_lookup_doc(fp, uri=uri)
+
+ self._cache_put(key, res, new_modified or modified, new_expires or expires)
+ return res
+
+ def search_album(self, term, page=None):
+ """The first page is numbered 1!"""
+ url = "http://%s:%s/search/%s/album" % (
+ self._host, self._port, API_VERSION)
+
+ return self._do_search(url, term, page)
+
+ def search_artist(self, term, page=None):
+ """The first page is numbered 1!"""
+ url = "http://%s:%s/search/%s/artist" % (
+ self._host, self._port, API_VERSION)
+
+ return self._do_search(url, term, page)
+
+ def search_track(self, term, page=None):
+ """The first page is numbered 1!"""
+ url = "http://%s:%s/search/%s/track" % (
+ self._host, self._port, API_VERSION)
+
+ return self._do_search(url, term, page)
+
+ def _do_search(self, url, term, page):
+ key = "%s:%s" % (term, page)
+
+ res, modified, expires = self._cache_get(key)
+ if res and time.time() < expires:
+ return res
+
+ query = {"q": term.encode('UTF-8')}
+
+ if page is not None:
+ query["page"] = str(page)
+
+ fp, new_modified, new_expires = self._get_url(url, query, modified)
+
+ if fp: # We got data, sweet
+ res = parse_search_doc(fp)
+
+ self._cache_put(key, res, new_modified or modified, new_expires or expires)
+
+ return res
+
+ def _cache_get(self, key):
+ """Get a tuple containing data, last-modified, expires.
+ If entry is not in cache return None, 0, 0
+ """
+ entry = None
+ if self.cache is not None:
+ entry = self.cache.get(key)
+
+ return entry or (None, 0, 0)
+
+ def _cache_put(self, key, value, modified, expires):
+ """Inverse of _cache_put"""
+ if self.cache is not None:
+ self.cache[key] = value, modified, expires
+
+# This is an instance of the Metadata class used for module level
+# operations. Only suitable for simple stuff. Normally one should
+# instantiate Metadata manually with appropriate options, especially
+# with regard to caching.
+_module_meta_instance = Metadata()
+
+lookup = _module_meta_instance.lookup
+search_album = _module_meta_instance.search_album
+search_artist = _module_meta_instance.search_artist
+search_track = _module_meta_instance.search_track
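
The bindings above make simple queries one-liners. A hedged usage sketch
(Python 2, since the module imports urllib2; the artist URI is a hypothetical
placeholder, and a plain dict works as the cache because only get() and item
assignment are used on it):

    # Sketch: module-level helpers, then an explicit instance with a dict cache.
    import spotimeta

    found = spotimeta.search_track(u"like spinning plates")
    for track in found["result"]:
        print track["name"], track.get("href")

    meta = spotimeta.Metadata(cache={}, timeout=5)
    data = meta.lookup("spotify:artist:XXXXXXXXXXXXXXXXXXXXXX", detail=2)
    print data["type"], data["result"]["name"]
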
diff --git a/lib/spotimeta/parser.py b/lib/spotimeta/parser.py
new file mode 100644
index 0000000..98e66e6
--- /dev/null
+++ b/lib/spotimeta/parser.py
@@ -0,0 +1,196 @@
+from xml.dom import minidom
+
+# extremely boring dom parsing ahead. Consider yourself warned.
+
+
+# The reason for the uri arg is that the xml returned from lookups does not
+# contain the href uri of the thing that was looked up. However, when an
+# element is encountered that is NOT the root of a query, it DOES contain
+# the href. We pass it in so the returned data always has the same format.
+def parse_lookup_doc(src, uri=None):
+ doc = minidom.parse(src)
+ root = doc.documentElement
+
+ if root.nodeName == "artist":
+ return {"type": "artist", "result": parse_artist(root, uri)}
+ elif root.nodeName == "album":
+ return {"type": "album", "result": parse_album(root, uri)}
+ elif root.nodeName == "track":
+ return {"type": "track", "result": parse_track(root, uri)}
+ else:
+ raise Exception("unknown node type! " + root.nodeName) # fixme: proper exception here
+
+
+def parse_search_doc(src):
+ doc = minidom.parse(src)
+ root = doc.documentElement
+
+ if root.nodeName == "artists":
+ return parse_artist_search(root)
+ elif root.nodeName == "albums":
+ return parse_album_search(root)
+ elif root.nodeName == "tracks":
+ return parse_track_search(root)
+ else:
+ raise Exception("unknown node type! " + root.nodeName) # fixme: proper exception here
+
+
+def parse_artist(root, uri=None):
+ ret = {}
+ if uri or root.hasAttribute("href"):
+ ret["href"] = uri or root.getAttribute("href")
+
+ for name, elem in _nodes(root):
+ if name == "name":
+ ret["name"] = _text(elem)
+ elif name == "albums":
+ ret["albums"] = parse_albumlist(elem)
+
+ return ret
+
+
+def parse_artistlist(root):
+ return map(parse_artist, _filter(root, "artist"))
+
+
+def parse_albumlist(root):
+ return map(parse_album, _filter(root, "album"))
+
+
+def parse_tracklist(root):
+ return map(parse_track, _filter(root, "track"))
+
+
+def parse_album(root, uri=None):
+ ret = {}
+ if uri or root.hasAttribute("href"):
+ ret["href"] = uri or root.getAttribute("href")
+
+ for name, elem in _nodes(root):
+ if name == "name":
+ ret["name"] = _text(elem)
+ elif name == "released":
+ released = _text(elem)
+ if released:
+ ret["released"] = int(_text(elem))
+ elif name == "id":
+ if not "ids" in ret:
+ ret["ids"] = []
+ ret["ids"].append(parse_id(elem))
+ elif name == "tracks":
+ ret["tracks"] = parse_tracklist(elem)
+
+ ret["artists"] = parse_artistlist(root)
+ if len(ret["artists"]) == 1:
+ ret["artist"] = ret["artists"][0]
+ else:
+ ret["artist"] = None
+
+
+ # todo: availability stuff. RFH
+ return ret
+
+
+def parse_id(elem):
+ ret = {"type": elem.getAttribute("type"),
+ "id": _text(elem)}
+ if elem.hasAttribute("href"):
+ ret["href"] = elem.getAttribute("href")
+ return ret
+
+
+def parse_track(root, uri=None):
+ ret = {}
+ if uri or root.hasAttribute("href"):
+ ret["href"] = uri or root.getAttribute("href")
+
+ for name, elem in _nodes(root):
+ if name == "name":
+ ret["name"] = _text(elem)
+ elif name == "disc-number":
+ ret["disc-number"] = int(_text(elem))
+ elif name == "track-number":
+ ret["track-number"] = int(_text(elem))
+ elif name == "length":
+ ret["length"] = float(_text(elem))
+ elif name == "popularity":
+ ret["popularity"] = float(_text(elem))
+ elif name == "album":
+ ret["album"] = parse_album(elem)
+ elif name == "id":
+ if not "ids" in ret:
+ ret["ids"] = []
+ ret["ids"].append(parse_id(elem))
+
+ ret["artists"] = parse_artistlist(root)
+
+ # Following prop is there for backwards compat. It may be dropped in a
+ # future version
+ if ret["artists"]:
+ ret["artist"] = ret["artists"][0]
+
+ return ret
+
+
+def parse_opensearch(root):
+ ret = {}
+ elems = root.getElementsByTagNameNS("http://a9.com/-/spec/opensearch/1.1/", "*")
+
+ for name, elem in ((e.localName, e) for e in elems):
+ if name == "Query":
+ ret["term"] = elem.getAttribute("searchTerms")
+ ret["start_page"] = int(elem.getAttribute("startPage"))
+ elif name == "totalResults":
+ ret["total_results"] = int(_text(elem))
+ elif name == "startIndex":
+ ret["start_index"] = int(_text(elem))
+ elif name == "itemsPerPage":
+ ret["items_per_page"] = int(_text(elem))
+
+ return ret
+
+
+def parse_album_search(root):
+ # Note that the search result root is not a dedicated search-result
+ # tag or similar. Instead it is the normal albums tag with extra
+ # stuff from the opensearch namespace. That's why we can't just directly
+ # return the result from parse_albumlist
+ ret = parse_opensearch(root)
+ ret["result"] = parse_albumlist(root)
+ return ret
+
+
+def parse_artist_search(root):
+ ret = parse_opensearch(root)
+ ret["result"] = parse_artistlist(root)
+ return ret
+
+
+def parse_track_search(root):
+ ret = parse_opensearch(root)
+ ret["result"] = parse_tracklist(root)
+ return ret
+
+
+def _nodes(elem):
+ """return an generator yielding element nodes that are children
+ of elem."""
+ return ((e.nodeName, e) for e
+ in elem.childNodes
+ if e.nodeType==e.ELEMENT_NODE)
+
+
+def _text(elem):
+ """Returns a concatenation of all text nodes that are children
+ of elem (roughly what elem.textContent does in web dom)"""
+ return "".join((e.nodeValue for e
+ in elem.childNodes
+ if e.nodeType==e.TEXT_NODE))
+
+
+def _filter(elem, filtername):
+ """Returns a generator yielding all child nodes with the nodeName name"""
+ return (elem for (name, elem)
+ in _nodes(elem)
+ if name == filtername)
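
Since parse_lookup_doc only needs a file-like object, the parser can be
exercised without the network; a sketch with a minimal hand-written fragment
(not captured service output):

    # Sketch: feed canned XML to the lookup parser (python 2, like the module).
    from StringIO import StringIO
    from spotimeta.parser import parse_lookup_doc

    xml = ('<artist xmlns="http://www.spotify.com/ns/music/1">'
           '<name>Some Artist</name></artist>')

    parsed = parse_lookup_doc(StringIO(xml), uri="spotify:artist:dummy")
    print parsed["type"]            # "artist"
    print parsed["result"]["name"]  # "Some Artist"
    print parsed["result"]["href"]  # "spotify:artist:dummy" (the passed-in uri)
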
diff --git a/lib/tweepy/CONTRIBUTORS.txt b/lib/tweepy/CONTRIBUTORS.txt
deleted file mode 100644
index 12956df..0000000
--- a/lib/tweepy/CONTRIBUTORS.txt
+++ /dev/null
@@ -1,31 +0,0 @@
-Thank you to all who have contributed to this project!
-If you contributed and not listed below please let me know.
-
-Aaron Swartz
-Adam Miskiewicz
-AlanBell
-Arthur Debert
-Bas Westerbaan
-Chris Kelly
-Clay McClure
-Ferenc Szalai
-Gergely Imreh
-Guan Yang
-Ivo Wetzel
-James Rowe
-Jenny Loomis
-Johannes Faigle
-Kumar Appaiah
-Michael (Doc) Norton
-Pascal Jürgens
-Robin Houston
-Sam Kaufman
-Thomas Bohmbach, Jr
-Wayne Moore
-Will McCutchen
-gilles
-Can Duruk
-Jan Schaumann (@jschauma)
-Stuart Powers
-Jeff Hull (@jsh2134)
-Mike (mikeandmore)
diff --git a/lib/tweepy/LICENSE.txt b/lib/tweepy/LICENSE.txt
deleted file mode 100644
index 8a91f2c..0000000
--- a/lib/tweepy/LICENSE.txt
+++ /dev/null
@@ -1,20 +0,0 @@
-MIT License
-Copyright (c) 2009-2010 Joshua Roesslein
-
-Permission is hereby granted, free of charge, to any person obtaining a copy
-of this software and associated documentation files (the "Software"), to deal
-in the Software without restriction, including without limitation the rights
-to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
-copies of the Software, and to permit persons to whom the Software is
-furnished to do so, subject to the following conditions:
-
-The above copyright notice and this permission notice shall be included in
-all copies or substantial portions of the Software.
-
-THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
-IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
-FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
-AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
-LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
-OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
-THE SOFTWARE.
diff --git a/lib/tweepy/__init__.py b/lib/tweepy/__init__.py
deleted file mode 100644
index 4a45b54..0000000
--- a/lib/tweepy/__init__.py
+++ /dev/null
@@ -1,27 +0,0 @@
-# Tweepy
-# Copyright 2009-2010 Joshua Roesslein
-# See LICENSE for details.
-
-"""
-Tweepy Twitter API library
-"""
-__version__ = '2.1'
-__author__ = 'Joshua Roesslein'
-__license__ = 'MIT'
-
-from tweepy.models import Status, User, DirectMessage, Friendship, SavedSearch, SearchResults, ModelFactory, Category
-from tweepy.error import TweepError
-from tweepy.api import API
-from tweepy.cache import Cache, MemoryCache, FileCache
-from tweepy.auth import BasicAuthHandler, OAuthHandler
-from tweepy.streaming import Stream, StreamListener
-from tweepy.cursor import Cursor
-
-# Global, unauthenticated instance of API
-api = API()
-
-def debug(enable=True, level=1):
-
- import httplib
- httplib.HTTPConnection.debuglevel = level
-
diff --git a/lib/tweepy/api.py b/lib/tweepy/api.py
deleted file mode 100644
index 7418809..0000000
--- a/lib/tweepy/api.py
+++ /dev/null
@@ -1,718 +0,0 @@
-# Tweepy
-# Copyright 2009-2010 Joshua Roesslein
-# See LICENSE for details.
-
-import os
-import mimetypes
-
-from tweepy.binder import bind_api
-from tweepy.error import TweepError
-from tweepy.parsers import ModelParser
-from tweepy.utils import list_to_csv
-
-
-class API(object):
- """Twitter API"""
-
- def __init__(self, auth_handler=None,
- host='api.twitter.com', search_host='search.twitter.com',
- cache=None, secure=True, api_root='/1.1', search_root='',
- retry_count=0, retry_delay=0, retry_errors=None, timeout=60,
- parser=None, compression=False):
- self.auth = auth_handler
- self.host = host
- self.search_host = search_host
- self.api_root = api_root
- self.search_root = search_root
- self.cache = cache
- self.secure = secure
- self.compression = compression
- self.retry_count = retry_count
- self.retry_delay = retry_delay
- self.retry_errors = retry_errors
- self.timeout = timeout
- self.parser = parser or ModelParser()
-
- """ statuses/home_timeline """
- home_timeline = bind_api(
- path = '/statuses/home_timeline.json',
- payload_type = 'status', payload_list = True,
- allowed_param = ['since_id', 'max_id', 'count'],
- require_auth = True
- )
-
- """ statuses/user_timeline """
- user_timeline = bind_api(
- path = '/statuses/user_timeline.json',
- payload_type = 'status', payload_list = True,
- allowed_param = ['id', 'user_id', 'screen_name', 'since_id',
- 'max_id', 'count', 'include_rts']
- )
-
- """ statuses/mentions """
- mentions_timeline = bind_api(
- path = '/statuses/mentions_timeline.json',
- payload_type = 'status', payload_list = True,
- allowed_param = ['since_id', 'max_id', 'count'],
- require_auth = True
- )
-
- """/statuses/:id/retweeted_by.format"""
- retweeted_by = bind_api(
- path = '/statuses/{id}/retweeted_by.json',
- payload_type = 'status', payload_list = True,
- allowed_param = ['id', 'count', 'page'],
- require_auth = True
- )
-
- """/related_results/show/:id.format"""
- related_results = bind_api(
- path = '/related_results/show/{id}.json',
- payload_type = 'relation', payload_list = True,
- allowed_param = ['id'],
- require_auth = False
- )
-
- """/statuses/:id/retweeted_by/ids.format"""
- retweeted_by_ids = bind_api(
- path = '/statuses/{id}/retweeted_by/ids.json',
- payload_type = 'ids',
- allowed_param = ['id', 'count', 'page'],
- require_auth = True
- )
-
- """ statuses/retweets_of_me """
- retweets_of_me = bind_api(
- path = '/statuses/retweets_of_me.json',
- payload_type = 'status', payload_list = True,
- allowed_param = ['since_id', 'max_id', 'count'],
- require_auth = True
- )
-
- """ statuses/show """
- get_status = bind_api(
- path = '/statuses/show.json',
- payload_type = 'status',
- allowed_param = ['id']
- )
-
- """ statuses/update """
- update_status = bind_api(
- path = '/statuses/update.json',
- method = 'POST',
- payload_type = 'status',
- allowed_param = ['status', 'in_reply_to_status_id', 'lat', 'long', 'source', 'place_id'],
- require_auth = True
- )
-
- """ statuses/destroy """
- destroy_status = bind_api(
- path = '/statuses/destroy/{id}.json',
- method = 'POST',
- payload_type = 'status',
- allowed_param = ['id'],
- require_auth = True
- )
-
- """ statuses/retweet """
- retweet = bind_api(
- path = '/statuses/retweet/{id}.json',
- method = 'POST',
- payload_type = 'status',
- allowed_param = ['id'],
- require_auth = True
- )
-
- """ statuses/retweets """
- retweets = bind_api(
- path = '/statuses/retweets/{id}.json',
- payload_type = 'status', payload_list = True,
- allowed_param = ['id', 'count'],
- require_auth = True
- )
-
- """ users/show """
- get_user = bind_api(
- path = '/users/show.json',
- payload_type = 'user',
- allowed_param = ['id', 'user_id', 'screen_name']
- )
-
- ''' statuses/oembed '''
- get_oembed = bind_api(
- path = '/statuses/oembed.json',
- payload_type = 'json',
- allowed_param = ['id', 'url', 'maxwidth', 'hide_media', 'omit_script', 'align', 'related', 'lang']
- )
-
- """ Perform bulk look up of users from user ID or screenname """
- def lookup_users(self, user_ids=None, screen_names=None):
- return self._lookup_users(list_to_csv(user_ids), list_to_csv(screen_names))
-
- _lookup_users = bind_api(
- path = '/users/lookup.json',
- payload_type = 'user', payload_list = True,
- allowed_param = ['user_id', 'screen_name'],
- )
-
- """ Get the authenticated user """
- def me(self):
- return self.get_user(screen_name=self.auth.get_username())
-
- """ users/search """
- search_users = bind_api(
- path = '/users/search.json',
- payload_type = 'user', payload_list = True,
- require_auth = True,
- allowed_param = ['q', 'per_page', 'page']
- )
-
- """ users/suggestions/:slug """
- suggested_users = bind_api(
- path = '/users/suggestions/{slug}.json',
- payload_type = 'user', payload_list = True,
- require_auth = True,
- allowed_param = ['slug', 'lang']
- )
-
- """ users/suggestions """
- suggested_categories = bind_api(
- path = '/users/suggestions.json',
- payload_type = 'category', payload_list = True,
- allowed_param = ['lang'],
- require_auth = True
- )
-
- """ users/suggestions/:slug/members """
- suggested_users_tweets = bind_api(
- path = '/users/suggestions/{slug}/members.json',
- payload_type = 'status', payload_list = True,
- allowed_param = ['slug'],
- require_auth = True
- )
-
- """ direct_messages """
- direct_messages = bind_api(
- path = '/direct_messages.json',
- payload_type = 'direct_message', payload_list = True,
- allowed_param = ['since_id', 'max_id', 'count'],
- require_auth = True
- )
-
- """ direct_messages/show """
- get_direct_message = bind_api(
- path = '/direct_messages/show/{id}.json',
- payload_type = 'direct_message',
- allowed_param = ['id'],
- require_auth = True
- )
-
- """ direct_messages/sent """
- sent_direct_messages = bind_api(
- path = '/direct_messages/sent.json',
- payload_type = 'direct_message', payload_list = True,
- allowed_param = ['since_id', 'max_id', 'count', 'page'],
- require_auth = True
- )
-
- """ direct_messages/new """
- send_direct_message = bind_api(
- path = '/direct_messages/new.json',
- method = 'POST',
- payload_type = 'direct_message',
- allowed_param = ['user', 'screen_name', 'user_id', 'text'],
- require_auth = True
- )
-
- """ direct_messages/destroy """
- destroy_direct_message = bind_api(
- path = '/direct_messages/destroy.json',
- method = 'DELETE',
- payload_type = 'direct_message',
- allowed_param = ['id'],
- require_auth = True
- )
-
- """ friendships/create """
- create_friendship = bind_api(
- path = '/friendships/create.json',
- method = 'POST',
- payload_type = 'user',
- allowed_param = ['id', 'user_id', 'screen_name', 'follow'],
- require_auth = True
- )
-
- """ friendships/destroy """
- destroy_friendship = bind_api(
- path = '/friendships/destroy.json',
- method = 'DELETE',
- payload_type = 'user',
- allowed_param = ['id', 'user_id', 'screen_name'],
- require_auth = True
- )
-
- """ friendships/show """
- show_friendship = bind_api(
- path = '/friendships/show.json',
- payload_type = 'friendship',
- allowed_param = ['source_id', 'source_screen_name',
- 'target_id', 'target_screen_name']
- )
-
- """ Perform bulk look up of friendships from user ID or screenname """
- def lookup_friendships(self, user_ids=None, screen_names=None):
- return self._lookup_friendships(list_to_csv(user_ids), list_to_csv(screen_names))
-
- _lookup_friendships = bind_api(
- path = '/friendships/lookup.json',
- payload_type = 'relationship', payload_list = True,
- allowed_param = ['user_id', 'screen_name'],
- require_auth = True
- )
-
-
- """ friends/ids """
- friends_ids = bind_api(
- path = '/friends/ids.json',
- payload_type = 'ids',
- allowed_param = ['id', 'user_id', 'screen_name', 'cursor']
- )
-
- """ friends/list """
- friends = bind_api(
- path = '/friends/list.json',
- payload_type = 'user', payload_list = True,
- allowed_param = ['id', 'user_id', 'screen_name', 'cursor']
- )
-
- """ friendships/incoming """
- friendships_incoming = bind_api(
- path = '/friendships/incoming.json',
- payload_type = 'ids',
- allowed_param = ['cursor']
- )
-
- """ friendships/outgoing"""
- friendships_outgoing = bind_api(
- path = '/friendships/outgoing.json',
- payload_type = 'ids',
- allowed_param = ['cursor']
- )
-
- """ followers/ids """
- followers_ids = bind_api(
- path = '/followers/ids.json',
- payload_type = 'ids',
- allowed_param = ['id', 'user_id', 'screen_name', 'cursor']
- )
-
- """ followers/list """
- followers = bind_api(
- path = '/followers/list.json',
- payload_type = 'user', payload_list = True,
- allowed_param = ['id', 'user_id', 'screen_name', 'cursor']
- )
-
- """ account/verify_credentials """
- def verify_credentials(self, **kargs):
- try:
- return bind_api(
- path = '/account/verify_credentials.json',
- payload_type = 'user',
- require_auth = True,
- allowed_param = ['include_entities', 'skip_status'],
- )(self, **kargs)
- except TweepError, e:
- if e.response and e.response.status == 401:
- return False
- raise
-
- """ account/rate_limit_status """
- rate_limit_status = bind_api(
- path = '/application/rate_limit_status.json',
- payload_type = 'json',
- allowed_param = ['resources'],
- use_cache = False
- )
-
- """ account/update_delivery_device """
- set_delivery_device = bind_api(
- path = '/account/update_delivery_device.json',
- method = 'POST',
- allowed_param = ['device'],
- payload_type = 'user',
- require_auth = True
- )
-
- """ account/update_profile_colors """
- update_profile_colors = bind_api(
- path = '/account/update_profile_colors.json',
- method = 'POST',
- payload_type = 'user',
- allowed_param = ['profile_background_color', 'profile_text_color',
- 'profile_link_color', 'profile_sidebar_fill_color',
- 'profile_sidebar_border_color'],
- require_auth = True
- )
-
- """ account/update_profile_image """
- def update_profile_image(self, filename):
- headers, post_data = API._pack_image(filename, 700)
- return bind_api(
- path = '/account/update_profile_image.json',
- method = 'POST',
- payload_type = 'user',
- require_auth = True
- )(self, post_data=post_data, headers=headers)
-
- """ account/update_profile_background_image """
- def update_profile_background_image(self, filename, *args, **kargs):
- headers, post_data = API._pack_image(filename, 800)
- bind_api(
- path = '/account/update_profile_background_image.json',
- method = 'POST',
- payload_type = 'user',
- allowed_param = ['tile'],
- require_auth = True
- )(self, post_data=post_data, headers=headers)
-
- """ account/update_profile """
- update_profile = bind_api(
- path = '/account/update_profile.json',
- method = 'POST',
- payload_type = 'user',
- allowed_param = ['name', 'url', 'location', 'description'],
- require_auth = True
- )
-
- """ favorites """
- favorites = bind_api(
- path = '/favorites/list.json',
- payload_type = 'status', payload_list = True,
- allowed_param = ['screen_name', 'user_id', 'max_id', 'count', 'since_id', 'max_id']
- )
-
- """ favorites/create """
- create_favorite = bind_api(
- path = '/favorites/create.json',
- method = 'POST',
- payload_type = 'status',
- allowed_param = ['id'],
- require_auth = True
- )
-
- """ favorites/destroy """
- destroy_favorite = bind_api(
- path = '/favorites/destroy.json',
- method = 'POST',
- payload_type = 'status',
- allowed_param = ['id'],
- require_auth = True
- )
-
- """ blocks/create """
- create_block = bind_api(
- path = '/blocks/create.json',
- method = 'POST',
- payload_type = 'user',
- allowed_param = ['id', 'user_id', 'screen_name'],
- require_auth = True
- )
-
- """ blocks/destroy """
- destroy_block = bind_api(
- path = '/blocks/destroy.json',
- method = 'DELETE',
- payload_type = 'user',
- allowed_param = ['id', 'user_id', 'screen_name'],
- require_auth = True
- )
-
- """ blocks/blocking """
- blocks = bind_api(
- path = '/blocks/list.json',
- payload_type = 'user', payload_list = True,
- allowed_param = ['cursor'],
- require_auth = True
- )
-
- """ blocks/blocking/ids """
- blocks_ids = bind_api(
- path = '/blocks/ids.json',
- payload_type = 'json',
- require_auth = True
- )
-
- """ report_spam """
- report_spam = bind_api(
- path = '/users/report_spam.json',
- method = 'POST',
- payload_type = 'user',
- allowed_param = ['user_id', 'screen_name'],
- require_auth = True
- )
-
- """ saved_searches """
- saved_searches = bind_api(
- path = '/saved_searches/list.json',
- payload_type = 'saved_search', payload_list = True,
- require_auth = True
- )
-
- """ saved_searches/show """
- get_saved_search = bind_api(
- path = '/saved_searches/show/{id}.json',
- payload_type = 'saved_search',
- allowed_param = ['id'],
- require_auth = True
- )
-
- """ saved_searches/create """
- create_saved_search = bind_api(
- path = '/saved_searches/create.json',
- method = 'POST',
- payload_type = 'saved_search',
- allowed_param = ['query'],
- require_auth = True
- )
-
- """ saved_searches/destroy """
- destroy_saved_search = bind_api(
- path = '/saved_searches/destroy/{id}.json',
- method = 'POST',
- payload_type = 'saved_search',
- allowed_param = ['id'],
- require_auth = True
- )
-
- """ help/test """
- def test(self):
- try:
- bind_api(
- path = '/help/test.json',
- )(self)
- except TweepError:
- return False
- return True
-
- create_list = bind_api(
- path = '/lists/create.json',
- method = 'POST',
- payload_type = 'list',
- allowed_param = ['name', 'mode', 'description'],
- require_auth = True
- )
-
- destroy_list = bind_api(
- path = '/lists/destroy.json',
- method = 'POST',
- payload_type = 'list',
- allowed_param = ['owner_screen_name', 'owner_id', 'list_id', 'slug'],
- require_auth = True
- )
-
- update_list = bind_api(
- path = '/lists/update.json',
- method = 'POST',
- payload_type = 'list',
- allowed_param = ['list_id', 'slug', 'name', 'mode', 'description', 'owner_screen_name', 'owner_id'],
- require_auth = True
- )
-
- lists_all = bind_api(
- path = '/lists/list.json',
- payload_type = 'list', payload_list = True,
- allowed_param = ['screen_name', 'user_id'],
- require_auth = True
- )
-
- lists_memberships = bind_api(
- path = '/lists/memberships.json',
- payload_type = 'list', payload_list = True,
- allowed_param = ['screen_name', 'user_id', 'filter_to_owned_lists', 'cursor'],
- require_auth = True
- )
-
- lists_subscriptions = bind_api(
- path = '/lists/subscriptions.json',
- payload_type = 'list', payload_list = True,
- allowed_param = ['screen_name', 'user_id', 'cursor'],
- require_auth = True
- )
-
- list_timeline = bind_api(
- path = '/lists/statuses.json',
- payload_type = 'status', payload_list = True,
- allowed_param = ['owner_screen_name', 'slug', 'owner_id', 'list_id', 'since_id', 'max_id', 'count']
- )
-
- get_list = bind_api(
- path = '/lists/show.json',
- payload_type = 'list',
- allowed_param = ['owner_screen_name', 'owner_id', 'slug', 'list_id']
- )
-
- add_list_member = bind_api(
- path = '/lists/members/create.json',
- method = 'POST',
- payload_type = 'list',
- allowed_param = ['screen_name', 'user_id', 'owner_screen_name', 'owner_id', 'slug', 'list_id'],
- require_auth = True
- )
-
- remove_list_member = bind_api(
- path = '/lists/members/destroy.json',
- method = 'POST',
- payload_type = 'list',
- allowed_param = ['screen_name', 'user_id', 'owner_screen_name', 'owner_id', 'slug', 'list_id'],
- require_auth = True
- )
-
- list_members = bind_api(
- path = '/lists/members.json',
- payload_type = 'user', payload_list = True,
- allowed_param = ['owner_screen_name', 'slug', 'list_id', 'owner_id', 'cursor']
- )
-
- show_list_member = bind_api(
- path = '/lists/members/show.json',
- payload_type = 'user',
- allowed_param = ['list_id', 'slug', 'user_id', 'screen_name', 'owner_screen_name', 'owner_id']
- )
-
- subscribe_list = bind_api(
- path = '/lists/subscribers/create.json',
- method = 'POST',
- payload_type = 'list',
- allowed_param = ['owner_screen_name', 'slug', 'owner_id', 'list_id'],
- require_auth = True
- )
-
- unsubscribe_list = bind_api(
- path = '/lists/subscribers/destroy.json',
- method = 'POST',
- payload_type = 'list',
- allowed_param = ['owner_screen_name', 'slug', 'owner_id', 'list_id'],
- require_auth = True
- )
-
- list_subscribers = bind_api(
- path = '/lists/subscribers.json',
- payload_type = 'user', payload_list = True,
- allowed_param = ['owner_screen_name', 'slug', 'owner_id', 'list_id', 'cursor']
- )
-
- show_list_subscriber = bind_api(
- path = '/lists/subscribers/show.json',
- payload_type = 'user',
- allowed_param = ['owner_screen_name', 'slug', 'screen_name', 'owner_id', 'list_id', 'user_id']
- )
-
- """ trends/available """
- trends_available = bind_api(
- path = '/trends/available.json',
- payload_type = 'json'
- )
-
- trends_place = bind_api(
- path = '/trends/place.json',
- payload_type = 'json',
- allowed_param = ['id', 'exclude']
- )
-
- trends_closest = bind_api(
- path = '/trends/closest.json',
- payload_type = 'json',
- allowed_param = ['lat', 'long']
- )
-
- """ search """
- search = bind_api(
- path = '/search/tweets.json',
- payload_type = 'search_results',
- allowed_param = ['q', 'lang', 'locale', 'since_id', 'geocode', 'show_user', 'max_id', 'since', 'until', 'result_type']
- )
-
- """ trends/daily """
- trends_daily = bind_api(
- path = '/trends/daily.json',
- payload_type = 'json',
- allowed_param = ['date', 'exclude']
- )
-
- """ trends/weekly """
- trends_weekly = bind_api(
- path = '/trends/weekly.json',
- payload_type = 'json',
- allowed_param = ['date', 'exclude']
- )
-
- """ geo/reverse_geocode """
- reverse_geocode = bind_api(
- path = '/geo/reverse_geocode.json',
- payload_type = 'place', payload_list = True,
- allowed_param = ['lat', 'long', 'accuracy', 'granularity', 'max_results']
- )
-
- """ geo/id """
- geo_id = bind_api(
- path = '/geo/id/{id}.json',
- payload_type = 'place',
- allowed_param = ['id']
- )
-
- """ geo/search """
- geo_search = bind_api(
- path = '/geo/search.json',
- payload_type = 'place', payload_list = True,
- allowed_param = ['lat', 'long', 'query', 'ip', 'granularity', 'accuracy', 'max_results', 'contained_within']
- )
-
- """ geo/similar_places """
- geo_similar_places = bind_api(
- path = '/geo/similar_places.json',
- payload_type = 'place', payload_list = True,
- allowed_param = ['lat', 'long', 'name', 'contained_within']
- )
-
- """ Internal use only """
- @staticmethod
- def _pack_image(filename, max_size):
- """Pack image from file into multipart-formdata post body"""
- # image must be less than 700kb in size
- try:
- if os.path.getsize(filename) > (max_size * 1024):
- raise TweepError('File is too big, must be less than 700kb.')
- except os.error:
- raise TweepError('Unable to access file')
-
- # image must be gif, jpeg, or png
- file_type = mimetypes.guess_type(filename)
- if file_type is None:
- raise TweepError('Could not determine file type')
- file_type = file_type[0]
- if file_type not in ['image/gif', 'image/jpeg', 'image/png']:
- raise TweepError('Invalid file type for image: %s' % file_type)
-
- # build the mulitpart-formdata body
- fp = open(filename, 'rb')
- BOUNDARY = 'Tw3ePy'
- body = []
- body.append('--' + BOUNDARY)
- body.append('Content-Disposition: form-data; name="image"; filename="%s"' % filename)
- body.append('Content-Type: %s' % file_type)
- body.append('')
- body.append(fp.read())
- body.append('--' + BOUNDARY + '--')
- body.append('')
- fp.close()
- body = '\r\n'.join(body)
-
- # build headers
- headers = {
- 'Content-Type': 'multipart/form-data; boundary=Tw3ePy',
- 'Content-Length': str(len(body))
- }
-
- return headers, body
-
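
For context on the deletion: the vendored tweepy exposed each bind_api()
binding above as an ordinary method. A sketch of the old call pattern
(placeholder credentials, shown only to illustrate what callers of this
removed module did):

    # Sketch of the removed tweepy 2.1 usage; keys are placeholders.
    import tweepy

    auth = tweepy.OAuthHandler("CONSUMER_KEY", "CONSUMER_SECRET")
    auth.set_access_token("ACCESS_KEY", "ACCESS_SECRET")
    api = tweepy.API(auth)

    # home_timeline/user_timeline/... are the bound methods defined above
    for status in api.user_timeline(screen_name="twitter", count=5):
        print status.text
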
diff --git a/lib/tweepy/auth.py b/lib/tweepy/auth.py
deleted file mode 100644
index 27890aa..0000000
--- a/lib/tweepy/auth.py
+++ /dev/null
@@ -1,163 +0,0 @@
-# Tweepy
-# Copyright 2009-2010 Joshua Roesslein
-# See LICENSE for details.
-
-from urllib2 import Request, urlopen
-import base64
-
-from tweepy import oauth
-from tweepy.error import TweepError
-from tweepy.api import API
-
-
-class AuthHandler(object):
-
- def apply_auth(self, url, method, headers, parameters):
- """Apply authentication headers to request"""
- raise NotImplementedError
-
- def get_username(self):
- """Return the username of the authenticated user"""
- raise NotImplementedError
-
-
-class BasicAuthHandler(AuthHandler):
-
- def __init__(self, username, password):
- self.username = username
- self._b64up = base64.b64encode('%s:%s' % (username, password))
-
- def apply_auth(self, url, method, headers, parameters):
- headers['Authorization'] = 'Basic %s' % self._b64up
-
- def get_username(self):
- return self.username
-
-
-class OAuthHandler(AuthHandler):
- """OAuth authentication handler"""
-
- OAUTH_HOST = 'api.twitter.com'
- OAUTH_ROOT = '/oauth/'
-
- def __init__(self, consumer_key, consumer_secret, callback=None, secure=False):
- self._consumer = oauth.OAuthConsumer(consumer_key, consumer_secret)
- self._sigmethod = oauth.OAuthSignatureMethod_HMAC_SHA1()
- self.request_token = None
- self.access_token = None
- self.callback = callback
- self.username = None
- self.secure = secure
-
- def _get_oauth_url(self, endpoint, secure=False):
- if self.secure or secure:
- prefix = 'https://'
- else:
- prefix = 'http://'
-
- return prefix + self.OAUTH_HOST + self.OAUTH_ROOT + endpoint
-
- def apply_auth(self, url, method, headers, parameters):
- request = oauth.OAuthRequest.from_consumer_and_token(
- self._consumer, http_url=url, http_method=method,
- token=self.access_token, parameters=parameters
- )
- request.sign_request(self._sigmethod, self._consumer, self.access_token)
- headers.update(request.to_header())
-
- def _get_request_token(self):
- try:
- url = self._get_oauth_url('request_token')
- request = oauth.OAuthRequest.from_consumer_and_token(
- self._consumer, http_url=url, callback=self.callback
- )
- request.sign_request(self._sigmethod, self._consumer, None)
- resp = urlopen(Request(url, headers=request.to_header()))
- return oauth.OAuthToken.from_string(resp.read())
- except Exception, e:
- raise TweepError(e)
-
- def set_request_token(self, key, secret):
- self.request_token = oauth.OAuthToken(key, secret)
-
- def set_access_token(self, key, secret):
- self.access_token = oauth.OAuthToken(key, secret)
-
- def get_authorization_url(self, signin_with_twitter=False):
- """Get the authorization URL to redirect the user"""
- try:
- # get the request token
- self.request_token = self._get_request_token()
-
- # build auth request and return as url
- if signin_with_twitter:
- url = self._get_oauth_url('authenticate')
- else:
- url = self._get_oauth_url('authorize')
- request = oauth.OAuthRequest.from_token_and_callback(
- token=self.request_token, http_url=url
- )
-
- return request.to_url()
- except Exception, e:
- raise TweepError(e)
-
- def get_access_token(self, verifier=None):
- """
- After user has authorized the request token, get access token
- with user supplied verifier.
- """
- try:
- url = self._get_oauth_url('access_token')
-
- # build request
- request = oauth.OAuthRequest.from_consumer_and_token(
- self._consumer,
- token=self.request_token, http_url=url,
- verifier=str(verifier)
- )
- request.sign_request(self._sigmethod, self._consumer, self.request_token)
-
- # send request
- resp = urlopen(Request(url, headers=request.to_header()))
- self.access_token = oauth.OAuthToken.from_string(resp.read())
- return self.access_token
- except Exception, e:
- raise TweepError(e)
-
- def get_xauth_access_token(self, username, password):
- """
- Get an access token from an username and password combination.
- In order to get this working you need to create an app at
- http://twitter.com/apps, after that send a mail to api@twitter.com
- and request activation of xAuth for it.
- """
- try:
- url = self._get_oauth_url('access_token', secure=True) # must use HTTPS
- request = oauth.OAuthRequest.from_consumer_and_token(
- oauth_consumer=self._consumer,
- http_method='POST', http_url=url,
- parameters = {
- 'x_auth_mode': 'client_auth',
- 'x_auth_username': username,
- 'x_auth_password': password
- }
- )
- request.sign_request(self._sigmethod, self._consumer, None)
-
- resp = urlopen(Request(url, data=request.to_postdata()))
- self.access_token = oauth.OAuthToken.from_string(resp.read())
- return self.access_token
- except Exception, e:
- raise TweepError(e)
-
- def get_username(self):
- if self.username is None:
- api = API(self)
- user = api.verify_credentials()
- if user:
- self.username = user.screen_name
- else:
- raise TweepError("Unable to get username, invalid oauth token!")
- return self.username
-
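
The deleted handler implemented the classic three-legged OAuth dance; a
sketch of that flow as the removed code supported it (out-of-band PIN flow
assumed):

    # Sketch: request token -> user authorization -> access token,
    # per the deleted OAuthHandler methods above.
    import tweepy

    auth = tweepy.OAuthHandler("CONSUMER_KEY", "CONSUMER_SECRET", callback="oob")
    print auth.get_authorization_url()       # user visits this URL and approves
    verifier = raw_input("PIN: ")
    token = auth.get_access_token(verifier)  # OAuthToken; persist key/secret
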
diff --git a/lib/tweepy/binder.py b/lib/tweepy/binder.py
deleted file mode 100644
index 0797215..0000000
--- a/lib/tweepy/binder.py
+++ /dev/null
@@ -1,210 +0,0 @@
-# Tweepy
-# Copyright 2009-2010 Joshua Roesslein
-# See LICENSE for details.
-
-import httplib
-import urllib
-import time
-import re
-from StringIO import StringIO
-import gzip
-
-from tweepy.error import TweepError
-from tweepy.utils import convert_to_utf8_str
-from tweepy.models import Model
-
-re_path_template = re.compile('{\w+}')
-
-
-def bind_api(**config):
-
- class APIMethod(object):
-
- path = config['path']
- payload_type = config.get('payload_type', None)
- payload_list = config.get('payload_list', False)
- allowed_param = config.get('allowed_param', [])
- method = config.get('method', 'GET')
- require_auth = config.get('require_auth', False)
- search_api = config.get('search_api', False)
- use_cache = config.get('use_cache', True)
-
- def __init__(self, api, args, kargs):
- # If authentication is required and no credentials
- # are provided, throw an error.
- if self.require_auth and not api.auth:
- raise TweepError('Authentication required!')
-
- self.api = api
- self.post_data = kargs.pop('post_data', None)
- self.retry_count = kargs.pop('retry_count', api.retry_count)
- self.retry_delay = kargs.pop('retry_delay', api.retry_delay)
- self.retry_errors = kargs.pop('retry_errors', api.retry_errors)
- self.headers = kargs.pop('headers', {})
- self.build_parameters(args, kargs)
-
- # Pick correct URL root to use
- if self.search_api:
- self.api_root = api.search_root
- else:
- self.api_root = api.api_root
-
- # Perform any path variable substitution
- self.build_path()
-
- if api.secure:
- self.scheme = 'https://'
- else:
- self.scheme = 'http://'
-
- if self.search_api:
- self.host = api.search_host
- else:
- self.host = api.host
-
- # Manually set Host header to fix an issue in python 2.5
- # or older where Host is set including the 443 port.
- # This causes Twitter to issue 301 redirect.
- # See Issue https://github.com/tweepy/tweepy/issues/12
- self.headers['Host'] = self.host
-
- def build_parameters(self, args, kargs):
- self.parameters = {}
- for idx, arg in enumerate(args):
- if arg is None:
- continue
-
- try:
- self.parameters[self.allowed_param[idx]] = convert_to_utf8_str(arg)
- except IndexError:
- raise TweepError('Too many parameters supplied!')
-
- for k, arg in kargs.items():
- if arg is None:
- continue
- if k in self.parameters:
- raise TweepError('Multiple values for parameter %s supplied!' % k)
-
- self.parameters[k] = convert_to_utf8_str(arg)
-
- def build_path(self):
- for variable in re_path_template.findall(self.path):
- name = variable.strip('{}')
-
- if name == 'user' and 'user' not in self.parameters and self.api.auth:
- # No 'user' parameter provided, fetch it from Auth instead.
- value = self.api.auth.get_username()
- else:
- try:
- value = urllib.quote(self.parameters[name])
- except KeyError:
- raise TweepError('No parameter value found for path variable: %s' % name)
- del self.parameters[name]
-
- self.path = self.path.replace(variable, value)
-
- def execute(self):
- # Build the request URL
- url = self.api_root + self.path
- if len(self.parameters):
- url = '%s?%s' % (url, urllib.urlencode(self.parameters))
-
- # Query the cache if one is available
- # and this request uses a GET method.
- if self.use_cache and self.api.cache and self.method == 'GET':
- cache_result = self.api.cache.get(url)
- # if cache result found and not expired, return it
- if cache_result:
- # must restore api reference
- if isinstance(cache_result, list):
- for result in cache_result:
- if isinstance(result, Model):
- result._api = self.api
- else:
- if isinstance(cache_result, Model):
- cache_result._api = self.api
- return cache_result
-
- # Continue attempting request until successful
- # or maximum number of retries is reached.
- retries_performed = 0
- while retries_performed < self.retry_count + 1:
- # Open connection
- if self.api.secure:
- conn = httplib.HTTPSConnection(self.host, timeout=self.api.timeout)
- else:
- conn = httplib.HTTPConnection(self.host, timeout=self.api.timeout)
-
- # Apply authentication
- if self.api.auth:
- self.api.auth.apply_auth(
- self.scheme + self.host + url,
- self.method, self.headers, self.parameters
- )
-
- # Request compression if configured
- if self.api.compression:
- self.headers['Accept-encoding'] = 'gzip'
-
- # Execute request
- try:
- conn.request(self.method, url, headers=self.headers, body=self.post_data)
- resp = conn.getresponse()
- except Exception, e:
- raise TweepError('Failed to send request: %s' % e)
-
- # Exit request loop if non-retry error code
- if self.retry_errors:
- if resp.status not in self.retry_errors: break
- else:
- if resp.status == 200: break
-
- # Sleep before retrying request again
- time.sleep(self.retry_delay)
- retries_performed += 1
-
- # If an error was returned, throw an exception
- self.api.last_response = resp
- if resp.status != 200:
- try:
- error_msg = self.api.parser.parse_error(resp.read())
- except Exception:
- error_msg = "Twitter error response: status code = %s" % resp.status
- raise TweepError(error_msg, resp)
-
- # Parse the response payload
- body = resp.read()
- if resp.getheader('Content-Encoding', '') == 'gzip':
- try:
- zipper = gzip.GzipFile(fileobj=StringIO(body))
- body = zipper.read()
- except Exception, e:
- raise TweepError('Failed to decompress data: %s' % e)
- result = self.api.parser.parse(self, body)
-
- conn.close()
-
- # Store result into cache if one is available.
- if self.use_cache and self.api.cache and self.method == 'GET' and result:
- self.api.cache.store(url, result)
-
- return result
-
-
- def _call(api, *args, **kargs):
-
- method = APIMethod(api, args, kargs)
- return method.execute()
-
-
- # Set pagination mode
- if 'cursor' in APIMethod.allowed_param:
- _call.pagination_mode = 'cursor'
- elif 'max_id' in APIMethod.allowed_param and \
- 'since_id' in APIMethod.allowed_param:
- _call.pagination_mode = 'id'
- elif 'page' in APIMethod.allowed_param:
- _call.pagination_mode = 'page'
-
- return _call
-
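
The binder was the core trick of the removed library: bind_api() manufactures
one method per endpoint from declarative config. The same pattern reduced to a
skeleton, with illustrative names rather than tweepy's internals:

    # Sketch: a declarative method factory in the style of bind_api.
    def bind_endpoint(**config):
        def _call(client, *args, **kwargs):
            params = dict(zip(config.get('allowed_param', []), args))
            params.update(kwargs)
            return client.request(config.get('method', 'GET'),
                                  config['path'], params)
        return _call

    class Client(object):
        def request(self, method, path, params):
            print method, path, params  # a real client would issue HTTP here

        home_timeline = bind_endpoint(path='/statuses/home_timeline.json',
                                      allowed_param=['count'])

    Client().home_timeline(count=5)  # GET /statuses/home_timeline.json {'count': 5}
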
diff --git a/lib/tweepy/cache.py b/lib/tweepy/cache.py
deleted file mode 100644
index 25564a3..0000000
--- a/lib/tweepy/cache.py
+++ /dev/null
@@ -1,424 +0,0 @@
-# Tweepy
-# Copyright 2009-2010 Joshua Roesslein
-# See LICENSE for details.
-
-import time
-import datetime
-import threading
-import os
-
-try:
- import cPickle as pickle
-except ImportError:
- import pickle
-
-try:
- import hashlib
-except ImportError:
- # python 2.4
- import md5 as hashlib
-
-try:
- import fcntl
-except ImportError:
- # Probably on a windows system
- # TODO: use win32file
- pass
-
-
-class Cache(object):
- """Cache interface"""
-
- def __init__(self, timeout=60):
- """Initialize the cache
- timeout: number of seconds to keep a cached entry
- """
- self.timeout = timeout
-
- def store(self, key, value):
- """Add new record to cache
- key: entry key
- value: data of entry
- """
- raise NotImplementedError
-
- def get(self, key, timeout=None):
- """Get cached entry if exists and not expired
- key: which entry to get
- timeout: override timeout with this value [optional]
- """
- raise NotImplementedError
-
- def count(self):
- """Get count of entries currently stored in cache"""
- raise NotImplementedError
-
- def cleanup(self):
- """Delete any expired entries in cache."""
- raise NotImplementedError
-
- def flush(self):
- """Delete all cached entries"""
- raise NotImplementedError
-
-
-class MemoryCache(Cache):
- """In-memory cache"""
-
- def __init__(self, timeout=60):
- Cache.__init__(self, timeout)
- self._entries = {}
- self.lock = threading.Lock()
-
- def __getstate__(self):
- # pickle
- return {'entries': self._entries, 'timeout': self.timeout}
-
- def __setstate__(self, state):
- # unpickle
- self.lock = threading.Lock()
- self._entries = state['entries']
- self.timeout = state['timeout']
-
- def _is_expired(self, entry, timeout):
- return timeout > 0 and (time.time() - entry[0]) >= timeout
-
- def store(self, key, value):
- self.lock.acquire()
- self._entries[key] = (time.time(), value)
- self.lock.release()
-
- def get(self, key, timeout=None):
- self.lock.acquire()
- try:
- # check to see if we have this key
- entry = self._entries.get(key)
- if not entry:
- # no hit, return nothing
- return None
-
- # use provided timeout in arguments if provided
- # otherwise use the one provided during init.
- if timeout is None:
- timeout = self.timeout
-
- # make sure entry is not expired
- if self._is_expired(entry, timeout):
- # entry expired, delete and return nothing
- del self._entries[key]
- return None
-
- # entry found and not expired, return it
- return entry[1]
- finally:
- self.lock.release()
-
- def count(self):
- return len(self._entries)
-
- def cleanup(self):
- self.lock.acquire()
- try:
- for k, v in self._entries.items():
- if self._is_expired(v, self.timeout):
- del self._entries[k]
- finally:
- self.lock.release()
-
- def flush(self):
- self.lock.acquire()
- self._entries.clear()
- self.lock.release()
-
-
-class FileCache(Cache):
- """File-based cache"""
-
- # locks used to make cache thread-safe
- cache_locks = {}
-
- def __init__(self, cache_dir, timeout=60):
- Cache.__init__(self, timeout)
- if os.path.exists(cache_dir) is False:
- os.mkdir(cache_dir)
- self.cache_dir = cache_dir
- if cache_dir in FileCache.cache_locks:
- self.lock = FileCache.cache_locks[cache_dir]
- else:
- self.lock = threading.Lock()
- FileCache.cache_locks[cache_dir] = self.lock
-
- if os.name == 'posix':
- self._lock_file = self._lock_file_posix
- self._unlock_file = self._unlock_file_posix
- elif os.name == 'nt':
- self._lock_file = self._lock_file_win32
- self._unlock_file = self._unlock_file_win32
- else:
- print 'Warning! FileCache locking not supported on this system!'
- self._lock_file = self._lock_file_dummy
- self._unlock_file = self._unlock_file_dummy
-
- def _get_path(self, key):
- md5 = hashlib.md5()
- md5.update(key)
- return os.path.join(self.cache_dir, md5.hexdigest())
-
- def _lock_file_dummy(self, path, exclusive=True):
- return None
-
- def _unlock_file_dummy(self, lock):
- return
-
- def _lock_file_posix(self, path, exclusive=True):
- lock_path = path + '.lock'
- if exclusive is True:
- f_lock = open(lock_path, 'w')
- fcntl.lockf(f_lock, fcntl.LOCK_EX)
- else:
- f_lock = open(lock_path, 'r')
- fcntl.lockf(f_lock, fcntl.LOCK_SH)
- if os.path.exists(lock_path) is False:
- f_lock.close()
- return None
- return f_lock
-
- def _unlock_file_posix(self, lock):
- lock.close()
-
- def _lock_file_win32(self, path, exclusive=True):
- # TODO: implement
- return None
-
- def _unlock_file_win32(self, lock):
- # TODO: implement
- return
-
- def _delete_file(self, path):
- os.remove(path)
- if os.path.exists(path + '.lock'):
- os.remove(path + '.lock')
-
- def store(self, key, value):
- path = self._get_path(key)
- self.lock.acquire()
- try:
- # acquire lock and open file
- f_lock = self._lock_file(path)
- datafile = open(path, 'wb')
-
- # write data
- pickle.dump((time.time(), value), datafile)
-
- # close and unlock file
- datafile.close()
- self._unlock_file(f_lock)
- finally:
- self.lock.release()
-
- def get(self, key, timeout=None):
- return self._get(self._get_path(key), timeout)
-
- def _get(self, path, timeout):
- if os.path.exists(path) is False:
- # no record
- return None
- self.lock.acquire()
- try:
- # acquire lock and open
- f_lock = self._lock_file(path, False)
- datafile = open(path, 'rb')
-
- # read pickled object
- created_time, value = pickle.load(datafile)
- datafile.close()
-
- # check if value is expired
- if timeout is None:
- timeout = self.timeout
- if timeout > 0 and (time.time() - created_time) >= timeout:
- # expired! delete from cache
- value = None
- self._delete_file(path)
-
- # unlock and return result
- self._unlock_file(f_lock)
- return value
- finally:
- self.lock.release()
-
- def count(self):
- c = 0
- for entry in os.listdir(self.cache_dir):
- if entry.endswith('.lock'):
- continue
- c += 1
- return c
-
- def cleanup(self):
- for entry in os.listdir(self.cache_dir):
- if entry.endswith('.lock'):
- continue
- self._get(os.path.join(self.cache_dir, entry), None)
-
- def flush(self):
- for entry in os.listdir(self.cache_dir):
- if entry.endswith('.lock'):
- continue
- self._delete_file(os.path.join(self.cache_dir, entry))
-
-class MemCacheCache(Cache):
- """Cache interface"""
-
- def __init__(self, client, timeout=60):
- """Initialize the cache
- client: The memcache client
- timeout: number of seconds to keep a cached entry
- """
- self.client = client
- self.timeout = timeout
-
- def store(self, key, value):
- """Add new record to cache
- key: entry key
- value: data of entry
- """
- self.client.set(key, value, time=self.timeout)
-
- def get(self, key, timeout=None):
- """Get cached entry if exists and not expired
- key: which entry to get
- timeout: override timeout with this value [optional]. DOES NOT WORK HERE
- """
- return self.client.get(key)
-
- def count(self):
- """Get count of entries currently stored in cache. RETURN 0"""
- raise NotImplementedError
-
- def cleanup(self):
- """Delete any expired entries in cache. NO-OP"""
- raise NotImplementedError
-
- def flush(self):
- """Delete all cached entries. NO-OP"""
- raise NotImplementedError
-
-class RedisCache(Cache):
- '''Cache running in a redis server'''
-
- def __init__(self, client, timeout=60, keys_container = 'tweepy:keys', pre_identifier = 'tweepy:'):
- Cache.__init__(self, timeout)
- self.client = client
- self.keys_container = keys_container
- self.pre_identifier = pre_identifier
-
- def _is_expired(self, entry, timeout):
- # Returns true if the entry has expired
- return timeout > 0 and (time.time() - entry[0]) >= timeout
-
- def store(self, key, value):
- '''Store the key, value pair in our redis server'''
- # Prepend tweepy to our key, this makes it easier to identify tweepy keys in our redis server
- key = self.pre_identifier + key
- # Get a pipe (to execute several redis commands in one step)
- pipe = self.client.pipeline()
- # Set our values in a redis hash (similar to python dict)
- pipe.set(key, pickle.dumps((time.time(), value)))
- # Set the expiration
- pipe.expire(key, self.timeout)
- # Add the key to a set containing all the keys
- pipe.sadd(self.keys_container, key)
- # Execute the instructions in the redis server
- pipe.execute()
-
- def get(self, key, timeout=None):
- '''Given a key, returns an element from the redis table'''
- key = self.pre_identifier + key
- # Check to see if we have this key
- unpickled_entry = self.client.get(key)
- if not unpickled_entry:
- # No hit, return nothing
- return None
-
- entry = pickle.loads(unpickled_entry)
- # Use provided timeout in arguments if provided
- # otherwise use the one provided during init.
- if timeout is None:
- timeout = self.timeout
-
- # Make sure entry is not expired
- if self._is_expired(entry, timeout):
- # entry expired, delete and return nothing
- self.delete_entry(key)
- return None
- # entry found and not expired, return it
- return entry[1]
-
- def count(self):
-        '''Note: This is not very efficient, since it retrieves all the keys from the redis
- server to know how many keys we have'''
- return len(self.client.smembers(self.keys_container))
-
- def delete_entry(self, key):
- '''Delete an object from the redis table'''
- pipe = self.client.pipeline()
- pipe.srem(self.keys_container, key)
- pipe.delete(key)
- pipe.execute()
-
- def cleanup(self):
- '''Cleanup all the expired keys'''
- keys = self.client.smembers(self.keys_container)
- for key in keys:
- entry = self.client.get(key)
- if entry:
- entry = pickle.loads(entry)
- if self._is_expired(entry, self.timeout):
- self.delete_entry(key)
-
- def flush(self):
- '''Delete all entries from the cache'''
- keys = self.client.smembers(self.keys_container)
- for key in keys:
- self.delete_entry(key)
-
-
-class MongodbCache(Cache):
- """A simple pickle-based MongoDB cache sytem."""
-
- def __init__(self, db, timeout=3600, collection='tweepy_cache'):
- """Should receive a "database" cursor from pymongo."""
- Cache.__init__(self, timeout)
- self.timeout = timeout
- self.col = db[collection]
- self.col.create_index('created', expireAfterSeconds=timeout)
-
- def store(self, key, value):
- from bson.binary import Binary
-
- now = datetime.datetime.utcnow()
- blob = Binary(pickle.dumps(value))
-
- self.col.insert({'created': now, '_id': key, 'value': blob})
-
- def get(self, key, timeout=None):
- if timeout:
- raise NotImplementedError
- obj = self.col.find_one({'_id': key})
- if obj:
- return pickle.loads(obj['value'])
-
- def count(self):
- return self.col.find({}).count()
-
- def delete_entry(self, key):
- return self.col.remove({'_id': key})
-
- def cleanup(self):
- """MongoDB will automatically clear expired keys."""
- pass
-
- def flush(self):
- self.col.drop()
- self.col.create_index('created', expireAfterSeconds=self.timeout)
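For context, the removed cache classes were usable on their own as well as through the API object (the binder earlier in this patch only stores GET results when self.api.cache is set). A minimal sketch of the MemoryCache shown above, in Python 2 like the deleted module:

    from tweepy.cache import MemoryCache

    cache = MemoryCache(timeout=120)
    cache.store('home_timeline', {'some': 'payload'})
    print(cache.get('home_timeline'))  # the stored value, until 120s elapse
    print(cache.count())               # 1
    cache.flush()

A FileCache('cache_dir') could be swapped in the same way, since both honor the same store/get/count/cleanup/flush interface.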
diff --git a/lib/tweepy/cursor.py b/lib/tweepy/cursor.py
deleted file mode 100644
index 9061bfd..0000000
--- a/lib/tweepy/cursor.py
+++ /dev/null
@@ -1,170 +0,0 @@
-# Tweepy
-# Copyright 2009-2010 Joshua Roesslein
-# See LICENSE for details.
-
-from tweepy.error import TweepError
-
-class Cursor(object):
- """Pagination helper class"""
-
- def __init__(self, method, *args, **kargs):
- if hasattr(method, 'pagination_mode'):
- if method.pagination_mode == 'cursor':
- self.iterator = CursorIterator(method, args, kargs)
- elif method.pagination_mode == 'id':
- self.iterator = IdIterator(method, args, kargs)
- elif method.pagination_mode == 'page':
- self.iterator = PageIterator(method, args, kargs)
- else:
- raise TweepError('Invalid pagination mode.')
- else:
- raise TweepError('This method does not perform pagination')
-
- def pages(self, limit=0):
- """Return iterator for pages"""
- if limit > 0:
- self.iterator.limit = limit
- return self.iterator
-
- def items(self, limit=0):
- """Return iterator for items in each page"""
- i = ItemIterator(self.iterator)
- i.limit = limit
- return i
-
-class BaseIterator(object):
-
- def __init__(self, method, args, kargs):
- self.method = method
- self.args = args
- self.kargs = kargs
- self.limit = 0
-
- def next(self):
- raise NotImplementedError
-
- def prev(self):
- raise NotImplementedError
-
- def __iter__(self):
- return self
-
-class CursorIterator(BaseIterator):
-
- def __init__(self, method, args, kargs):
- BaseIterator.__init__(self, method, args, kargs)
- self.next_cursor = -1
- self.prev_cursor = 0
- self.count = 0
-
- def next(self):
- if self.next_cursor == 0 or (self.limit and self.count == self.limit):
- raise StopIteration
- data, cursors = self.method(
- cursor=self.next_cursor, *self.args, **self.kargs
- )
- self.prev_cursor, self.next_cursor = cursors
- if len(data) == 0:
- raise StopIteration
- self.count += 1
- return data
-
- def prev(self):
- if self.prev_cursor == 0:
- raise TweepError('Can not page back more, at first page')
- data, self.next_cursor, self.prev_cursor = self.method(
- cursor=self.prev_cursor, *self.args, **self.kargs
- )
- self.count -= 1
- return data
-
-class IdIterator(BaseIterator):
-
- def __init__(self, method, args, kargs):
- BaseIterator.__init__(self, method, args, kargs)
- self.max_id = kargs.get('max_id')
- self.since_id = kargs.get('since_id')
- self.count = 0
-
- def next(self):
- """Fetch a set of items with IDs less than current set."""
- if self.limit and self.limit == self.count:
- raise StopIteration
-
- # max_id is inclusive so decrement by one
- # to avoid requesting duplicate items.
-        max_id = self.max_id - 1 if self.max_id else None
- data = self.method(max_id = max_id, *self.args, **self.kargs)
- if len(data) == 0:
- raise StopIteration
- self.max_id = data.max_id
- self.since_id = data.since_id
- self.count += 1
- return data
-
- def prev(self):
- """Fetch a set of items with IDs greater than current set."""
- if self.limit and self.limit == self.count:
- raise StopIteration
-
- since_id = self.max_id
- data = self.method(since_id = since_id, *self.args, **self.kargs)
- if len(data) == 0:
- raise StopIteration
- self.max_id = data.max_id
- self.since_id = data.since_id
- self.count += 1
- return data
-
-class PageIterator(BaseIterator):
-
- def __init__(self, method, args, kargs):
- BaseIterator.__init__(self, method, args, kargs)
- self.current_page = 0
-
- def next(self):
- self.current_page += 1
- items = self.method(page=self.current_page, *self.args, **self.kargs)
- if len(items) == 0 or (self.limit > 0 and self.current_page > self.limit):
- raise StopIteration
- return items
-
- def prev(self):
- if (self.current_page == 1):
- raise TweepError('Can not page back more, at first page')
- self.current_page -= 1
- return self.method(page=self.current_page, *self.args, **self.kargs)
-
-class ItemIterator(BaseIterator):
-
- def __init__(self, page_iterator):
- self.page_iterator = page_iterator
- self.limit = 0
- self.current_page = None
- self.page_index = -1
- self.count = 0
-
- def next(self):
- if self.limit > 0 and self.count == self.limit:
- raise StopIteration
- if self.current_page is None or self.page_index == len(self.current_page) - 1:
- # Reached end of current page, get the next page...
- self.current_page = self.page_iterator.next()
- self.page_index = -1
- self.page_index += 1
- self.count += 1
- return self.current_page[self.page_index]
-
- def prev(self):
- if self.current_page is None:
- raise TweepError('Can not go back more, at first page')
- if self.page_index == 0:
- # At the beginning of the current page, move to next...
- self.current_page = self.page_iterator.prev()
- self.page_index = len(self.current_page)
- if self.page_index == 0:
- raise TweepError('No more items')
- self.page_index -= 1
- self.count -= 1
- return self.current_page[self.page_index]
-
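The pagination_mode attribute set by the binder earlier in this patch is exactly what Cursor dispatches on above. A short usage sketch, assuming an authenticated old-style tweepy API object named api (not part of this diff) whose followers method is cursor-paginated:

    from tweepy import Cursor

    # pages() yields whole result pages; items() flattens them
    for page in Cursor(api.followers).pages(3):
        print(len(page))

    for user in Cursor(api.followers).items(10):
        print(user.screen_name)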
diff --git a/lib/tweepy/error.py b/lib/tweepy/error.py
deleted file mode 100644
index 753e2fe..0000000
--- a/lib/tweepy/error.py
+++ /dev/null
@@ -1,15 +0,0 @@
-# Tweepy
-# Copyright 2009-2010 Joshua Roesslein
-# See LICENSE for details.
-
-class TweepError(Exception):
- """Tweepy exception"""
-
- def __init__(self, reason, response=None):
- self.reason = unicode(reason)
- self.response = response
- Exception.__init__(self, reason)
-
- def __str__(self):
- return self.reason
-
diff --git a/lib/tweepy/models.py b/lib/tweepy/models.py
deleted file mode 100644
index 3442790..0000000
--- a/lib/tweepy/models.py
+++ /dev/null
@@ -1,431 +0,0 @@
-# Tweepy
-# Copyright 2009-2010 Joshua Roesslein
-# See LICENSE for details.
-
-from tweepy.error import TweepError
-from tweepy.utils import parse_datetime, parse_html_value, parse_a_href, \
- parse_search_datetime, unescape_html
-
-
-class ResultSet(list):
- """A list like object that holds results from a Twitter API query."""
- def __init__(self, max_id=None, since_id=None):
- super(ResultSet, self).__init__()
- self._max_id = max_id
- self._since_id = since_id
-
- @property
- def max_id(self):
- if self._max_id:
- return self._max_id
- ids = self.ids()
- return max(ids) if ids else None
-
- @property
- def since_id(self):
- if self._since_id:
- return self._since_id
- ids = self.ids()
- return min(ids) if ids else None
-
- def ids(self):
- return [item.id for item in self if hasattr(item, 'id')]
-
-class Model(object):
-
- def __init__(self, api=None):
- self._api = api
-
- def __getstate__(self):
- # pickle
- pickle = dict(self.__dict__)
- try:
- del pickle['_api'] # do not pickle the API reference
- except KeyError:
- pass
- return pickle
-
- @classmethod
- def parse(cls, api, json):
- """Parse a JSON object into a model instance."""
- raise NotImplementedError
-
- @classmethod
- def parse_list(cls, api, json_list):
- """Parse a list of JSON objects into a result set of model instances."""
- results = ResultSet()
- for obj in json_list:
- if obj:
- results.append(cls.parse(api, obj))
- return results
-
-
-class Status(Model):
-
- @classmethod
- def parse(cls, api, json):
- status = cls(api)
- for k, v in json.items():
- if k == 'user':
- user_model = getattr(api.parser.model_factory, 'user')
- user = user_model.parse(api, v)
- setattr(status, 'author', user)
-                setattr(status, 'user', user)  # DEPRECATED
- elif k == 'created_at':
- setattr(status, k, parse_datetime(v))
- elif k == 'source':
- if '<' in v:
- setattr(status, k, parse_html_value(v))
- setattr(status, 'source_url', parse_a_href(v))
- else:
- setattr(status, k, v)
- setattr(status, 'source_url', None)
- elif k == 'retweeted_status':
- setattr(status, k, Status.parse(api, v))
- elif k == 'place':
- if v is not None:
- setattr(status, k, Place.parse(api, v))
- else:
- setattr(status, k, None)
- else:
- setattr(status, k, v)
- return status
-
- def destroy(self):
- return self._api.destroy_status(self.id)
-
- def retweet(self):
- return self._api.retweet(self.id)
-
- def retweets(self):
- return self._api.retweets(self.id)
-
- def favorite(self):
- return self._api.create_favorite(self.id)
-
-
-class User(Model):
-
- @classmethod
- def parse(cls, api, json):
- user = cls(api)
- for k, v in json.items():
- if k == 'created_at':
- setattr(user, k, parse_datetime(v))
- elif k == 'status':
- setattr(user, k, Status.parse(api, v))
- elif k == 'following':
- # twitter sets this to null if it is false
- if v is True:
- setattr(user, k, True)
- else:
- setattr(user, k, False)
- else:
- setattr(user, k, v)
- return user
-
- @classmethod
- def parse_list(cls, api, json_list):
- if isinstance(json_list, list):
- item_list = json_list
- else:
- item_list = json_list['users']
-
- results = ResultSet()
- for obj in item_list:
- results.append(cls.parse(api, obj))
- return results
-
- def timeline(self, **kargs):
- return self._api.user_timeline(user_id=self.id, **kargs)
-
- def friends(self, **kargs):
- return self._api.friends(user_id=self.id, **kargs)
-
- def followers(self, **kargs):
- return self._api.followers(user_id=self.id, **kargs)
-
- def follow(self):
- self._api.create_friendship(user_id=self.id)
- self.following = True
-
- def unfollow(self):
- self._api.destroy_friendship(user_id=self.id)
- self.following = False
-
- def lists_memberships(self, *args, **kargs):
- return self._api.lists_memberships(user=self.screen_name, *args, **kargs)
-
- def lists_subscriptions(self, *args, **kargs):
- return self._api.lists_subscriptions(user=self.screen_name, *args, **kargs)
-
- def lists(self, *args, **kargs):
- return self._api.lists(user=self.screen_name, *args, **kargs)
-
- def followers_ids(self, *args, **kargs):
- return self._api.followers_ids(user_id=self.id, *args, **kargs)
-
-
-class DirectMessage(Model):
-
- @classmethod
- def parse(cls, api, json):
- dm = cls(api)
- for k, v in json.items():
- if k == 'sender' or k == 'recipient':
- setattr(dm, k, User.parse(api, v))
- elif k == 'created_at':
- setattr(dm, k, parse_datetime(v))
- else:
- setattr(dm, k, v)
- return dm
-
- def destroy(self):
- return self._api.destroy_direct_message(self.id)
-
-
-class Friendship(Model):
-
- @classmethod
- def parse(cls, api, json):
- relationship = json['relationship']
-
- # parse source
- source = cls(api)
- for k, v in relationship['source'].items():
- setattr(source, k, v)
-
- # parse target
- target = cls(api)
- for k, v in relationship['target'].items():
- setattr(target, k, v)
-
- return source, target
-
-
-class Category(Model):
-
- @classmethod
- def parse(cls, api, json):
- category = cls(api)
- for k, v in json.items():
- setattr(category, k, v)
- return category
-
-
-class SavedSearch(Model):
-
- @classmethod
- def parse(cls, api, json):
- ss = cls(api)
- for k, v in json.items():
- if k == 'created_at':
- setattr(ss, k, parse_datetime(v))
- else:
- setattr(ss, k, v)
- return ss
-
- def destroy(self):
- return self._api.destroy_saved_search(self.id)
-
-
-class SearchResults(ResultSet):
-
- @classmethod
- def parse(cls, api, json):
- metadata = json['search_metadata']
- results = SearchResults(metadata.get('max_id'), metadata.get('since_id'))
- results.refresh_url = metadata.get('refresh_url')
- results.completed_in = metadata.get('completed_in')
- results.query = metadata.get('query')
-
- for status in json['statuses']:
- results.append(Status.parse(api, status))
- return results
-
-
-class List(Model):
-
- @classmethod
- def parse(cls, api, json):
- lst = List(api)
- for k,v in json.items():
- if k == 'user':
- setattr(lst, k, User.parse(api, v))
- elif k == 'created_at':
- setattr(lst, k, parse_datetime(v))
- else:
- setattr(lst, k, v)
- return lst
-
- @classmethod
- def parse_list(cls, api, json_list, result_set=None):
- results = ResultSet()
- if isinstance(json_list, dict):
- json_list = json_list['lists']
- for obj in json_list:
- results.append(cls.parse(api, obj))
- return results
-
- def update(self, **kargs):
- return self._api.update_list(self.slug, **kargs)
-
- def destroy(self):
- return self._api.destroy_list(self.slug)
-
- def timeline(self, **kargs):
- return self._api.list_timeline(self.user.screen_name, self.slug, **kargs)
-
- def add_member(self, id):
- return self._api.add_list_member(self.slug, id)
-
- def remove_member(self, id):
- return self._api.remove_list_member(self.slug, id)
-
- def members(self, **kargs):
- return self._api.list_members(self.user.screen_name, self.slug, **kargs)
-
- def is_member(self, id):
- return self._api.is_list_member(self.user.screen_name, self.slug, id)
-
- def subscribe(self):
- return self._api.subscribe_list(self.user.screen_name, self.slug)
-
- def unsubscribe(self):
- return self._api.unsubscribe_list(self.user.screen_name, self.slug)
-
- def subscribers(self, **kargs):
- return self._api.list_subscribers(self.user.screen_name, self.slug, **kargs)
-
- def is_subscribed(self, id):
- return self._api.is_subscribed_list(self.user.screen_name, self.slug, id)
-
-class Relation(Model):
- @classmethod
- def parse(cls, api, json):
- result = cls(api)
- for k,v in json.items():
- if k == 'value' and json['kind'] in ['Tweet', 'LookedupStatus']:
- setattr(result, k, Status.parse(api, v))
- elif k == 'results':
- setattr(result, k, Relation.parse_list(api, v))
- else:
- setattr(result, k, v)
- return result
-
-class Relationship(Model):
- @classmethod
- def parse(cls, api, json):
- result = cls(api)
- for k,v in json.items():
- if k == 'connections':
- setattr(result, 'is_following', 'following' in v)
- setattr(result, 'is_followed_by', 'followed_by' in v)
- else:
- setattr(result, k, v)
- return result
-
-class JSONModel(Model):
-
- @classmethod
- def parse(cls, api, json):
- return json
-
-
-class IDModel(Model):
-
- @classmethod
- def parse(cls, api, json):
- if isinstance(json, list):
- return json
- else:
- return json['ids']
-
-
-class BoundingBox(Model):
-
- @classmethod
- def parse(cls, api, json):
- result = cls(api)
- if json is not None:
- for k, v in json.items():
- setattr(result, k, v)
- return result
-
- def origin(self):
- """
- Return longitude, latitude of southwest (bottom, left) corner of
- bounding box, as a tuple.
-
- This assumes that bounding box is always a rectangle, which
- appears to be the case at present.
- """
- return tuple(self.coordinates[0][0])
-
- def corner(self):
- """
- Return longitude, latitude of northeast (top, right) corner of
- bounding box, as a tuple.
-
- This assumes that bounding box is always a rectangle, which
- appears to be the case at present.
- """
- return tuple(self.coordinates[0][2])
-
-
-class Place(Model):
-
- @classmethod
- def parse(cls, api, json):
- place = cls(api)
- for k, v in json.items():
- if k == 'bounding_box':
- # bounding_box value may be null (None.)
- # Example: "United States" (id=96683cc9126741d1)
- if v is not None:
- t = BoundingBox.parse(api, v)
- else:
- t = v
- setattr(place, k, t)
- elif k == 'contained_within':
- # contained_within is a list of Places.
- setattr(place, k, Place.parse_list(api, v))
- else:
- setattr(place, k, v)
- return place
-
- @classmethod
- def parse_list(cls, api, json_list):
- if isinstance(json_list, list):
- item_list = json_list
- else:
- item_list = json_list['result']['places']
-
- results = ResultSet()
- for obj in item_list:
- results.append(cls.parse(api, obj))
- return results
-
-class ModelFactory(object):
- """
- Used by parsers for creating instances
- of models. You may subclass this factory
- to add your own extended models.
- """
-
- status = Status
- user = User
- direct_message = DirectMessage
- friendship = Friendship
- saved_search = SavedSearch
- search_results = SearchResults
- category = Category
- list = List
- relation = Relation
- relationship = Relationship
-
- json = JSONModel
- ids = IDModel
- place = Place
- bounding_box = BoundingBox
-
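ModelFactory exists precisely so parsers can be pointed at extended models. A sketch of that hook, using the ModelParser from lib/tweepy/parsers.py (deleted further down), which accepts a model_factory argument:

    from tweepy.models import ModelFactory, Status
    from tweepy.parsers import ModelParser

    class ShoutingStatus(Status):
        @classmethod
        def parse(cls, api, json):
            status = super(ShoutingStatus, cls).parse(api, json)
            status.text = status.text.upper()  # assumes a 'text' field in the payload
            return status

    class ShoutingFactory(ModelFactory):
        status = ShoutingStatus

    parser = ModelParser(model_factory=ShoutingFactory)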
diff --git a/lib/tweepy/oauth.py b/lib/tweepy/oauth.py
deleted file mode 100644
index 286de18..0000000
--- a/lib/tweepy/oauth.py
+++ /dev/null
@@ -1,655 +0,0 @@
-"""
-The MIT License
-
-Copyright (c) 2007 Leah Culver
-
-Permission is hereby granted, free of charge, to any person obtaining a copy
-of this software and associated documentation files (the "Software"), to deal
-in the Software without restriction, including without limitation the rights
-to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
-copies of the Software, and to permit persons to whom the Software is
-furnished to do so, subject to the following conditions:
-
-The above copyright notice and this permission notice shall be included in
-all copies or substantial portions of the Software.
-
-THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
-IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
-FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
-AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
-LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
-OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
-THE SOFTWARE.
-"""
-
-import cgi
-import urllib
-import time
-import random
-import urlparse
-import hmac
-import binascii
-
-
-VERSION = '1.0' # Hi Blaine!
-HTTP_METHOD = 'GET'
-SIGNATURE_METHOD = 'PLAINTEXT'
-
-
-class OAuthError(RuntimeError):
- """Generic exception class."""
-    def __init__(self, message='OAuth error occurred.'):
- self.message = message
-
-def build_authenticate_header(realm=''):
- """Optional WWW-Authenticate header (401 error)"""
- return {'WWW-Authenticate': 'OAuth realm="%s"' % realm}
-
-def escape(s):
- """Escape a URL including any /."""
- return urllib.quote(s, safe='~')
-
-def _utf8_str(s):
- """Convert unicode to utf-8."""
- if isinstance(s, unicode):
- return s.encode("utf-8")
- else:
- return str(s)
-
-def generate_timestamp():
- """Get seconds since epoch (UTC)."""
- return int(time.time())
-
-def generate_nonce(length=8):
- """Generate pseudorandom number."""
- return ''.join([str(random.randint(0, 9)) for i in range(length)])
-
-def generate_verifier(length=8):
- """Generate pseudorandom number."""
- return ''.join([str(random.randint(0, 9)) for i in range(length)])
-
-
-class OAuthConsumer(object):
- """Consumer of OAuth authentication.
-
- OAuthConsumer is a data type that represents the identity of the Consumer
- via its shared secret with the Service Provider.
-
- """
- key = None
- secret = None
-
- def __init__(self, key, secret):
- self.key = key
- self.secret = secret
-
-
-class OAuthToken(object):
- """OAuthToken is a data type that represents an End User via either an access
- or request token.
-
- key -- the token
- secret -- the token secret
-
- """
- key = None
- secret = None
- callback = None
- callback_confirmed = None
- verifier = None
-
- def __init__(self, key, secret):
- self.key = key
- self.secret = secret
-
- def set_callback(self, callback):
- self.callback = callback
- self.callback_confirmed = 'true'
-
- def set_verifier(self, verifier=None):
- if verifier is not None:
- self.verifier = verifier
- else:
- self.verifier = generate_verifier()
-
- def get_callback_url(self):
- if self.callback and self.verifier:
- # Append the oauth_verifier.
- parts = urlparse.urlparse(self.callback)
- scheme, netloc, path, params, query, fragment = parts[:6]
- if query:
- query = '%s&oauth_verifier=%s' % (query, self.verifier)
- else:
- query = 'oauth_verifier=%s' % self.verifier
- return urlparse.urlunparse((scheme, netloc, path, params,
- query, fragment))
- return self.callback
-
- def to_string(self):
- data = {
- 'oauth_token': self.key,
- 'oauth_token_secret': self.secret,
- }
- if self.callback_confirmed is not None:
- data['oauth_callback_confirmed'] = self.callback_confirmed
- return urllib.urlencode(data)
-
- def from_string(s):
- """ Returns a token from something like:
- oauth_token_secret=xxx&oauth_token=xxx
- """
- params = cgi.parse_qs(s, keep_blank_values=False)
- key = params['oauth_token'][0]
- secret = params['oauth_token_secret'][0]
- token = OAuthToken(key, secret)
- try:
- token.callback_confirmed = params['oauth_callback_confirmed'][0]
- except KeyError:
- pass # 1.0, no callback confirmed.
- return token
- from_string = staticmethod(from_string)
-
- def __str__(self):
- return self.to_string()
-
-
-class OAuthRequest(object):
- """OAuthRequest represents the request and can be serialized.
-
- OAuth parameters:
- - oauth_consumer_key
- - oauth_token
- - oauth_signature_method
- - oauth_signature
- - oauth_timestamp
- - oauth_nonce
- - oauth_version
- - oauth_verifier
- ... any additional parameters, as defined by the Service Provider.
- """
- parameters = None # OAuth parameters.
- http_method = HTTP_METHOD
- http_url = None
- version = VERSION
-
- def __init__(self, http_method=HTTP_METHOD, http_url=None, parameters=None):
- self.http_method = http_method
- self.http_url = http_url
- self.parameters = parameters or {}
-
- def set_parameter(self, parameter, value):
- self.parameters[parameter] = value
-
- def get_parameter(self, parameter):
- try:
- return self.parameters[parameter]
- except:
- raise OAuthError('Parameter not found: %s' % parameter)
-
- def _get_timestamp_nonce(self):
- return self.get_parameter('oauth_timestamp'), self.get_parameter(
- 'oauth_nonce')
-
- def get_nonoauth_parameters(self):
- """Get any non-OAuth parameters."""
- parameters = {}
- for k, v in self.parameters.iteritems():
- # Ignore oauth parameters.
- if k.find('oauth_') < 0:
- parameters[k] = v
- return parameters
-
- def to_header(self, realm=''):
- """Serialize as a header for an HTTPAuth request."""
- auth_header = 'OAuth realm="%s"' % realm
- # Add the oauth parameters.
- if self.parameters:
- for k, v in self.parameters.iteritems():
- if k[:6] == 'oauth_':
- auth_header += ', %s="%s"' % (k, escape(str(v)))
- return {'Authorization': auth_header}
-
- def to_postdata(self):
- """Serialize as post data for a POST request."""
- return '&'.join(['%s=%s' % (escape(str(k)), escape(str(v))) \
- for k, v in self.parameters.iteritems()])
-
- def to_url(self):
- """Serialize as a URL for a GET request."""
- return '%s?%s' % (self.get_normalized_http_url(), self.to_postdata())
-
- def get_normalized_parameters(self):
- """Return a string that contains the parameters that must be signed."""
- params = self.parameters
- try:
- # Exclude the signature if it exists.
- del params['oauth_signature']
- except:
- pass
- # Escape key values before sorting.
- key_values = [(escape(_utf8_str(k)), escape(_utf8_str(v))) \
- for k,v in params.items()]
- # Sort lexicographically, first after key, then after value.
- key_values.sort()
- # Combine key value pairs into a string.
- return '&'.join(['%s=%s' % (k, v) for k, v in key_values])
-
- def get_normalized_http_method(self):
- """Uppercases the http method."""
- return self.http_method.upper()
-
- def get_normalized_http_url(self):
- """Parses the URL and rebuilds it to be scheme://host/path."""
- parts = urlparse.urlparse(self.http_url)
- scheme, netloc, path = parts[:3]
- # Exclude default port numbers.
- if scheme == 'http' and netloc[-3:] == ':80':
- netloc = netloc[:-3]
- elif scheme == 'https' and netloc[-4:] == ':443':
- netloc = netloc[:-4]
- return '%s://%s%s' % (scheme, netloc, path)
-
- def sign_request(self, signature_method, consumer, token):
- """Set the signature parameter to the result of build_signature."""
- # Set the signature method.
- self.set_parameter('oauth_signature_method',
- signature_method.get_name())
- # Set the signature.
- self.set_parameter('oauth_signature',
- self.build_signature(signature_method, consumer, token))
-
- def build_signature(self, signature_method, consumer, token):
- """Calls the build signature method within the signature method."""
- return signature_method.build_signature(self, consumer, token)
-
- def from_request(http_method, http_url, headers=None, parameters=None,
- query_string=None):
- """Combines multiple parameter sources."""
- if parameters is None:
- parameters = {}
-
- # Headers
- if headers and 'Authorization' in headers:
- auth_header = headers['Authorization']
- # Check that the authorization header is OAuth.
- if auth_header[:6] == 'OAuth ':
- auth_header = auth_header[6:]
- try:
- # Get the parameters from the header.
- header_params = OAuthRequest._split_header(auth_header)
- parameters.update(header_params)
- except:
- raise OAuthError('Unable to parse OAuth parameters from '
- 'Authorization header.')
-
- # GET or POST query string.
- if query_string:
- query_params = OAuthRequest._split_url_string(query_string)
- parameters.update(query_params)
-
- # URL parameters.
- param_str = urlparse.urlparse(http_url)[4] # query
- url_params = OAuthRequest._split_url_string(param_str)
- parameters.update(url_params)
-
- if parameters:
- return OAuthRequest(http_method, http_url, parameters)
-
- return None
- from_request = staticmethod(from_request)
-
- def from_consumer_and_token(oauth_consumer, token=None,
- callback=None, verifier=None, http_method=HTTP_METHOD,
- http_url=None, parameters=None):
- if not parameters:
- parameters = {}
-
- defaults = {
- 'oauth_consumer_key': oauth_consumer.key,
- 'oauth_timestamp': generate_timestamp(),
- 'oauth_nonce': generate_nonce(),
- 'oauth_version': OAuthRequest.version,
- }
-
- defaults.update(parameters)
- parameters = defaults
-
- if token:
- parameters['oauth_token'] = token.key
- if token.callback:
- parameters['oauth_callback'] = token.callback
- # 1.0a support for verifier.
- if verifier:
- parameters['oauth_verifier'] = verifier
- elif callback:
- # 1.0a support for callback in the request token request.
- parameters['oauth_callback'] = callback
-
- return OAuthRequest(http_method, http_url, parameters)
- from_consumer_and_token = staticmethod(from_consumer_and_token)
-
- def from_token_and_callback(token, callback=None, http_method=HTTP_METHOD,
- http_url=None, parameters=None):
- if not parameters:
- parameters = {}
-
- parameters['oauth_token'] = token.key
-
- if callback:
- parameters['oauth_callback'] = callback
-
- return OAuthRequest(http_method, http_url, parameters)
- from_token_and_callback = staticmethod(from_token_and_callback)
-
- def _split_header(header):
- """Turn Authorization: header into parameters."""
- params = {}
- parts = header.split(',')
- for param in parts:
- # Ignore realm parameter.
- if param.find('realm') > -1:
- continue
- # Remove whitespace.
- param = param.strip()
- # Split key-value.
- param_parts = param.split('=', 1)
- # Remove quotes and unescape the value.
- params[param_parts[0]] = urllib.unquote(param_parts[1].strip('\"'))
- return params
- _split_header = staticmethod(_split_header)
-
- def _split_url_string(param_str):
- """Turn URL string into parameters."""
- parameters = cgi.parse_qs(param_str, keep_blank_values=False)
- for k, v in parameters.iteritems():
- parameters[k] = urllib.unquote(v[0])
- return parameters
- _split_url_string = staticmethod(_split_url_string)
-
-class OAuthServer(object):
- """A worker to check the validity of a request against a data store."""
- timestamp_threshold = 300 # In seconds, five minutes.
- version = VERSION
- signature_methods = None
- data_store = None
-
- def __init__(self, data_store=None, signature_methods=None):
- self.data_store = data_store
- self.signature_methods = signature_methods or {}
-
- def set_data_store(self, data_store):
- self.data_store = data_store
-
- def get_data_store(self):
- return self.data_store
-
- def add_signature_method(self, signature_method):
- self.signature_methods[signature_method.get_name()] = signature_method
- return self.signature_methods
-
- def fetch_request_token(self, oauth_request):
- """Processes a request_token request and returns the
- request token on success.
- """
- try:
- # Get the request token for authorization.
- token = self._get_token(oauth_request, 'request')
- except OAuthError:
- # No token required for the initial token request.
- version = self._get_version(oauth_request)
- consumer = self._get_consumer(oauth_request)
- try:
- callback = self.get_callback(oauth_request)
- except OAuthError:
- callback = None # 1.0, no callback specified.
- self._check_signature(oauth_request, consumer, None)
- # Fetch a new token.
- token = self.data_store.fetch_request_token(consumer, callback)
- return token
-
- def fetch_access_token(self, oauth_request):
- """Processes an access_token request and returns the
- access token on success.
- """
- version = self._get_version(oauth_request)
- consumer = self._get_consumer(oauth_request)
- try:
- verifier = self._get_verifier(oauth_request)
- except OAuthError:
- verifier = None
- # Get the request token.
- token = self._get_token(oauth_request, 'request')
- self._check_signature(oauth_request, consumer, token)
- new_token = self.data_store.fetch_access_token(consumer, token, verifier)
- return new_token
-
- def verify_request(self, oauth_request):
- """Verifies an api call and checks all the parameters."""
- # -> consumer and token
- version = self._get_version(oauth_request)
- consumer = self._get_consumer(oauth_request)
- # Get the access token.
- token = self._get_token(oauth_request, 'access')
- self._check_signature(oauth_request, consumer, token)
- parameters = oauth_request.get_nonoauth_parameters()
- return consumer, token, parameters
-
- def authorize_token(self, token, user):
- """Authorize a request token."""
- return self.data_store.authorize_request_token(token, user)
-
- def get_callback(self, oauth_request):
- """Get the callback URL."""
- return oauth_request.get_parameter('oauth_callback')
-
- def build_authenticate_header(self, realm=''):
- """Optional support for the authenticate header."""
- return {'WWW-Authenticate': 'OAuth realm="%s"' % realm}
-
- def _get_version(self, oauth_request):
- """Verify the correct version request for this server."""
- try:
- version = oauth_request.get_parameter('oauth_version')
- except:
- version = VERSION
- if version and version != self.version:
- raise OAuthError('OAuth version %s not supported.' % str(version))
- return version
-
- def _get_signature_method(self, oauth_request):
- """Figure out the signature with some defaults."""
- try:
- signature_method = oauth_request.get_parameter(
- 'oauth_signature_method')
- except:
- signature_method = SIGNATURE_METHOD
- try:
- # Get the signature method object.
- signature_method = self.signature_methods[signature_method]
- except:
- signature_method_names = ', '.join(self.signature_methods.keys())
- raise OAuthError('Signature method %s not supported try one of the '
- 'following: %s' % (signature_method, signature_method_names))
-
- return signature_method
-
- def _get_consumer(self, oauth_request):
- consumer_key = oauth_request.get_parameter('oauth_consumer_key')
- consumer = self.data_store.lookup_consumer(consumer_key)
- if not consumer:
- raise OAuthError('Invalid consumer.')
- return consumer
-
- def _get_token(self, oauth_request, token_type='access'):
- """Try to find the token for the provided request token key."""
- token_field = oauth_request.get_parameter('oauth_token')
- token = self.data_store.lookup_token(token_type, token_field)
- if not token:
- raise OAuthError('Invalid %s token: %s' % (token_type, token_field))
- return token
-
- def _get_verifier(self, oauth_request):
- return oauth_request.get_parameter('oauth_verifier')
-
- def _check_signature(self, oauth_request, consumer, token):
- timestamp, nonce = oauth_request._get_timestamp_nonce()
- self._check_timestamp(timestamp)
- self._check_nonce(consumer, token, nonce)
- signature_method = self._get_signature_method(oauth_request)
- try:
- signature = oauth_request.get_parameter('oauth_signature')
- except:
- raise OAuthError('Missing signature.')
- # Validate the signature.
- valid_sig = signature_method.check_signature(oauth_request, consumer,
- token, signature)
- if not valid_sig:
- key, base = signature_method.build_signature_base_string(
- oauth_request, consumer, token)
- raise OAuthError('Invalid signature. Expected signature base '
- 'string: %s' % base)
- built = signature_method.build_signature(oauth_request, consumer, token)
-
- def _check_timestamp(self, timestamp):
- """Verify that timestamp is recentish."""
- timestamp = int(timestamp)
- now = int(time.time())
- lapsed = abs(now - timestamp)
- if lapsed > self.timestamp_threshold:
- raise OAuthError('Expired timestamp: given %d and now %s has a '
- 'greater difference than threshold %d' %
- (timestamp, now, self.timestamp_threshold))
-
- def _check_nonce(self, consumer, token, nonce):
- """Verify that the nonce is uniqueish."""
- nonce = self.data_store.lookup_nonce(consumer, token, nonce)
- if nonce:
- raise OAuthError('Nonce already used: %s' % str(nonce))
-
-
-class OAuthClient(object):
- """OAuthClient is a worker to attempt to execute a request."""
- consumer = None
- token = None
-
- def __init__(self, oauth_consumer, oauth_token):
- self.consumer = oauth_consumer
- self.token = oauth_token
-
- def get_consumer(self):
- return self.consumer
-
- def get_token(self):
- return self.token
-
- def fetch_request_token(self, oauth_request):
- """-> OAuthToken."""
- raise NotImplementedError
-
- def fetch_access_token(self, oauth_request):
- """-> OAuthToken."""
- raise NotImplementedError
-
- def access_resource(self, oauth_request):
- """-> Some protected resource."""
- raise NotImplementedError
-
-
-class OAuthDataStore(object):
- """A database abstraction used to lookup consumers and tokens."""
-
- def lookup_consumer(self, key):
- """-> OAuthConsumer."""
- raise NotImplementedError
-
- def lookup_token(self, oauth_consumer, token_type, token_token):
- """-> OAuthToken."""
- raise NotImplementedError
-
- def lookup_nonce(self, oauth_consumer, oauth_token, nonce):
- """-> OAuthToken."""
- raise NotImplementedError
-
- def fetch_request_token(self, oauth_consumer, oauth_callback):
- """-> OAuthToken."""
- raise NotImplementedError
-
- def fetch_access_token(self, oauth_consumer, oauth_token, oauth_verifier):
- """-> OAuthToken."""
- raise NotImplementedError
-
- def authorize_request_token(self, oauth_token, user):
- """-> OAuthToken."""
- raise NotImplementedError
-
-
-class OAuthSignatureMethod(object):
- """A strategy class that implements a signature method."""
- def get_name(self):
- """-> str."""
- raise NotImplementedError
-
- def build_signature_base_string(self, oauth_request, oauth_consumer, oauth_token):
- """-> str key, str raw."""
- raise NotImplementedError
-
- def build_signature(self, oauth_request, oauth_consumer, oauth_token):
- """-> str."""
- raise NotImplementedError
-
- def check_signature(self, oauth_request, consumer, token, signature):
- built = self.build_signature(oauth_request, consumer, token)
- return built == signature
-
-
-class OAuthSignatureMethod_HMAC_SHA1(OAuthSignatureMethod):
-
- def get_name(self):
- return 'HMAC-SHA1'
-
- def build_signature_base_string(self, oauth_request, consumer, token):
- sig = (
- escape(oauth_request.get_normalized_http_method()),
- escape(oauth_request.get_normalized_http_url()),
- escape(oauth_request.get_normalized_parameters()),
- )
-
- key = '%s&' % escape(consumer.secret)
- if token:
- key += escape(token.secret)
- raw = '&'.join(sig)
- return key, raw
-
- def build_signature(self, oauth_request, consumer, token):
- """Builds the base signature string."""
- key, raw = self.build_signature_base_string(oauth_request, consumer,
- token)
-
- # HMAC object.
- try:
- import hashlib # 2.5
- hashed = hmac.new(key, raw, hashlib.sha1)
- except:
- import sha # Deprecated
- hashed = hmac.new(key, raw, sha)
-
- # Calculate the digest base 64.
- return binascii.b2a_base64(hashed.digest())[:-1]
-
-
-class OAuthSignatureMethod_PLAINTEXT(OAuthSignatureMethod):
-
- def get_name(self):
- return 'PLAINTEXT'
-
- def build_signature_base_string(self, oauth_request, consumer, token):
- """Concatenates the consumer key and secret."""
- sig = '%s&' % escape(consumer.secret)
- if token:
- sig = sig + escape(token.secret)
- return sig, sig
-
- def build_signature(self, oauth_request, consumer, token):
- key, raw = self.build_signature_base_string(oauth_request, consumer,
- token)
- return key
\ No newline at end of file
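To make the removed signing flow concrete, a sketch that builds and signs a request with the classes above (placeholder keys and URL; Python 2, like the module itself):

    from tweepy.oauth import (OAuthConsumer, OAuthToken, OAuthRequest,
                              OAuthSignatureMethod_HMAC_SHA1)

    consumer = OAuthConsumer('consumer-key', 'consumer-secret')
    token = OAuthToken('access-token', 'token-secret')
    request = OAuthRequest.from_consumer_and_token(
        consumer, token=token, http_method='GET',
        http_url='https://api.twitter.com/1.1/statuses/home_timeline.json')
    request.sign_request(OAuthSignatureMethod_HMAC_SHA1(), consumer, token)
    print(request.to_header())  # {'Authorization': 'OAuth realm="", oauth_nonce="...", ...}

from_consumer_and_token fills in the timestamp, nonce, and version defaults; sign_request then adds oauth_signature_method and the HMAC-SHA1 oauth_signature computed over the normalized method, URL, and parameters.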
diff --git a/lib/tweepy/parsers.py b/lib/tweepy/parsers.py
deleted file mode 100644
index 55a5ba8..0000000
--- a/lib/tweepy/parsers.py
+++ /dev/null
@@ -1,97 +0,0 @@
-# Tweepy
-# Copyright 2009-2010 Joshua Roesslein
-# See LICENSE for details.
-
-from tweepy.models import ModelFactory
-from tweepy.utils import import_simplejson
-from tweepy.error import TweepError
-
-
-class Parser(object):
-
- def parse(self, method, payload):
- """
- Parse the response payload and return the result.
- Returns a tuple that contains the result data and the cursors
- (or None if not present).
- """
- raise NotImplementedError
-
- def parse_error(self, payload):
- """
- Parse the error message from payload.
- If unable to parse the message, throw an exception
-        and a default error message will be used.
- """
- raise NotImplementedError
-
-
-class RawParser(Parser):
-
- def __init__(self):
- pass
-
- def parse(self, method, payload):
- return payload
-
- def parse_error(self, payload):
- return payload
-
-
-class JSONParser(Parser):
-
- payload_format = 'json'
-
- def __init__(self):
- self.json_lib = import_simplejson()
-
- def parse(self, method, payload):
- try:
- json = self.json_lib.loads(payload)
- except Exception, e:
- raise TweepError('Failed to parse JSON payload: %s' % e)
-
- needsCursors = method.parameters.has_key('cursor')
- if needsCursors and isinstance(json, dict) and 'previous_cursor' in json and 'next_cursor' in json:
- cursors = json['previous_cursor'], json['next_cursor']
- return json, cursors
- else:
- return json
-
- def parse_error(self, payload):
- error = self.json_lib.loads(payload)
- if error.has_key('error'):
- return error['error']
- else:
- return error['errors']
-
-
-class ModelParser(JSONParser):
-
- def __init__(self, model_factory=None):
- JSONParser.__init__(self)
- self.model_factory = model_factory or ModelFactory
-
- def parse(self, method, payload):
- try:
- if method.payload_type is None: return
- model = getattr(self.model_factory, method.payload_type)
- except AttributeError:
- raise TweepError('No model for this payload type: %s' % method.payload_type)
-
- json = JSONParser.parse(self, method, payload)
- if isinstance(json, tuple):
- json, cursors = json
- else:
- cursors = None
-
- if method.payload_list:
- result = model.parse_list(method.api, json)
- else:
- result = model.parse(method.api, json)
-
- if cursors:
- return result, cursors
- else:
- return result
-
diff --git a/lib/tweepy/streaming.py b/lib/tweepy/streaming.py
deleted file mode 100644
index f6d37f4..0000000
--- a/lib/tweepy/streaming.py
+++ /dev/null
@@ -1,248 +0,0 @@
-# Tweepy
-# Copyright 2009-2010 Joshua Roesslein
-# See LICENSE for details.
-
-import httplib
-from socket import timeout
-from threading import Thread
-from time import sleep
-
-from tweepy.models import Status
-from tweepy.api import API
-from tweepy.error import TweepError
-
-from tweepy.utils import import_simplejson, urlencode_noplus
-json = import_simplejson()
-
-STREAM_VERSION = '1.1'
-
-
-class StreamListener(object):
-
- def __init__(self, api=None):
- self.api = api or API()
-
- def on_connect(self):
- """Called once connected to streaming server.
-
- This will be invoked once a successful response
- is received from the server. Allows the listener
- to perform some work prior to entering the read loop.
- """
- pass
-
- def on_data(self, data):
- """Called when raw data is received from connection.
-
- Override this method if you wish to manually handle
- the stream data. Return False to stop stream and close connection.
- """
-
- if 'in_reply_to_status_id' in data:
- status = Status.parse(self.api, json.loads(data))
- if self.on_status(status) is False:
- return False
- elif 'delete' in data:
- delete = json.loads(data)['delete']['status']
- if self.on_delete(delete['id'], delete['user_id']) is False:
- return False
- elif 'limit' in data:
- if self.on_limit(json.loads(data)['limit']['track']) is False:
- return False
-
- def on_status(self, status):
- """Called when a new status arrives"""
- return
-
- def on_delete(self, status_id, user_id):
- """Called when a delete notice arrives for a status"""
- return
-
- def on_limit(self, track):
- """Called when a limitation notice arrvies"""
- return
-
- def on_error(self, status_code):
- """Called when a non-200 status code is returned"""
- return False
-
- def on_timeout(self):
- """Called when stream connection times out"""
- return
-
-
-class Stream(object):
-
- host = 'stream.twitter.com'
-
- def __init__(self, auth, listener, **options):
- self.auth = auth
- self.listener = listener
- self.running = False
- self.timeout = options.get("timeout", 300.0)
- self.retry_count = options.get("retry_count")
- self.retry_time = options.get("retry_time", 10.0)
- self.snooze_time = options.get("snooze_time", 5.0)
- self.buffer_size = options.get("buffer_size", 1500)
- if options.get("secure", True):
- self.scheme = "https"
- else:
- self.scheme = "http"
-
- self.api = API()
- self.headers = options.get("headers") or {}
- self.parameters = None
- self.body = None
-
- def _run(self):
- # Authenticate
- url = "%s://%s%s" % (self.scheme, self.host, self.url)
-
- # Connect and process the stream
- error_counter = 0
- conn = None
- exception = None
- while self.running:
- if self.retry_count is not None and error_counter > self.retry_count:
- # quit if error count greater than retry count
- break
- try:
- if self.scheme == "http":
- conn = httplib.HTTPConnection(self.host)
- else:
- conn = httplib.HTTPSConnection(self.host)
- self.auth.apply_auth(url, 'POST', self.headers, self.parameters)
- conn.connect()
- conn.sock.settimeout(self.timeout)
- conn.request('POST', self.url, self.body, headers=self.headers)
- resp = conn.getresponse()
- if resp.status != 200:
- if self.listener.on_error(resp.status) is False:
- break
- error_counter += 1
- sleep(self.retry_time)
- else:
- error_counter = 0
- self.listener.on_connect()
- self._read_loop(resp)
- except timeout:
- if self.listener.on_timeout() == False:
- break
- if self.running is False:
- break
- conn.close()
- sleep(self.snooze_time)
- except Exception, exception:
- # any other exception is fatal, so kill loop
- break
-
- # cleanup
- self.running = False
- if conn:
- conn.close()
-
- if exception:
- raise
-
- def _data(self, data):
- if self.listener.on_data(data) is False:
- self.running = False
-
- def _read_loop(self, resp):
-
- while self.running and not resp.isclosed():
-
- # Note: keep-alive newlines might be inserted before each length value.
- # read until we get a digit...
- c = '\n'
- while c == '\n' and self.running and not resp.isclosed():
- c = resp.read(1)
- delimited_string = c
-
- # read rest of delimiter length..
- d = ''
- while d != '\n' and self.running and not resp.isclosed():
- d = resp.read(1)
- delimited_string += d
-
- # read the next twitter status object
- if delimited_string.strip().isdigit():
- next_status_obj = resp.read( int(delimited_string) )
- self._data(next_status_obj)
-
- if resp.isclosed():
- self.on_closed(resp)
-
- def _start(self, async):
- self.running = True
- if async:
- Thread(target=self._run).start()
- else:
- self._run()
-
- def on_closed(self, resp):
- """ Called when the response has been closed by Twitter """
- pass
-
- def userstream(self, count=None, async=False, secure=True):
- self.parameters = {'delimited': 'length'}
- if self.running:
- raise TweepError('Stream object already connected!')
- self.url = '/2/user.json?delimited=length'
- self.host='userstream.twitter.com'
- self._start(async)
-
- def firehose(self, count=None, async=False):
- self.parameters = {'delimited': 'length'}
- if self.running:
- raise TweepError('Stream object already connected!')
- self.url = '/%s/statuses/firehose.json?delimited=length' % STREAM_VERSION
- if count:
- self.url += '&count=%s' % count
- self._start(async)
-
- def retweet(self, async=False):
- self.parameters = {'delimited': 'length'}
- if self.running:
- raise TweepError('Stream object already connected!')
- self.url = '/%s/statuses/retweet.json?delimited=length' % STREAM_VERSION
- self._start(async)
-
- def sample(self, count=None, async=False):
- self.parameters = {'delimited': 'length'}
- if self.running:
- raise TweepError('Stream object already connected!')
- self.url = '/%s/statuses/sample.json?delimited=length' % STREAM_VERSION
- if count:
- self.url += '&count=%s' % count
- self._start(async)
-
- def filter(self, follow=None, track=None, async=False, locations=None,
- count = None, stall_warnings=False, languages=None):
- self.parameters = {}
- self.headers['Content-type'] = "application/x-www-form-urlencoded"
- if self.running:
- raise TweepError('Stream object already connected!')
- self.url = '/%s/statuses/filter.json?delimited=length' % STREAM_VERSION
- if follow:
- self.parameters['follow'] = ','.join(map(str, follow))
- if track:
- self.parameters['track'] = ','.join(map(str, track))
- if locations and len(locations) > 0:
- assert len(locations) % 4 == 0
- self.parameters['locations'] = ','.join(['%.2f' % l for l in locations])
- if count:
- self.parameters['count'] = count
- if stall_warnings:
- self.parameters['stall_warnings'] = stall_warnings
- if languages:
- self.parameters['language'] = ','.join(map(str, languages))
- self.body = urlencode_noplus(self.parameters)
- self.parameters['delimited'] = 'length'
- self._start(async)
-
- def disconnect(self):
- if self.running is False:
- return
- self.running = False
-
diff --git a/lib/tweepy/utils.py b/lib/tweepy/utils.py
deleted file mode 100644
index 52c6c79..0000000
--- a/lib/tweepy/utils.py
+++ /dev/null
@@ -1,103 +0,0 @@
-# Tweepy
-# Copyright 2010 Joshua Roesslein
-# See LICENSE for details.
-
-from datetime import datetime
-import time
-import htmlentitydefs
-import re
-import locale
-from urllib import quote
-
-
-def parse_datetime(string):
- # Set locale for date parsing
- locale.setlocale(locale.LC_TIME, 'C')
-
- # We must parse datetime this way to work in python 2.4
- date = datetime(*(time.strptime(string, '%a %b %d %H:%M:%S +0000 %Y')[0:6]))
-
- # Reset locale back to the default setting
- locale.setlocale(locale.LC_TIME, '')
- return date
-
-
-def parse_html_value(html):
-
- return html[html.find('>')+1:html.rfind('<')]
-
-
-def parse_a_href(atag):
-
- start = atag.find('"') + 1
- end = atag.find('"', start)
- return atag[start:end]
-
-
-def parse_search_datetime(string):
- # Set locale for date parsing
- locale.setlocale(locale.LC_TIME, 'C')
-
- # We must parse datetime this way to work in python 2.4
- date = datetime(*(time.strptime(string, '%a, %d %b %Y %H:%M:%S +0000')[0:6]))
-
- # Reset locale back to the default setting
- locale.setlocale(locale.LC_TIME, '')
- return date
-
-
-def unescape_html(text):
- """Created by Fredrik Lundh (http://effbot.org/zone/re-sub.htm#unescape-html)"""
- def fixup(m):
- text = m.group(0)
-        if text[:2] == "&#":
-            # character reference
-            try:
-                if text[:3] == "&#x":
-                    return unichr(int(text[3:-1], 16))
-                else:
-                    return unichr(int(text[2:-1]))
-            except ValueError:
-                pass
-        else:
-            # named entity
-            try:
-                text = unichr(htmlentitydefs.name2codepoint[text[1:-1]])
-            except KeyError:
-                pass
-        return text # leave as is
-    return re.sub("&#?\w+;", fixup, text)
-
-
-def convert_to_utf8_str(arg):
- # written by Michael Norton (http://docondev.blogspot.com/)
- if isinstance(arg, unicode):
- arg = arg.encode('utf-8')
- elif not isinstance(arg, str):
- arg = str(arg)
- return arg
-
-
-
-def import_simplejson():
- try:
- import simplejson as json
- except ImportError:
- try:
- import json # Python 2.6+
- except ImportError:
- try:
- from django.utils import simplejson as json # Google App Engine
- except ImportError:
- raise ImportError, "Can't load a json library"
-
- return json
-
-def list_to_csv(item_list):
- if item_list:
- return ','.join([str(i) for i in item_list])
-
-def urlencode_noplus(query):
- return '&'.join(['%s=%s' % (quote(str(k)), quote(str(v))) \
- for k, v in query.iteritems()])
-
diff --git a/plugins/8ball.py b/plugins/8ball.py
new file mode 100755
index 0000000..4c7f56d
--- /dev/null
+++ b/plugins/8ball.py
@@ -0,0 +1,23 @@
+from util import hook
+from util.text import multiword_replace
+import random
+
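+# Placeholders used in 8ball_responses.txt map to raw IRC control codes:
+# \x02 toggles bold and \x03NN selects a two-digit colour.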
+color_codes = {
+ "": "\x02\x0305",
+ "": "\x02\x0303",
+ "": "\x02"
+}
+
+with open("plugins/data/8ball_responses.txt") as f:
+ responses = [line.strip() for line in
+                 f.readlines() if not line.startswith("//")]
+
+
+@hook.command('8ball')
+def eightball(input, me=None):
+ "8ball -- The all knowing magic eight ball, " \
+ "in electronic form. Ask and it shall be answered!"
+
+ # here we use voodoo magic to tell the future
+ magic = multiword_replace(random.choice(responses), color_codes)
+ me("shakes the magic 8 ball... %s" % magic)
diff --git a/plugins/admin.py b/plugins/admin.py
old mode 100644
new mode 100755
index dde067e..1e23e6d
--- a/plugins/admin.py
+++ b/plugins/admin.py
@@ -1,230 +1,171 @@
+from util import hook
import os
-import sys
import re
import json
import time
import subprocess
-from util import hook
-
-@hook.command(autohelp=False, permissions=["permissions_users"])
-def permissions(inp, bot=None, notice=None):
- """permissions [group] -- lists the users and their permission level who have permissions."""
- permissions = bot.config.get("permissions", [])
- groups = []
- if inp:
- for k in permissions:
- if inp == k:
- groups.append(k)
- else:
- for k in permissions:
- groups.append(k)
- if not groups:
- notice("{} is not a group with permissions".format(inp))
- return None
-
- for v in groups:
- members = ""
- for value in permissions[v]["users"]:
- members = members + value + ", "
- if members:
- notice("the members in the {} group are..".format(v))
- notice(members[:-2])
+@hook.command(adminonly=True)
+def addadmin(inp, notice=None, bot=None, config=None):
+ "addadmin -- Make an admin. " \
+ "(you can add multiple admins at once)"
+ targets = inp.split()
+ for target in targets:
+ if target in bot.config["admins"]:
+ notice("%s is already an admin." % target)
else:
- notice("there are no members in the {} group".format(v))
+ notice("%s is now an admin." % target)
+ bot.config["admins"].append(target)
+ bot.config["admins"].sort()
+ json.dump(bot.config, open('config', 'w'), sort_keys=True, indent=2)
+ return
-@hook.command(permissions=["permissions_users"])
-def deluser(inp, bot=None, notice=None):
- """deluser [user] [group] -- removes elevated permissions from [user].
- If [group] is specified, they will only be removed from [group]."""
- permissions = bot.config.get("permissions", [])
- inp = inp.split(" ")
- groups = []
- try:
- specgroup = inp[1]
- except IndexError:
- specgroup = None
- for k in permissions:
- groups.append(k)
+@hook.command(adminonly=True)
+def deladmin(inp, notice=None, bot=None, config=None):
+ "deladmin -- Make a non-admin." \
+ "(you can delete multiple admins at once)"
+ targets = inp.split()
+ for target in targets:
+ if target in bot.config["admins"]:
+ notice("%s is no longer an admin." % target)
+ bot.config["admins"].remove(target)
+ bot.config["admins"].sort()
+ json.dump(bot.config, open('config', 'w'), sort_keys=True, indent=2)
+ else:
+ notice("%s is not an admin." % target)
+ return
+
+
+@hook.command(autohelp=False)
+def admins(inp, notice=None, bot=None):
+ "admins -- Lists bot's admins."
+ if bot.config["admins"]:
+ notice("Admins are: %s." % ", ".join(bot.config["admins"]))
else:
- for k in permissions:
- if specgroup == k:
- groups.append(k)
- if not groups:
- notice("{} is not a group with permissions".format(inp[1]))
- return None
-
- removed = 0
- for v in groups:
- users = permissions[v]["users"]
- for value in users:
- if inp[0] == value:
- users.remove(inp[0])
- removed = 1
- notice("{} has been removed from the group {}".format(inp[0], v))
- json.dump(bot.config, open('config', 'w'), sort_keys=True, indent=2)
- if specgroup:
- if removed == 0:
- notice("{} is not in the group {}".format(inp[0], specgroup))
- else:
- if removed == 0:
- notice("{} is not in any groups".format(inp[0]))
+ notice("There are no users with admin powers.")
+ return
-@hook.command(permissions=["permissions_users"])
-def adduser(inp, bot=None, notice=None):
- """adduser [user] [group] -- adds elevated permissions to [user].
- [group] must be specified."""
- permissions = bot.config.get("permissions", [])
- inp = inp.split(" ")
- try:
- user = inp[0]
- targetgroup = inp[1]
- except IndexError:
- notice("the group must be specified")
- return None
- if not re.search('.+!.+@.+', user):
- notice("the user must be in the form of \"nick!user@host\"")
- return None
- try:
- users = permissions[targetgroup]["users"]
- except KeyError:
- notice("no such group as {}".format(targetgroup))
- return None
- if user in users:
- notice("{} is already in {}".format(user, targetgroup))
- return None
-
- users.append(user)
- notice("{} has been added to the group {}".format(user, targetgroup))
- users.sort()
- json.dump(bot.config, open('config', 'w'), sort_keys=True, indent=2)
-
-
-@hook.command("quit", autohelp=False, permissions=["botcontrol"])
-@hook.command(autohelp=False, permissions=["botcontrol"])
+@hook.command("quit", autohelp=False, adminonly=True)
+@hook.command(autohelp=False, adminonly=True)
def stop(inp, nick=None, conn=None):
- """stop [reason] -- Kills the bot with [reason] as its quit message."""
+ "stop [reason] -- Kills the bot with [reason] as its quit message."
if inp:
- conn.cmd("QUIT", ["Killed by {} ({})".format(nick, inp)])
+ conn.cmd("QUIT", ["Killed by %s (%s)" % (nick, inp)])
else:
- conn.cmd("QUIT", ["Killed by {}.".format(nick)])
+ conn.cmd("QUIT", ["Killed by %s." % nick])
time.sleep(5)
os.execl("./cloudbot", "cloudbot", "stop")
-@hook.command(autohelp=False, permissions=["botcontrol"])
-def restart(inp, nick=None, conn=None, bot=None):
- """restart [reason] -- Restarts the bot with [reason] as its quit message."""
- for botcon in bot.conns:
- if inp:
- bot.conns[botcon].cmd("QUIT", ["Restarted by {} ({})".format(nick, inp)])
- else:
- bot.conns[botcon].cmd("QUIT", ["Restarted by {}.".format(nick)])
+@hook.command(autohelp=False, adminonly=True)
+def restart(inp, nick=None, conn=None):
+ "restart [reason] -- Restarts the bot with [reason] as its quit message."
+ if inp:
+ conn.cmd("QUIT", ["Restarted by %s (%s)" % (nick, inp)])
+ else:
+ conn.cmd("QUIT", ["Restarted by %s." % nick])
time.sleep(5)
- #os.execl("./cloudbot", "cloudbot", "restart")
- args = sys.argv[:]
- args.insert(0, sys.executable)
- os.execv(sys.executable, args)
+ os.execl("./cloudbot", "cloudbot", "restart")
-@hook.command(autohelp=False, permissions=["botcontrol"])
+@hook.command(autohelp=False, adminonly=True)
def clearlogs(inp, input=None):
- """clearlogs -- Clears the bots log(s)."""
+ "clearlogs -- Clears the bots log(s)."
subprocess.call(["./cloudbot", "clear"])
-@hook.command(permissions=["botcontrol"])
+@hook.command(adminonly=True)
def join(inp, conn=None, notice=None):
- """join -- Joins ."""
- for target in inp.split(" "):
- if not target.startswith("#"):
- target = "#{}".format(target)
- notice("Attempting to join {}...".format(target))
- conn.join(target)
+ "join -- Joins ."
+ notice("Attempting to join %s..." % inp)
+ conn.join(inp)
-@hook.command(autohelp=False, permissions=["botcontrol"])
+@hook.command(autohelp=False, adminonly=True)
def part(inp, conn=None, chan=None, notice=None):
- """part -- Leaves .
- If [channel] is blank the bot will leave the
- channel the command was used in."""
- if inp:
- targets = inp
- else:
- targets = chan
- for target in targets.split(" "):
- if not target.startswith("#"):
- target = "#{}".format(target)
- notice("Attempting to leave {}...".format(target))
- conn.part(target)
-
-
-@hook.command(autohelp=False, permissions=["botcontrol"])
-def cycle(inp, conn=None, chan=None, notice=None):
- """cycle -- Cycles .
- If [channel] is blank the bot will cycle the
- channel the command was used in."""
+ "part -- Leaves ." \
+ "If [channel] is blank the bot will leave the " \
+ "channel the command was used in."
if inp:
target = inp
else:
target = chan
- notice("Attempting to cycle {}...".format(target))
+ notice("Attempting to leave %s..." % target)
+ conn.part(target)
+
+
+@hook.command(autohelp=False, adminonly=True)
+def cycle(inp, conn=None, chan=None, notice=None):
+ "cycle -- Cycles ." \
+ "If [channel] is blank the bot will cycle the " \
+ "channel the command was used in."
+ if inp:
+ target = inp
+ else:
+ target = chan
+ notice("Attempting to cycle %s..." % target)
conn.part(target)
conn.join(target)
-@hook.command(permissions=["botcontrol"])
-def nick(inp, notice=None, conn=None):
- """nick -- Changes the bots nickname to ."""
+@hook.command(adminonly=True)
+def nick(inp, input=None, notice=None, conn=None):
+ "nick -- Changes the bots nickname to ."
if not re.match("^[A-Za-z0-9_|.-\]\[]*$", inp.lower()):
notice("Invalid username!")
return
- notice("Attempting to change nick to \"{}\"...".format(inp))
+ notice("Attempting to change nick to \"%s\"..." % inp)
conn.set_nick(inp)
-@hook.command(permissions=["botcontrol"])
+@hook.command(adminonly=True)
def raw(inp, conn=None, notice=None):
- """raw -- Sends a RAW IRC command."""
+ "raw -- Sends a RAW IRC command."
notice("Raw command sent.")
conn.send(inp)
-@hook.command(permissions=["botcontrol"])
-def say(inp, conn=None, chan=None):
- """say [channel] -- Makes the bot say in [channel].
- If [channel] is blank the bot will say the in the channel
- the command was used in."""
- inp = inp.split(" ")
- if inp[0][0] == "#":
- message = u" ".join(inp[1:])
- out = u"PRIVMSG {} :{}".format(inp[0], message)
- else:
- message = u" ".join(inp[0:])
- out = u"PRIVMSG {} :{}".format(chan, message)
- conn.send(out)
-
-
-@hook.command("act", permissions=["botcontrol"])
-@hook.command(permissions=["botcontrol"])
-def me(inp, conn=None, chan=None):
- """me [channel] -- Makes the bot act out in [channel].
- If [channel] is blank the bot will act the in the channel the
- command was used in."""
+@hook.command(adminonly=True)
+def say(inp, conn=None, chan=None, notice=None):
+ "say [channel] -- Makes the bot say in [channel]. " \
+ "If [channel] is blank the bot will say the in the channel " \
+ "the command was used in."
inp = inp.split(" ")
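+    # If the first word names a channel ("#..."), the rest of the words form
+    # the message; otherwise the whole input goes to the current channel.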
if inp[0][0] == "#":
message = ""
for x in inp[1:]:
message = message + x + " "
message = message[:-1]
- out = u"PRIVMSG {} :\x01ACTION {}\x01".format(inp[0], message)
+ out = "PRIVMSG %s :%s" % (inp[0], message)
else:
message = ""
for x in inp[0:]:
message = message + x + " "
message = message[:-1]
- out = u"PRIVMSG {} :\x01ACTION {}\x01".format(chan, message)
+ out = "PRIVMSG %s :%s" % (chan, message)
+ conn.send(out)
+
+
+@hook.command("act", adminonly=True)
+@hook.command(adminonly=True)
+def me(inp, conn=None, chan=None, notice=None):
+ "me [channel] -- Makes the bot act out in [channel]. " \
+ "If [channel] is blank the bot will act the in the channel the " \
+ "command was used in."
+ inp = inp.split(" ")
+ if inp[0][0] == "#":
+ message = ""
+ for x in inp[1:]:
+ message = message + x + " "
+ message = message[:-1]
+ out = "PRIVMSG %s :\x01ACTION %s\x01" % (inp[0], message)
+ else:
+ message = ""
+ for x in inp[0:]:
+ message = message + x + " "
+ message = message[:-1]
+ out = "PRIVMSG %s :\x01ACTION %s\x01" % (chan, message)
conn.send(out)
diff --git a/disabled_stuff/yahooanswers.py b/plugins/answers.py
similarity index 50%
rename from disabled_stuff/yahooanswers.py
rename to plugins/answers.py
index e28ed63..da846bc 100644
--- a/disabled_stuff/yahooanswers.py
+++ b/plugins/answers.py
@@ -3,14 +3,14 @@ from util import hook, web, text
@hook.command
def answer(inp):
- """answer -- find the answer to a question on Yahoo! Answers"""
+ ".answer -- find the answer to a question on Yahoo! Answers"
query = "SELECT Subject, ChosenAnswer, Link FROM answers.search WHERE query=@query LIMIT 1"
result = web.query(query, {"query": inp.strip()}).one()
- short_url = web.try_isgd(result["Link"])
+ short_url = web.isgd(result["Link"])
# we split the answer and .join() it to remove newlines/extra spaces
- answer_text = text.truncate_str(' '.join(result["ChosenAnswer"].split()), 80)
+ answer = text.truncate_str(" ".join(result["ChosenAnswer"].split()), 80)
- return u'\x02{}\x02 "{}" - {}'.format(result["Subject"], answer_text, short_url)
+ return u"\x02{}\x02 \"{}\" - {}".format(result["Subject"], answer, short_url)
\ No newline at end of file
diff --git a/plugins/bad_version.py b/plugins/bad_version.py
deleted file mode 100644
index 9465f2d..0000000
--- a/plugins/bad_version.py
+++ /dev/null
@@ -1,28 +0,0 @@
-# -*- coding: utf-8 -*-
-from util import hook
-from thread import start_new_thread
-from time import sleep
-
-def wait_and_send(conn, wait, msg):
- sleep(wait)
- conn.send(msg)
-
-@hook.command("check")
-def check_nick(inp, conn=None):
- conn.send("PRIVMSG %s :\x01VERSION\x01" % inp)
-
-
-@hook.event("JOIN")
-def handle_join(info, input=None, conn=None):
- start_new_thread(wait_and_send, (conn, 5, "PRIVMSG %s :\x01VERSION\x01" % input.nick))
-
-@hook.event("NOTICE")
-def handle_ctcp_rply(info, input=None, conn=None, nick=None):
- print "notice..."
- print "-%s-" % input.lastparam
- if input.lastparam == ("\1VERSION %s\1" % "mIRC v7.22 Khaled Mardam-Bey"):
- for chan in conn.channels:
- if chan != "#logbot":
- conn.send("KICK %s %s :bad version" % (chan, nick))
- conn.send("MODE %s +b %s!*@*$#logbot" % (chan, nick))
-
diff --git a/plugins/bandwidth.py b/plugins/bandwidth.py
deleted file mode 100644
index ea23333..0000000
--- a/plugins/bandwidth.py
+++ /dev/null
@@ -1,21 +0,0 @@
-from util import hook, http, web
-from subprocess import check_output, CalledProcessError
-from datetime import datetime
-
-@hook.command("bw", autohelp=False)
-def bw(inp):
- """bw - list last bandwidth measurement to the outside."""
-
- try:
- o = check_output("/bin/chch-bandwidth")
- except CalledProcessError as err:
- return "chch-bandwidth: returned %s" % (str(err))
-
- os = o.split(",")
- upl = int(os[-1])/1024.0/1024.0
- dl = int(os[-2])/1024.0/1024.0
- ts = os[0]
- tsd = datetime.strptime(ts, "%Y%m%d%H%M%S")
-
- return "%s: upl = %f Mbit/s; dl = %f Mbit/s;" % (tsd, upl, dl)
-
diff --git a/disabled_stuff/brainfuck.py b/plugins/bf.py
old mode 100644
new mode 100755
similarity index 84%
rename from disabled_stuff/brainfuck.py
rename to plugins/bf.py
index a7dc12e..0290279
--- a/disabled_stuff/brainfuck.py
+++ b/plugins/bf.py
@@ -1,5 +1,5 @@
-"""brainfuck interpreter adapted from (public domain) code at
-http://brainfuck.sourceforge.net/brain.py"""
+'''brainfuck interpreter adapted from (public domain) code at
+http://brainfuck.sourceforge.net/brain.py'''
import re
import random
@@ -14,7 +14,7 @@ MAX_STEPS = 1000000
@hook.command('brainfuck')
@hook.command
def bf(inp):
- """bf -- Executes as Brainfuck code."""
+ "bf -- Executes as Brainfuck code."
program = re.sub('[^][<>+-.,]', '', inp)
@@ -45,10 +45,10 @@ def bf(inp):
# the main program loop:
while ip < len(program):
c = program[ip]
- if c == '+':
- memory[mp] += 1 % 256
+ if c == '+':
+ memory[mp] = memory[mp] + 1 % 256
elif c == '-':
- memory[mp] -= 1 % 256
+ memory[mp] = memory[mp] - 1 % 256
elif c == '>':
mp += 1
if mp > rightmost:
@@ -57,7 +57,7 @@ def bf(inp):
# no restriction on memory growth!
memory.extend([0] * BUFFER_SIZE)
elif c == '<':
- mp -= 1 % len(memory)
+ mp = mp - 1 % len(memory)
elif c == '.':
output += chr(memory[mp])
if len(output) > 500:
@@ -76,7 +76,7 @@ def bf(inp):
if steps > MAX_STEPS:
if output == '':
output = '(no output)'
- output += '[exceeded {} iterations]'.format(MAX_STEPS)
+ output += '[exceeded %d iterations]' % MAX_STEPS
break
stripped_output = re.sub(r'[\x00-\x1F]', '', output)
diff --git a/plugins/bitcoin.py b/plugins/bitcoin.py
new file mode 100755
index 0000000..52835da
--- /dev/null
+++ b/plugins/bitcoin.py
@@ -0,0 +1,10 @@
+from util import http, hook
+
+
+@hook.command(autohelp=False)
+def bitcoin(inp, say=None):
+ "bitcoin -- Gets current exchange rate for bitcoins from mtgox."
+ data = http.get_json("https://mtgox.com/code/data/ticker.php")
+ ticker = data['ticker']
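+    # "%(key)s" formatting reads buy/high/low/vol straight from the ticker
+    # dict; \x0307 colours each figure and \x0f resets formatting.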
+ say("Current: \x0307$%(buy).2f\x0f - High: \x0307$%(high).2f\x0f"
+ " - Low: \x0307$%(low).2f\x0f - Volume: %(vol)s" % ticker)
diff --git a/plugins/chch_worker.py b/plugins/chch_worker.py
deleted file mode 100644
index e57a9cc..0000000
--- a/plugins/chch_worker.py
+++ /dev/null
@@ -1,310 +0,0 @@
-# -*- coding: utf-8 -*-
-from util import hook
-import re
-import time
-import requests
-import urllib
-from subprocess import check_output
-import json
-import socket
-import struct
-
-#def run_ecmd(cmd):
-## baseuri = "http://netio.chch.lan.ffc/ecmd?"
-## baseuri = "http://10.8.128.35/ecmd?"
-# baseuri = "http://127.0.0.1:4280/ecmd?"
-# cmds = "%20".join(cmd)
-# req = requests.get("%s%s" % (baseuri, cmds))
-# return req.text.strip()
-
-#def run_udp(cmd):
-# ip="127.0.0.1"
-# port=49152
-# s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
-# # 100 ms timeout
-# s.settimeout(0.1)
-# s.connect((ip, port))
-# s.send(cmd)
-# try:
-# rec = s.recv(1024)
-# except:
-# rec = ""
-# s.close()
-# return rec
-
-## lamp_lounge handling
-#@hook.command("lamp_lounge", autohelp=True)
-#def cmd_lamp_lounge(inp, reply=None):
-# """lamp_lounge color - set the lamp color"""
-# args = inp.split(" ")
-# if len(args) < 1:
-# reply("lamp_lounge color - set the lamp color")
-# return
-#
-# if len(args[0]) != 6:
-# reply("lamp_lounge color - set the lamp color")
-# return
-#
-# c = "a\x00\x03" + struct.pack('BBB', int(args[0][2:4], 16), int(args[0][0:2], 16), int(args[0][4:6], 16))
-#
-# rep = run_tcp(c)
-#
-# if len(rep) < 3:
-# reply("Error: no reply")
-# return
-#
-# if rep[0] == 'a':
-# reply("OK")
-# elif rep[0] == 'e':
-# reply("error: " + rep[3:])
-# else:
-# reply("fatal error")
-
-@hook.command("corridor_light_toggle", autohelp=False)
-def cmd_corridor_light_toggle(inp, reply=None):
- """toggle corridor light modes"""
- reply(check_output("echo corridor_light_toggle | ssh -q -p 2322 command@127.0.0.1", shell=True).strip("\n").decode("utf-8"))
-
-@hook.command("corridor_light", autohelp=False)
-def cmd_corridor_light(inp, reply=None):
- """set corridor light color"""
- args = inp.split(" ")
- if len(args) < 1:
- reply("corridor_light color - set the light color")
- return
-
- if len(args[0]) != 6:
- reply("corridor_light color - set the light color")
- return
-
- reply(check_output("echo corridor_light " + args[0] + " | ssh -q -p 2322 command@127.0.0.1", shell=True).strip("\n").decode("utf-8"))
-
-@hook.command("lounge_light_toggle", autohelp=False)
-def cmd_lounge_light_toggle(inp, reply=None):
- """toggle lounge light modes"""
- reply(check_output("echo lounge_light_toggle | ssh -q -p 2322 command@127.0.0.1", shell=True).strip("\n").decode("utf-8"))
-
-@hook.command("lounge_light", autohelp=False)
-def cmd_lounge_light(inp, reply=None):
- """set lounge light color"""
- args = inp.split(" ")
- if len(args) < 1:
- reply("lounge_light color - set the light color")
- return
-
- if len(args[0]) != 6:
- reply("lounge_light color - set the light color")
- return
-
- reply(check_output("echo lounge_light " + args[0] + " | ssh -q -p 2322 command@127.0.0.1", shell=True).strip("\n").decode("utf-8"))
-
-@hook.command("elab_light_toggle", autohelp=False)
-def cmd_elab_light_toggle(inp, reply=None):
- """toggle e-lab light modes"""
- reply(check_output("echo e-lab_light_toggle | ssh -q -p 2322 command@127.0.0.1", shell=True).strip("\n").decode("utf-8"))
-
-@hook.command("elab_light", autohelp=False)
-def cmd_elab_light(inp, reply=None):
- """set e-lab light color"""
- args = inp.split(" ")
- if len(args) < 1:
- reply("e-lab_light color - set the light color")
- return
-
- if len(args[0]) != 6:
- reply("e-lab_light color - set the light color")
- return
-
- reply(check_output("echo e-lab_light " + args[0] + " | ssh -q -p 2322 command@127.0.0.1", shell=True).strip("\n").decode("utf-8"))
-
-## Lamp handling
-#@hook.command("lamp", autohelp=True)
-#def cmd_lamp(inp, reply=None):
-# """lamp color [mode] - set the lamp color"""
-# args = inp.split(" ")
-# if len(args) < 1:
-# reply("""lamp color [mode] - set the lamp color""")
-# return
-#
-# if len(args[0]) != 6:
-# reply("""lamp color [mode] - set the lamp color""")
-# return
-#
-# cmode = "s"
-# if len(args) > 1:
-# if args[1] == "s" or args[1] == "y" or args[1] == "f":
-# cmode = args[1]
-#
-# c = []
-# c.append([5, int(args[0][0:2], 16)])
-# c.append([4, int(args[0][2:4], 16)])
-# c.append([3, int(args[0][4:6], 16)])
-#
-# for ce in c:
-# res = run_ecmd(["channel", str(ce[0]), str(ce[1]), cmode])
-# if res != "OK":
-# return
-# reply("OK")
-
-#@hook.command("lamp_fadestep", autohelp=True)
-#def cmd_lamp_fadestep(inp, reply=None):
-# """lamp_fadestep step - set the lamp fadestep"""
-# args = inp.split(" ")
-#
-# if len(args) < 1:
-# reply("""lamp_fadestep step - set the lamp fadestep""")
-# return
-#
-# reply(run_ecmd(["fadestep", args[0]]))
-
-#@hook.command("lamp_fadestep_get", autohelp=False)
-#def cmd_lamp_fadestep_get(inp, reply=None):
-# """lamp_fadestep_get - get the lamp fadestep"""
-# reply(run_ecmd(["fadestep"]))
-#
-#@hook.command("lamp_channels", autohelp=False)
-#def cmd_lamp_channels(inp, reply=None):
-# """lamp_chanels - get the lamp channel count"""
-# reply(run_ecmd(["channels"]))
-
-# Wiki handling
-def wiki_changes(cmd=False):
- tmpfile = "/tmp/wikichanges.timestamp.txt"
- basewikiuri = "https://www.chaoschemnitz.de/index.php?title=%s"
- wikiapiuri = "https://www.chaoschemnitz.de/api.php?"\
- "action=query&list=recentchanges&format=json&"\
- "rcprop=user|userid|comment|parsedcomment|timestamp|"\
- "title|sha1|sizes|redirect|loginfo|tags|flags"\
- "&rclist=edit|external|new|log"
-
- try:
- fdch = open(tmpfile, "rw")
- timestamp = fdch.read()
- fdch.close()
- except IOError:
- timestamp = None
-
- try:
- r = requests.get(wikiapiuri, verify=False)
- except:
- return []
-
- rarr = []
- changes = r.json()["query"]["recentchanges"]
- ntimestamp = changes[0]["timestamp"]
- for change in changes:
- if change["timestamp"] == timestamp:
- break
- uri = basewikiuri % (urllib.quote(change["title"].encode("utf-8"), safe=""))
- rarr.append("wiki: %s changed '%s' ( %s ) comment: %s" %\
- (change["user"], change["title"], uri,\
- change["comment"].strip("\r\n\t")))
-
- if cmd == False:
- fdch = open(tmpfile, "w+")
- fdch.write("%s" % (ntimestamp))
- fdch.close()
-
- return rarr
-
-def print_wiki_changes(info, conn=None, chan=None):
- """print_wiki_changes - print wiki changes, when the worker calls"""
- ch = wiki_changes(cmd=False)
- if len(ch) == 0:
- return
- for c in ch[::-1]:
- conn.msg("#chaoschemnitz", c)
- time.sleep(0.5)
-
-@hook.command("wikichanges", autohelp=False)
-def cmd_wikichanges(inp, reply=None):
- """wikichanges - Return new recent wiki changes"""
- ch = wiki_changes(cmd=True)
- if len(ch) == 0:
- reply("No changes since the last call were made to the wiki.")
- else:
- for c in ch[::-1][-4:]:
- reply(c)
- time.sleep(0.5)
-
-# Status handling
-def getstatus():
-# try:
- fd = requests.get('http://www.chaoschemnitz.de/chch.json')
- chch_info = fd.json()
- if 'message' in chch_info['state']:
- message = chch_info['state']['message']
- if " | " in message:
- message = message.split(" | ", 1)[0]
- else:
- message = ""
-
- if chch_info['state']['open']:
- state = "geöffnet".decode("utf-8")
- else:
- state = "geschlossen"
-
- return "%s (%s)" % (state, message)
-# return check_output("sudo /bin/chch-status", shell=True).strip("\n").decode("utf-8")
-# except:
-# return "unbekannt"
-
-@hook.command("status", autohelp=False)
-def cmd_status(inp, reply=None):
- """status - Return the door status"""
- reply("Chaostreff Status: %s" % (getstatus()))
-
-@hook.event("TOPIC")
-def topic_update(info, conn=None, chan=None):
- print("topic update")
- """topic_update -- Update the topic on TOPIC command"""
- if chan != "#ChaosChemnitz":
- return
-
- status = getstatus()
- print("status: %s" % (status.encode('utf8')))
-
- topic = info[-1].split(" | ")
- print("topic: %s" % ([ elem.encode('utf8') for elem in topic ]))
-
- sstr = "Status: %s" % (status)
- print("sstr: %s" % (sstr.encode('utf8')))
- didset = False
- i = 0
- while i < len(topic):
- if sstr in topic[i]:
- print("Found current status in topic.")
- didset = True
- break
- if 'Status: ' in topic[i]:
- print("Found Status field in topic.")
- didset = True
- topic[i] = sstr
- i += 1
- if didset == False:
- print("No topic fiel was found, appending.")
- topic.append(sstr)
-
- newtopic = " | ".join(topic)
- if newtopic != info[-1]:
- conn.send("TOPIC %s :%s" % (chan, newtopic))
-
-@hook.event("332")
-def e332_update(info, conn=None, chan=None):
- """e332_update -- run after current topic was requested, runs worker tasks too"""
- chan = info[1]
- topic_update(info, conn=conn, chan=chan)
- print_wiki_changes(info, conn=conn, chan=chan)
-
-@hook.singlethread
-@hook.event("353")
-def e353_update(info, conn=None, chan=None):
- """e353_update -- runs after a channel (#chaoschemnitz) was joined"""
- chan = info[2]
- if chan.lower() == "#chaoschemnitz":
- conn.send("PRIVMSG Chanserv :op #chaoschemnitz")
-
- while True:
- time.sleep(60)
- conn.send("TOPIC %s" % (chan))
-
diff --git a/disabled_stuff/choose.py b/plugins/choose.py
old mode 100644
new mode 100755
similarity index 71%
rename from disabled_stuff/choose.py
rename to plugins/choose.py
index f478328..37b1077
--- a/disabled_stuff/choose.py
+++ b/plugins/choose.py
@@ -6,8 +6,8 @@ from util import hook
@hook.command
def choose(inp):
- """choose , [choice2], [choice3], [choice4], ... --
- Randomly picks one of the given choices."""
+ "choose , [choice2], [choice3], [choice4], ... -- " \
+ "Randomly picks one of the given choices."
c = re.findall(r'([^,]+)', inp)
if len(c) == 1:
diff --git a/disabled_stuff/coin.py b/plugins/coin.py
old mode 100644
new mode 100755
similarity index 54%
rename from disabled_stuff/coin.py
rename to plugins/coin.py
index 7cc2a2a..d96c587
--- a/disabled_stuff/coin.py
+++ b/plugins/coin.py
@@ -1,11 +1,10 @@
-import random
-
from util import hook
+import random
@hook.command(autohelp=False)
-def coin(inp, action=None):
- """coin [amount] -- Flips [amount] of coins."""
+def coin(inp, me=None):
+ "coin [amount] -- Flips [amount] of coins."
if inp:
try:
@@ -16,10 +15,11 @@ def coin(inp, action=None):
amount = 1
if amount == 1:
- action("flips a coin and gets {}.".format(random.choice(["heads", "tails"])))
+ me("flips a coin and gets %s." % random.choice(["heads", "tails"]))
elif amount == 0:
- action("makes a coin flipping motion with its hands.")
+ me("makes a coin flipping motion with its hands.")
else:
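+        # For large amounts, approximate the head count with one normal draw
+        # instead of flipping every coin individually.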
heads = int(random.normalvariate(.5 * amount, (.75 * amount) ** .5))
tails = amount - heads
- action("flips {} coins and gets {} heads and {} tails.".format(amount, heads, tails))
+ me("flips %i coins and gets " \
+ "%i heads and %i tails." % (amount, heads, tails))
diff --git a/plugins/core_sieve.py b/plugins/core_sieve.py
deleted file mode 100644
index 9d41c54..0000000
--- a/plugins/core_sieve.py
+++ /dev/null
@@ -1,60 +0,0 @@
-import re
-from fnmatch import fnmatch
-
-from util import hook
-
-
-@hook.sieve
-def sieve_suite(bot, input, func, kind, args):
- if input.command == 'PRIVMSG' and \
- input.nick.endswith('bot') and args.get('ignorebots', True):
- return None
-
- if kind == "command":
- if input.trigger in bot.config.get('disabled_commands', []):
- return None
-
- fn = re.match(r'^plugins.(.+).py$', func._filename)
- disabled = bot.config.get('disabled_plugins', [])
- if fn and fn.group(1).lower() in disabled:
- return None
-
- acl = bot.config.get('acls', {}).get(func.__name__)
- if acl:
- if 'deny-except' in acl:
- allowed_channels = map(unicode.lower, acl['deny-except'])
- if input.chan.lower() not in allowed_channels:
- return None
- if 'allow-except' in acl:
- denied_channels = map(unicode.lower, acl['allow-except'])
- if input.chan.lower() in denied_channels:
- return None
-
- # shim so plugins using the old "adminonly" permissions format still work
- if args.get('adminonly', False):
- args["permissions"] = ["adminonly"]
-
- if args.get('permissions', False):
- groups = bot.config.get("permissions", [])
-
- allowed_permissions = args.get('permissions', [])
-
- mask = input.mask.lower()
-
- # loop over every group
- for key, group in groups.iteritems():
- # loop over every permission the command allows
- for permission in allowed_permissions:
- # see if the group has that permission
- if permission in group["perms"]:
- # if so, check it
- group_users = [_mask.lower() for _mask in group["users"]]
- for pattern in group_users:
- if fnmatch(mask, pattern):
- print "Allowed group {}.".format(group)
- return input
-
- input.notice("Sorry, you are not allowed to use this command.")
- return None
-
- return input
diff --git a/plugins/core_ctcp.py b/plugins/ctcp.py
old mode 100644
new mode 100755
similarity index 58%
rename from plugins/core_ctcp.py
rename to plugins/ctcp.py
index 3e7a200..ba7c7c2
--- a/plugins/core_ctcp.py
+++ b/plugins/ctcp.py
@@ -1,5 +1,6 @@
+# Plugin by neersighted
import time
-
+import getpass
from util import hook
@@ -16,4 +17,9 @@ def ctcp_ping(inp, notice=None):
@hook.regex(r'^\x01TIME\x01$')
def ctcp_time(inp, notice=None):
- notice('\x01TIME: The time is: {}'.format(time.strftime("%r", time.localtime())))
+ notice('\x01TIME: The time is: %s' % time.strftime("%r", time.localtime()))
+
+
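+# Reply to CTCP FINGER with the account name the bot process runs under.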
+@hook.regex(r'^\x01FINGER\x01$')
+def ctcp_finger(inp, notice=None):
+    notice('\x01FINGER: Username is: %s' % getpass.getuser())
diff --git a/plugins/cypher.py b/plugins/cypher.py
new file mode 100755
index 0000000..ddff3dc
--- /dev/null
+++ b/plugins/cypher.py
@@ -0,0 +1,74 @@
+'''
+Plugin which (de)cyphers a string
+Doesn't cypher non-alphanumeric strings yet.
+by instanceoftom
+'''
+
+from util import hook
+chars = "abcdefghijklmnopqrstuvwxyz1234567890ABCDEFGHIJKLMNOPQRSTUVWXYZ "
+len_chars = len(chars)
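+# Vigenère-style substitution over this fixed alphabet: each message
+# character is shifted by the alphabet index of the matching password
+# character, wrapping modulo len_chars; other characters pass through.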
+
+
+@hook.command
+def cypher(inp):
+ "cypher -- Cyphers with ."
+
+ passwd = inp.split(" ")[0]
+ len_passwd = len(passwd)
+ inp = " ".join(inp.split(" ")[1:])
+
+ out = ""
+ passwd_index = 0
+ for character in inp:
+ try:
+ chr_index = chars.index(character)
+ passwd_chr_index = chars.index(passwd[passwd_index])
+
+ out_chr_index = (chr_index + passwd_chr_index) % len_chars
+ out_chr = chars[out_chr_index]
+
+ out += out_chr
+
+ passwd_index = (passwd_index + 1) % len_passwd
+ except ValueError:
+ out += character
+ continue
+ return out
+
+
+@hook.command
+def decypher(inp):
+ "decypher -- Decyphers with ."
+
+ passwd = inp.split(" ")[0]
+ len_passwd = len(passwd)
+ inp = " ".join(inp.split(" ")[1:])
+
+ passwd_index = 0
+ for character in inp:
+ try:
+ chr_index = chars.index(character)
+ passwd_index = (passwd_index + 1) % len_passwd
+ except ValueError:
+ continue
+
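+    # The loop above advanced passwd_index once per encipherable character,
+    # so it now points one past the index used for the last of them; step
+    # back and undo the shifts while walking the message in reverse.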
+ passwd_index = passwd_index - 1
+ reversed_message = inp[::-1]
+
+ out = ""
+ for character in reversed_message:
+ try:
+ chr_index = chars.index(character)
+ passwd_chr_index = chars.index(passwd[passwd_index])
+
+ out_chr_index = (chr_index - passwd_chr_index) % len_chars
+ out_chr = chars[out_chr_index]
+
+ out += out_chr
+
+ passwd_index = (passwd_index - 1) % len_passwd
+ except ValueError:
+ out += character
+ continue
+
+ return out[::-1]
diff --git a/disabled_stuff/data/8ball_responses.txt b/plugins/data/8ball_responses.txt
similarity index 100%
rename from disabled_stuff/data/8ball_responses.txt
rename to plugins/data/8ball_responses.txt
diff --git a/disabled_stuff/data/flirts.txt b/plugins/data/flirts.txt
old mode 100644
new mode 100755
similarity index 87%
rename from disabled_stuff/data/flirts.txt
rename to plugins/data/flirts.txt
index 6490da8..f5eed69
--- a/disabled_stuff/data/flirts.txt
+++ b/plugins/data/flirts.txt
@@ -44,11 +44,4 @@ Do you live on a chicken farm? Because you sure know how to raise cocks.
Are you wearing space pants? Because your ass is out of this world.
Nice legs. What time do they open?
Are you lost? Because it’s so strange to see an angel so far from heaven.
-Your daddy must have been a baker, because you've got a nice set of buns.
-You're so beautiful that last night you made me forget my pickup line.
-I've never seen such dark eyes with so much light in them.
-I think we should just be friends with sexual tension.
-Whenever I see you I feel like a dog dying to get out of the car.
-If I'd have held you any closer I'd be in back of you.
-I wish I were on Facebook so I could poke you.
-I want you like JFK wanted a car with a roof.
+Your daddy must have been a baker, because you've got a nice set of buns.
\ No newline at end of file
diff --git a/disabled_stuff/data/fortunes.txt b/plugins/data/fortunes.txt
old mode 100644
new mode 100755
similarity index 100%
rename from disabled_stuff/data/fortunes.txt
rename to plugins/data/fortunes.txt
diff --git a/disabled_stuff/data/GeoLiteCity.dat b/plugins/data/geoip.dat
similarity index 63%
rename from disabled_stuff/data/GeoLiteCity.dat
rename to plugins/data/geoip.dat
index e94f60e..718e0e1 100644
Binary files a/disabled_stuff/data/GeoLiteCity.dat and b/plugins/data/geoip.dat differ
diff --git a/disabled_stuff/data/geoip_regions.json b/plugins/data/geoip_regions.json
similarity index 100%
rename from disabled_stuff/data/geoip_regions.json
rename to plugins/data/geoip_regions.json
diff --git a/disabled_stuff/data/insults.txt b/plugins/data/insults.txt
old mode 100644
new mode 100755
similarity index 100%
rename from disabled_stuff/data/insults.txt
rename to plugins/data/insults.txt
diff --git a/plugins/data/itemids.txt b/plugins/data/itemids.txt
new file mode 100755
index 0000000..37e1f50
--- /dev/null
+++ b/plugins/data/itemids.txt
@@ -0,0 +1,386 @@
+// obtained from
+// edited by Lukeroge, _frozen, and MufinMcFlufin
+// Block id
+1 Stone
+2 Grass Block
+3 Dirt
+4 Cobblestone
+5 Wooden Planks
+5:1 Pine Wood Planks
+5:2 Birch Wood Planks
+5:3 Jungle Wood Planks
+6 Sapling
+7 Bedrock
+8 Water
+9 Stationary Water
+10 Lava
+11 Stationary Lava
+12 Sand
+13 Gravel
+14 Gold Ore
+15 Iron Ore
+16 Coal Ore
+17 Wood
+17:1 Pine Wood
+17:2 Birch Wood
+17:3 Jungle Wood
+18 Leaves
+19 Sponge
+20 Glass
+21 Lapis Lazuli Ore
+22 Lapis Lazuli Block
+23 Dispenser
+24 Sandstone
+25 Note Block
+26 Bed
+27 Powered Rail
+28 Detector Rail
+29 Sticky Piston
+30 Cobweb
+31 Grass
+32 Dead Bush
+33 Piston
+//34 Piston Extended
+35 Wool
+35:1 Orange Wool
+35:2 Magenta Wool
+35:3 Light Blue Wool
+35:4 Yellow Wool
+35:5 Lime Wool
+35:6 Pink Wool
+35:7 Gray Wool
+35:8 Light Gray Wool
+35:9 Cyan Wool
+35:10 Purple Wool
+35:11 Blue Wool
+35:12 Brown Wool
+35:13 Green Wool
+35:14 Red Wool
+35:15 Black Wool
+35:0 White Wool
+//36 Block Moved by Piston
+37 Flower
+38 Rose
+39 Brown Mushroom
+40 Red Mushroom
+41 Block of Gold
+42 Block of Iron
+43 Double Slabs
+44 Slabs
+45 Bricks
+46 TNT
+47 Bookshelf
+48 Moss Stone
+49 Obsidian
+50 Torch
+51 Fire
+52 Monster Spawner
+53 Wooden Stairs
+54 Chest
+//55 Redstone Wire
+56 Diamond Ore
+57 Block of Diamond
+58 Crafting Table
+59 Crops
+60 Farmland
+61 Furnace
+62 Lit Furnace
+63 Sign
+64 Wooden Door
+65 Ladder
+66 Rail
+67 Stone Stairs
+68 Sign
+69 Lever
+70 Pressure Plate
+71 Iron Door
+72 Pressure Plate
+73 Redstone Ore
+74 Glowing Redstone Ore
+75 Redstone Torch
+76 Lit Redstone Torch
+77 Button
+78 Snow
+79 Ice
+80 Snow
+81 Cactus
+82 Clay
+83 Sugar cane
+84 Jukebox
+85 Fence
+86 Pumpkin
+87 Netherrack
+88 Soul Sand
+89 Glowstone
+90 Portal
+91 Jack 'o' Lantern
+92 Cake Block
+93 Redstone Repeater
+94 Redstone Repeater (On)
+95 Locked chest
+96 Trapdoor
+97 Hidden Silverfish
+98 Stone Bricks
+99 Mushroom
+100 Mushroom
+101 Iron Bars
+102 Glass Pane
+103 Melon
+104 Pumpkin Stem
+105 Melon Stem
+106 Vines
+107 Fence Gate
+108 Brick Stairs
+109 Stone Brick Stairs
+110 Mycelium
+111 Lily Pad
+112 Nether Brick
+113 Nether Brick Fence
+114 Nether Brick Stairs
+115 Nether Wart
+116 Enchantment Table
+117 Brewing stand
+118 Cauldron
+119 End Portal
+120 End Portal Frame
+121 End Stone
+122 Dragon Egg
+123 Redstone Lamp
+124 Lit Redstone Lamp
+127 Cocoa
+128 Sandstone Stairs
+129 Emerald Ore
+130 Ender Chest
+131 Tripwire Hook
+132 Tripwire
+133 Block of Emerald
+// Items Ids
+256 Iron Shovel
+257 Iron Pickaxe
+258 Iron Axe
+259 Flint and Steel
+260 Apple
+261 Bow
+262 Arrow
+263 Coal
+264 Diamond
+265 Iron Ingot
+266 Gold Ingot
+267 Iron Sword
+268 Wooden Sword
+269 Wooden Shovel
+270 Wooden Pickaxe
+271 Wooden Axe
+272 Stone Sword
+273 Stone Shovel
+274 Stone Pickaxe
+275 Stone Axe
+276 Diamond Sword
+277 Diamond Shovel
+278 Diamond Pickaxe
+279 Diamond Axe
+280 Stick
+281 Bowl
+282 Mushroom Stew
+283 Golden Sword
+284 Golden Shovel
+285 Golden Pickaxe
+286 Golden Axe
+287 String
+288 Feather
+289 Gunpowder
+290 Wooden Hoe
+291 Stone Hoe
+292 Iron Hoe
+293 Diamond Hoe
+294 Golden Hoe
+295 Seeds
+296 Wheat
+297 Bread
+298 Leather Cap
+299 Leather Tunic
+300 Leather Pants
+301 Leather Boots
+302 Chain Helmet
+303 Chain Chestplate
+304 Chain Leggings
+305 Chain Boots
+306 Iron Helmet
+307 Iron Chestplate
+308 Iron Leggings
+309 Iron Boots
+310 Diamond Helmet
+311 Diamond Chestplate
+312 Diamond Leggings
+313 Diamond Boots
+314 Golden Helmet
+315 Golden Chestplate
+316 Golden Leggings
+317 Golden boots
+318 Flint
+319 Raw Porkchop
+320 Cooked Porkchop
+321 Painting
+322 Golden Apple
+323 Sign
+324 Wooden Door
+325 Bucket
+326 Water Bucket
+327 Lava bucket
+328 Minecart
+329 Saddle
+330 Iron Door
+331 Redstone
+332 Snowball
+333 Boat
+334 Leather
+335 Milk
+336 Brick
+337 Clay
+338 Sugar Canes
+339 Paper
+340 Book
+341 Slimeball
+342 Minecart with Chest
+343 Minecart with Furnace
+344 Egg
+345 Compass
+346 Fishing Rod
+347 Clock
+348 Glowstone Dust
+349 Raw Fish
+350 Cooked Fish
+351 Dye
+351:0 Ink Sac
+351:1 Rose Red
+351:2 Cactus Green
+351:3 Cocoa Beans
+351:4 Lapis Lazuli
+351:5 Purple Dye
+351:6 Cyan Dye
+351:7 Light Gray Dye
+351:8 Gray Dye
+351:9 Pink Dye
+351:10 Lime Dye
+351:11 Dandelion Yellow
+351:12 Light Blue Dye
+351:13 Magenta Dye
+351:14 Orange Dye
+351:15 Bone Meal
+352 Bone
+353 Sugar
+354 Cake
+355 Bed
+356 Redstone Repeater
+357 Cookie
+358 Map
+359 Shears
+360 Melon
+361 Pumpkin Seeds
+362 Melon Seeds
+363 Raw Beef
+364 Steak
+365 Raw Chicken
+366 Cooked Chicken
+367 Rotten Flesh
+368 Ender Pearl
+369 Blaze Rod
+370 Ghast Tear
+371 Gold Nugget
+372 Nether Wart
+373 Potion
+373:16 Awkward Potion
+373:32 Thick Potion
+373:64 Mundane Potion
+373:8193 Regeneration Potion (0:45)
+373:8194 Swiftness Potion (3:00)
+373:8195 Fire Resistance Potion (3:00)
+373:8196 Poison Potion (0:45)
+373:8197 Healing Potion
+373:8200 Weakness Potion (1:30)
+373:8201 Strength Potion (3:00)
+373:8202 Slowness Potion (1:30)
+373:8204 Harming Potion
+373:8225 Regeneration Potion II (0:22)
+373:8226 Swiftness Potion II (1:30)
+373:8228 Poison Potion II (0:22)
+373:8229 Healing Potion II
+373:8233 Strength Potion II (1:30)
+373:8236 Harming Potion II
+373:8257 Regeneration Potion (2:00)
+373:8258 Swiftness Potion (8:00)
+373:8259 Fire Resistance Potion (8:00)
+373:8260 Poison Potion (2:00)
+373:8264 Weakness Potion (4:00)
+373:8265 Strength Potion (8:00)
+373:8266 Slowness Potion (4:00)
+373:16378 Fire Resistance Splash (2:15)
+373:16385 Regeneration Splash (0:33)
+373:16386 Swiftness Splash (2:15)
+373:16388 Poison Splash (0:33)
+373:16389 Healing Splash
+373:16392 Weakness Splash (1:07)
+373:16393 Strength Splash (2:15)
+373:16394 Slowness Splash (1:07)
+373:16396 Harming Splash
+373:16418 Swiftness Splash II (1:07)
+373:16420 Poison Splash II (0:16)
+373:16421 Healing Splash II
+373:16425 Strength Splash II (1:07)
+373:16428 Harming Splash II
+373:16449 Regeneration Splash (1:30)
+373:16450 Swiftness Splash (6:00)
+373:16451 Fire Resistance Splash (6:00)
+373:16452 Poison Splash (1:30)
+373:16456 Weakness Splash (3:00)
+373:16457 Strength Splash (6:00)
+373:16458 Slowness Splash (3:00)
+373:16471 Regeneration Splash II (0:16)
+374 Glass Bottle
+375 Spider Eye
+376 Fermented Spider Eye
+377 Blaze Powder
+378 Magma Cream
+379 Brewing Stand
+380 Cauldron
+381 Eye of Ender
+382 Glistering Melon
+// 383 Spawn Egg
+383:50 Creeper Egg
+383:51 Skeleton Egg
+383:52 Spider Egg
+383:54 Zombie Egg
+383:55 Slime Egg
+383:56 Ghast Egg
+383:57 Zombie Pigman Egg
+383:58 Enderman Egg
+383:59 Cave Spider Egg
+383:60 Silverfish Egg
+383:61 Blaze Egg
+383:62 Magma Cube Egg
+383:90 Pig Egg
+383:91 Sheep Egg
+383:92 Cow Egg
+383:93 Chicken Egg
+383:94 Squid Egg
+383:95 Wolf Egg
+383:96 Mooshroom Egg
+383:98 Ocelot Egg
+383:120 Villager Egg
+384 Bottle Of Enchanting
+385 Fire Charge
+386 Book and Quill
+387 Written Book
+388 Emerald
+// Records
+2256 Music Disc 13
+2257 Music Disc Cat
+2258 Music Disc Blocks
+2259 Music Disc Chirp
+2260 Music Disc Far
+2261 Music Disc Mall
+2262 Music Disc Mellohi
+2263 Music Disc Stal
+2264 Music Disc Strad
+2265 Music Disc Ward
+2266 Music Disc 11
diff --git a/disabled_stuff/data/kills.txt b/plugins/data/kills.txt
old mode 100644
new mode 100755
similarity index 100%
rename from disabled_stuff/data/kills.txt
rename to plugins/data/kills.txt
diff --git a/disabled_stuff/data/larts.txt b/plugins/data/larts.txt
old mode 100644
new mode 100755
similarity index 89%
rename from disabled_stuff/data/larts.txt
rename to plugins/data/larts.txt
index 029e3a0..5cdad23
--- a/disabled_stuff/data/larts.txt
+++ b/plugins/data/larts.txt
@@ -1,6 +1,6 @@
smacks {user} in the face with a burlap sack full of broken glass.
swaps {user}'s shampoo with glue.
-installs Windows Vista on {user}'s computer.
+installs Windows on {user}'s computer.
forces {user} to use perl for 3 weeks.
registers {user}'s name with 50 known spammers.
resizes {user}'s console to 40x24.
@@ -37,8 +37,10 @@ takes away {user}'s internet connection.
pushes {user} past the Shoe Event Horizon.
counts '1, 2, 5... er... 3!' and hurls the Holy Handgrenade Of Antioch at {user}.
puts {user} in a nest of camel spiders.
+makes {user} read slashdot at -1.
puts 'alias vim=emacs' in {user}'s /etc/profile.
uninstalls every web browser from {user}'s system.
+locks {user} in the Chateau d'If.
signs {user} up for getting hit on the head lessons.
makes {user} try to set up a Lexmark printer.
fills {user}'s eyedrop bottle with lime juice.
@@ -51,10 +53,13 @@ puts sugar between {user}'s bedsheets.
pours sand into {user}'s breakfast.
mixes epoxy into {user}'s toothpaste.
puts Icy-Hot in {user}'s lube container.
+straps {user} to a chair, and plays an endless low bitrate MP3 loop of "the world's most annoying sound" from "Dumb and Dumber".
+tells Dr. Dre that {user} was talking smack.
forces {user} to use a Commodore 64 for all their word processing.
puts {user} in a room with several heavily armed manic depressives.
makes {user} watch reruns of "Blue's Clues".
puts lye in {user}'s coffee.
+introduces {user} to the clue-by-four.
tattoos the Windows symbol on {user}'s ass.
lets Borg have his way with {user}.
signs {user} up for line dancing classes at the local senior center.
@@ -89,11 +94,12 @@ signs {user} up for the Iowa State Ferret Legging Championship.
attempts to hotswap {user}'s RAM.
dragon punches {user}.
puts railroad spikes into {user}'s side.
-replaces {user}'s lubricant with liquid weld.
+replaces {user}'s Astroglide with JB Weld.
replaces {user}'s stress pills with rat poison pellets.
-replaces {user}'s itch cream with hair removal cream.
+replaces {user}'s crotch itch cream with Nair.
does the Australian Death Grip on {user}.
dances upon the grave of {user}'s ancestors.
-farts loudly in {user}'s general direction.
+farts in {user}'s general direction.
flogs {user} with stinging nettle.
+introduces {user} to the Knights who say Ni.
hands {user} a poison ivy joint.
diff --git a/disabled_stuff/data/name_files/dragons.json b/plugins/data/name_files/dragons.json
similarity index 100%
rename from disabled_stuff/data/name_files/dragons.json
rename to plugins/data/name_files/dragons.json
diff --git a/disabled_stuff/data/name_files/dwarves.json b/plugins/data/name_files/dwarves.json
similarity index 100%
rename from disabled_stuff/data/name_files/dwarves.json
rename to plugins/data/name_files/dwarves.json
diff --git a/disabled_stuff/data/name_files/elves_female.json b/plugins/data/name_files/elves_female.json
similarity index 100%
rename from disabled_stuff/data/name_files/elves_female.json
rename to plugins/data/name_files/elves_female.json
diff --git a/disabled_stuff/data/name_files/elves_male.json b/plugins/data/name_files/elves_male.json
similarity index 100%
rename from disabled_stuff/data/name_files/elves_male.json
rename to plugins/data/name_files/elves_male.json
diff --git a/disabled_stuff/data/name_files/fantasy.json b/plugins/data/name_files/fantasy.json
similarity index 100%
rename from disabled_stuff/data/name_files/fantasy.json
rename to plugins/data/name_files/fantasy.json
diff --git a/disabled_stuff/data/name_files/female.json b/plugins/data/name_files/female.json
similarity index 100%
rename from disabled_stuff/data/name_files/female.json
rename to plugins/data/name_files/female.json
diff --git a/disabled_stuff/data/name_files/general.json b/plugins/data/name_files/general.json
similarity index 100%
rename from disabled_stuff/data/name_files/general.json
rename to plugins/data/name_files/general.json
diff --git a/disabled_stuff/data/name_files/hobbits.json b/plugins/data/name_files/hobbits.json
similarity index 100%
rename from disabled_stuff/data/name_files/hobbits.json
rename to plugins/data/name_files/hobbits.json
diff --git a/disabled_stuff/data/name_files/inns.json b/plugins/data/name_files/inns.json
similarity index 100%
rename from disabled_stuff/data/name_files/inns.json
rename to plugins/data/name_files/inns.json
diff --git a/disabled_stuff/data/name_files/items.json b/plugins/data/name_files/items.json
similarity index 100%
rename from disabled_stuff/data/name_files/items.json
rename to plugins/data/name_files/items.json
diff --git a/disabled_stuff/data/name_files/male.json b/plugins/data/name_files/male.json
similarity index 100%
rename from disabled_stuff/data/name_files/male.json
rename to plugins/data/name_files/male.json
diff --git a/disabled_stuff/data/name_files/narn.json b/plugins/data/name_files/narn.json
similarity index 100%
rename from disabled_stuff/data/name_files/narn.json
rename to plugins/data/name_files/narn.json
diff --git a/disabled_stuff/data/name_files/warrior_cats.json b/plugins/data/name_files/warrior_cats.json
similarity index 100%
rename from disabled_stuff/data/name_files/warrior_cats.json
rename to plugins/data/name_files/warrior_cats.json
diff --git a/disabled_stuff/data/recipes.txt b/plugins/data/recipes.txt
old mode 100644
new mode 100755
similarity index 68%
rename from disabled_stuff/data/recipes.txt
rename to plugins/data/recipes.txt
index 2b0e1db..14ce8cf
--- a/disabled_stuff/data/recipes.txt
+++ b/plugins/data/recipes.txt
@@ -1,7 +1,7 @@
//Minecraft Recipes List
//Created by _303
//Obtained from https://github.com/ClouDev/CloudBot/blob/develop/plugins/data/recipes.txt
-//Edited by CHCMATT for Minecraft version: 1.7.4
+//Edited by _frozen
//
//Summary of Use: Each column is seperated by a comma (,) and rows by a vertical bar (|). Order of Recipes & Categories taken from
//www.minecraftwiki.net/wiki/Crafting for easier updating in the future (The Future!)
@@ -21,10 +21,7 @@
1x Block of Gold: Gold Ingot, Gold Ingot, Gold Ingot | Gold Ingot, Gold Ingot, Gold Ingot | Gold Ingot, Gold Ingot, Gold Ingot
1x Block of Iron: Iron Ingot, Iron Ingot, Iron Ingot | Iron Ingot, Iron Ingot, Iron Ingot | Iron Ingot, Iron Ingot, Iron Ingot
1x Block of Diamond: Diamond, Diamond, Diamond | Diamond, Diamond, Diamond | Diamond, Diamond, Diamond
-1x Block of Coal: Coal, Coal, Coal | Coal, Coal, Coal | Coal, Coal, Coal
-1x Block of Redstone: Redstone Dust, Redstone Dust, Redstone Dust | Redstone Dust, Redstone Dust, Redstone Dust | Redstone Dust, Redstone Dust, Redstone Dust
1x Lapis Lazuli Block: Lapis Lazuli, Lapis Lazuli, Lapis Lazuli | Lapis Lazuli, Lapis Lazuli, Lapis Lazuli | Lapis Lazuli, Lapis Lazuli, Lapis Lazuli
-1x Emerald Block: Emerald, Emerald, Emerald | Emerald, Emerald, Emerald | Emerald, Emerald, Emerald
1x Glowstone: Glowstone Dust, Glowstone Dust | Glowstone Dust, Glowstone Dust
1x Wool: String, String | String, String
1x TNT: Gunpowder, Sand, Gunpowder | Sand, Gunpowder, Sand | Gunpowder, Sand, Gunpowder
@@ -120,7 +117,6 @@
6x Powered Rail: Gold Ingot, None, Gold Ingot | Gold Ingot, Stick, Gold Ingot | Gold Ingot, Redstone, Gold Ingot
6x Detector Rail: Iron Ingot, None, Iron Ingot | Iron Ingot, Pressure Plate, Iron Ingot | Iron Ingot, Redstone, Iron Ingot
1x Boat: Wooden Planks, None, Wooden Planks | Wooden Planks, Wooden Planks, Wooden Planks
-1x Carrot On A Stick: Fishing Rod | None, Carrot
//
//Mechanism Recipes
//
@@ -129,8 +125,7 @@
2x Trapdoor: Wooden Planks, Wooden Planks, Wooden Planks | Wooden Planks, Wooden Planks, Wooden Planks
1x Stone Pressure Plate: Stone, Stone
1x Wooden Pressure Plate: Wooden Planks, Wooden Planks
-1x Stone Button: Stone
-1x Wooden Button: Wooden Planks
+1x Button: Stone | Stone
1x Redstone Torch: Redstone | Stick
1x Lever: Stick | Cobblestone
1x Note Block: Wooden Planks, Wooden Planks, Wooden Planks | Wooden Planks, Redstone, Wooden Planks | Wooden Planks, Wooden Planks, Wooden Planks
@@ -138,13 +133,8 @@
1x Dispenser: Cobblestone, Cobblestone, Cobblestone | Cobblestone, Bow, Cobblestone | Cobblestone, Redstone, Cobblestone
1x Redstone Repeater: Redstone Torch, Redstone, Redstone Torch | Stone, Stone, Stone
1x Piston: Wooden Planks, Wooden Planks, Wooden Planks | Cobblestone, Iron Ingot, Cobblestone | Cobblestone, Redstone, Cobblestone
-1x Sticky Piston: Slime Ball | Piston
-1x Redstone Lamp: None, Redstone Dust, None | Redstone Dust, Glowstone Block, Redstone Dust | None, Redstone Dust, None
-1x Trapped Chest: Chest, Tripwire Hook
-1x Dropper: Cobblestone, Cobblestone, Cobblestone | Cobblestone, None, Cobblestone | Cobblestone, Redstone Dust, Cobblestone
-1x Weighted Pressure Plate (Heavy): Iron Ingot, Iron Ingot
-1x Weighted Pressure Plate (Light): Gold Ingot, Gold Ingot
-2x Tripwire Hook: Iron Ingot | Stick | Wooden Planks
+1x Sticky Piston: none, slime ball, none | none, piston, none
+1x Redstone Lamp: none, redstone dust, none | redstone dust, glowstone block, redstone | none, redstone dust, none
//
//Food Recipes
//
@@ -179,11 +169,6 @@
9x Gold Nugget: Gold Ingot
1x Gold Ingot: Gold Nugget, Gold Nugget, Gold Nugget | Gold Nugget, Gold Nugget, Gold Nugget | Gold Nugget, Gold Nugget, Gold Nugget
1x Eye of Ender: Ender Pearl | Blaze Powder
-1x Item Frame: Stick, Stick, Stick | Stick, Leather, Stick | Stick, Stick, Stick
-1x Anvil: Block of Iron, Block of Iron, Block of Iron | None, Iron Ingot, None | Iron Ingot, Iron Ingot, Iron Ingot
-1x Ender Chest: Obsidian, Obsidian, Obsidian | Osbidian, Eye of Ender, Obsidian | Obsidian, Obsidian, Obsidian
-1x Flower Pot: Brick, None, Brick | None, Brick, None
-2x Lead: None, String, String | None, Slime Ball, String | String, None, None
//
//Dye Recipes
//
@@ -229,41 +214,4 @@
1x Fermented Spider Eye: Spider Eye | Brown Mushroom, Sugar
1x Glistering Melon: Melon Slice, Gold Nugget
9x Gold Nugget: Gold Ingot
-1x Enchantment Table: None, Book, None | Diamond, Obsidian, Diamond | Obsidian, Obsidian, Obsidian
-//
-//Stained Glass Recipes
-//
-8x White Stained Glass: Glass, Glass, Glass | Glass, Bone Meal, Glass | Glass, Glass, Glass
-8x Orange Stained Glass: Glass, Glass, Glass | Glass, Orange Dye, Glass | Glass, Glass, Glass
-8x Magenta Stained Glass: Glass, Glass, Glass | Glass, Magenta Dye, Glass | Glass, Glass, Glass
-8x Light Blue Stained Glass: Glass, Glass, Glass | Glass, Light Blue Dye, Glass | Glass, Glass, Glass
-8x Yellow Stained Glass: Glass, Glass, Glass | Glass, Dandelion Yellow, Glass | Glass, Glass, Glass
-8x Lime Stained Glass: Glass, Glass, Glass | Glass, Lime Dye, Glass | Glass, Glass, Glass
-8x Pink Stained Glass: Glass, Glass, Glass | Glass, Pink Dye, Glass | Glass, Glass, Glass
-8x Gray Stained Glass: Glass, Glass, Glass | Glass, Gray Dye, Glass | Glass, Glass, Glass
-8x Light Gray Stained Glass: Glass, Glass, Glass | Glass, Light Gray Dye, Glass | Glass, Glass, Glass
-8x Cyan Stained Glass: Glass, Glass, Glass | Glass, Cyan Dye, Glass | Glass, Glass, Glass
-8x Purple Stained Glass: Glass, Glass, Glass | Glass, Purple Dye, Glass | Glass, Glass, Glass
-8x Blue Stained Glass: Glass, Glass, Glass | Glass, Lapis Lazuli, Glass | Glass, Glass, Glass
-8x Brown Stained Glass: Glass, Glass, Glass | Glass, Cocoa Beans, Glass | Glass, Glass, Glass
-8x Green Stained Glass: Glass, Glass, Glass | Glass, Cactus Green, Glass | Glass, Glass, Glass
-8x Red Stained Glass: Glass, Glass, Glass | Glass, Rose Red, Glass | Glass, Glass, Glass
-8x Black Stained Glass: Glass, Glass, Glass | Glass, Inc Sac, Glass | Glass, Glass, Glass
-//
-//Stained Glass Panes
-//
-16x White Stained Glass Panes: White Stained Glass, White Stained Glass, White Stained Glass | White Stained Glass, White Stained Glass, White Stained Glass
-16x Orange Stained Glass Panes: Orange Stained Glass, Orange Stained Glass, Orange Stained Glass | Orange Stained Glass, Orange Stained Glass, Orange Stained Glass
-16x Magenta Stained Glass Panes: Magenta Stained Glass, Magenta Stained Glass, Magenta Stained Glass | Magenta Stained Glass, Magenta Stained Glass, Magenta Stained Glass
-16x Light Blue Stained Glass Panes: Light Blue Stained Glass, Light Blue Stained Glass, Light Blue Stained Glass | Light Blue Stained Glass, Light Blue Stained Glass, Light Blue Stained Glass
-16x Yellow Stained Glass Panes: Yellow Stained Glass, Yellow Stained Glass, Yellow Stained Glass | Yellow Stained Glass, Yellow Stained Glass, Yellow Stained Glass
-16x Lime Stained Glass Panes: Lime Stained Glass, Lime Stained Glass, Lime Stained Glass | Lime Stained Glass, Lime Stained Glass, Lime Stained Glass
-16x Pink Stained Glass Panes: Pink Stained Glass, Pink Stained Glass, Pink Stained Glass | Pink Stained Glass, Pink Stained Glass, Pink Stained Glass
-16x Gray Stained Glass Panes: Gray Stained Glass, Gray Stained Glass, Gray Stained Glass | Gray Stained Glass, Gray Stained Glass, Gray Stained Glass
-16x Light Gray Stained Glass Panes: Light Gray Stained Glass, Light Gray Stained Glass, Light Gray Stained Glass | Light Gray Stained Glass, Light Gray Stained Glass, Light Gray Stained Glass
-16x Cyan Stained Glass Panes: Cyan Stained Glass, Cyan Stained Glass, Cyan Stained Glass | Cyan Stained Glass, Cyan Stained Glass, Cyan Stained Glass
-16x Purple Stained Glass Panes: Purple Stained Glass, Purple Stained Glass, Purple Stained Glass | Purple Stained Glass, Purple Stained Glass, Purple Stained Glass
-16x Blue Stained Glass Panes: Blue Stained Glass, Blue Stained Glass, Blue Stained Glass | Blue Stained Glass, Blue Stained Glass, Blue Stained Glass
-16x Brown Stained Glass Panes: Brown Stained Glass, Brown Stained Glass, Brown Stained Glass | Brown Stained Glass, Brown Stained Glass, Brown Stained Glass
-16x Green Stained Glass Panes: Green Stained Glass, Green Stained Glass, Green Stained Glass | Green Stained Glass, Green Stained Glass, Green Stained Glass
-16x Black Stained Glass Panes: Black Stained Glass, Black Stained Glass, Black Stained Glass | Black Stained Glass, Black Stained Glass, Black Stained Glass
+1x Enchantment Table: None, Book, None | Diamond, Obsidian, Diamond | Obsidian, Obsidian, Obsidian
\ No newline at end of file
diff --git a/plugins/data/slap_items.txt b/plugins/data/slap_items.txt
new file mode 100755
index 0000000..2f53980
--- /dev/null
+++ b/plugins/data/slap_items.txt
@@ -0,0 +1,19 @@
+cast iron skillet
+large trout
+baseball bat
+wooden cane
+CRT monitor
+diamond sword
+physics textbook
+television
+mau5head
+five ton truck
+roll of duct tape
+book
+cobblestone block
+lava bucket
+rubber chicken
+gold block
+fire extinguisher
+heavy rock
+chunk of dirt
diff --git a/plugins/data/slaps.txt b/plugins/data/slaps.txt
new file mode 100755
index 0000000..8952952
--- /dev/null
+++ b/plugins/data/slaps.txt
@@ -0,0 +1,14 @@
+slaps {user} with a {item}.
+slaps {user} around a bit with a {item}.
+throws a {item} at {user}.
+chucks a few {item}s at {user}.
+grabs a {item} and throws it in {user}'s face.
+launches a {item} in {user}'s general direction.
+sits on {user}'s face while slamming a {item} into their crotch.
+starts slapping {user} silly with a {item}.
+holds {user} down and repeatedly whacks them with a {item}.
+prods {user} with a flaming {item}.
+picks up a {item} and whacks {user} with it.
+ties {user} to a chair and throws a {item} at them.
+hits {user} on the head with a {item}.
+ties {user} to a pole and whips them with a {item}.
diff --git a/disabled_stuff/data/slogans.txt b/plugins/data/slogans.txt
old mode 100644
new mode 100755
similarity index 100%
rename from disabled_stuff/data/slogans.txt
rename to plugins/data/slogans.txt
diff --git a/disabled_stuff/dice.py b/plugins/dice.py
old mode 100644
new mode 100755
similarity index 69%
rename from disabled_stuff/dice.py
rename to plugins/dice.py
index a89f3d5..d479f84
--- a/disabled_stuff/dice.py
+++ b/plugins/dice.py
@@ -14,8 +14,8 @@ sign_re = re.compile(r'[+-]?(?:\d*d)?(?:\d+|F)', re.I)
split_re = re.compile(r'([\d+-]*)d?(F|\d*)', re.I)
-def n_rolls(count, n):
- """roll an n-sided die count times"""
+def nrolls(count, n):
+ "roll an n-sided die count times"
if n == "F":
return [random.randint(-1, 1) for x in xrange(min(count, 100))]
if n < 2: # it's a coin
@@ -28,16 +28,16 @@ def n_rolls(count, n):
return [random.randint(1, n) for x in xrange(count)]
else: # fake it
return [int(random.normalvariate(.5 * (1 + n) * count,
- (((n + 1) * (2 * n + 1) / 6. -
- (.5 * (1 + n)) ** 2) * count) ** .5))]
+ (((n + 1) * (2 * n + 1) / 6. -
+ (.5 * (1 + n)) ** 2) * count) ** .5))]
@hook.command('roll')
#@hook.regex(valid_diceroll, re.I)
@hook.command
def dice(inp):
- """dice -- Simulates dice rolls. Example of :
- 'dice 2d20-d5+4 roll 2'. D20s, subtract 1D5, add 4"""
+ "dice -- Simulates dicerolls. Example of :" \
+ " 'dice 2d20-d5+4 roll 2'. D20s, subtract 1D5, add 4"
try: # if inp is a re.match object...
(inp, desc) = inp.groups()
@@ -49,7 +49,7 @@ def dice(inp):
spec = whitespace_re.sub('', inp)
if not valid_diceroll_re.match(spec):
- return "Invalid dice roll"
+ return "Invalid diceroll"
groups = sign_re.findall(spec)
total = 0
@@ -59,7 +59,7 @@ def dice(inp):
count, side = split_re.match(roll).groups()
count = int(count) if count not in " +-" else 1
if side.upper() == "F": # fudge dice are basically 1d3-2
- for fudge in n_rolls(count, "F"):
+ for fudge in nrolls(count, "F"):
if fudge == 1:
rolls.append("\x033+\x0F")
elif fudge == -1:
@@ -73,18 +73,17 @@ def dice(inp):
side = int(side)
try:
if count > 0:
- d = n_rolls(count, side)
- rolls += map(str, d)
- total += sum(d)
+ dice = nrolls(count, side)
+ rolls += map(str, dice)
+ total += sum(dice)
else:
- d = n_rolls(-count, side)
- rolls += [str(-x) for x in d]
- total -= sum(d)
+ dice = nrolls(-count, side)
+ rolls += [str(-x) for x in dice]
+ total -= sum(dice)
except OverflowError:
- # I have never seen this happen. If you make this happen, you win a cookie
return "Thanks for overflowing a float, jerk >:["
if desc:
- return "{}: {} ({})".format(desc.strip(), total, ", ".join(rolls))
+ return "%s: %d (%s)" % (desc.strip(), total, ", ".join(rolls))
else:
- return "{} ({})".format(total, ", ".join(rolls))
+ return "%d (%s)" % (total, ", ".join(rolls))
diff --git a/disabled_stuff/dictionary.py b/plugins/dictionary.py
old mode 100644
new mode 100755
similarity index 82%
rename from disabled_stuff/dictionary.py
rename to plugins/dictionary.py
index 5b4123b..2bd5ae6
--- a/disabled_stuff/dictionary.py
+++ b/plugins/dictionary.py
@@ -1,6 +1,5 @@
# Plugin by GhettoWizard and Scaevolus
import re
-
from util import hook
from util import http
@@ -8,7 +7,7 @@ from util import http
@hook.command('dictionary')
@hook.command
def define(inp):
- """define -- Fetches definition of ."""
+ "define -- Fetches definition of ."
url = 'http://ninjawords.com/'
@@ -19,14 +18,14 @@ def define(inp):
'//div[@class="example"]')
if not definition:
- return u'No results for {} :('.format(inp)
+ return 'No results for ' + inp + ' :('
def format_output(show_examples):
- result = u'{}: '.format(h.xpath('//dt[@class="title-word"]/a/text()')[0])
+ result = '%s: ' % h.xpath('//dt[@class="title-word"]/a/text()')[0]
correction = h.xpath('//span[@class="correct-word"]/text()')
if correction:
- result = 'Definition for "{}": '.format(correction[0])
+ result = 'Definition for "%s": ' % correction[0]
sections = []
for section in definition:
@@ -41,7 +40,7 @@ def define(inp):
for article in sections:
result += article[0]
if len(article) > 2:
- result += u' '.join(u'{}. {}'.format(n + 1, section)
+ result += ' '.join('%d. %s' % (n + 1, section)
for n, section in enumerate(article[1:]))
else:
result += article[1] + ' '
@@ -68,7 +67,7 @@ def define(inp):
@hook.command('e')
@hook.command
def etymology(inp):
- """etymology -- Retrieves the etymology of ."""
+ "etymology -- Retrieves the etymology of ."
url = 'http://www.etymonline.com/index.php'
@@ -77,7 +76,7 @@ def etymology(inp):
etym = h.xpath('//dl')
if not etym:
- return u'No etymology found for {} :('.format(inp)
+ return 'No etymology found for ' + inp + ' :('
etym = etym[0].text_content()
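Both lookups in dictionary.py hinge on the same lxml idiom: xpath(...) with a /text() step returns a plain list of strings, so the empty-list check doubles as a "no results" test. A self-contained illustration (the HTML snippet is invented; ninjawords' real markup is more involved):

from lxml import html

page = html.fromstring('<dl><dt class="title-word"><a>cat</a></dt>'
                       '<dd class="article">a small domesticated feline</dd></dl>')
words = page.xpath('//dt[@class="title-word"]/a/text()')
if words:
    print('%s: %s' % (words[0], page.xpath('//dd[@class="article"]/text()')[0]))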
diff --git a/disabled_stuff/down.py b/plugins/down.py
old mode 100644
new mode 100755
similarity index 67%
rename from disabled_stuff/down.py
rename to plugins/down.py
index f03c078..aab75b9
--- a/disabled_stuff/down.py
+++ b/plugins/down.py
@@ -5,7 +5,7 @@ from util import hook, http
@hook.command
def down(inp):
- """down -- Checks if the site at is up or down."""
+ "down -- Checks if the site at is up or down."
if 'http://' not in inp:
inp = 'http://' + inp
@@ -15,6 +15,6 @@ def down(inp):
# http://mail.python.org/pipermail/python-list/2006-December/589854.html
try:
http.get(inp, get_method='HEAD')
- return '{} seems to be up'.format(inp)
+ return inp + ' seems to be up'
except http.URLError:
- return '{} seems to be down'.format(inp)
+ return inp + ' seems to be down'
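The python-list post referenced in down.py describes the trick that http.get(inp, get_method='HEAD') wraps: override get_method on a urllib2.Request so the server returns headers only. A standard-library version of the same check (Python 2, matching the rest of this codebase; util.http is the bot's own wrapper):

import urllib2

class HeadRequest(urllib2.Request):
    def get_method(self):
        return 'HEAD'  # ask the server for headers only, no body

def is_up(url):
    if 'http://' not in url:
        url = 'http://' + url
    try:
        urllib2.urlopen(HeadRequest(url), timeout=10)
        return True
    except urllib2.URLError:
        return False

print(is_up('example.com'))  # True if the site answers the HEAD request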
diff --git a/disabled_stuff/drama.py b/plugins/drama.py
old mode 100644
new mode 100755
similarity index 64%
rename from disabled_stuff/drama.py
rename to plugins/drama.py
index d348cba..47df5e2
--- a/disabled_stuff/drama.py
+++ b/plugins/drama.py
@@ -1,7 +1,4 @@
-import re
-
-from util import hook, http, text
-
+from util import hook, http
api_url = "http://encyclopediadramatica.se/api.php?action=opensearch"
ed_url = "http://encyclopediadramatica.se/"
@@ -9,11 +6,10 @@ ed_url = "http://encyclopediadramatica.se/"
@hook.command
def drama(inp):
- """drama -- Gets the first paragraph of
- the Encyclopedia Dramatica article on ."""
+ "drama -- Gets the first paragraph of" \
+ " the Encyclopedia Dramatica article on ."
j = http.get_json(api_url, search=inp)
-
if not j[1]:
return "No results found."
article_name = j[1][0].replace(' ', '_').encode('utf8')
@@ -24,8 +20,8 @@ def drama(inp):
for p in page.xpath('//div[@id="bodyContent"]/p'):
if p.text_content():
summary = " ".join(p.text_content().splitlines())
- summary = re.sub("\[\d+\]", "", summary)
- summary = text.truncate_str(summary, 220)
- return "{} :: {}".format(summary, url)
+ if len(summary) > 300:
+ summary = summary[:summary.rfind(' ', 0, 300)] + "..."
+ return "%s :: \x02%s\x02" % (summary, url)
return "Unknown Error."
diff --git a/disabled_stuff/fact.py b/plugins/fact.py
old mode 100644
new mode 100755
similarity index 67%
rename from disabled_stuff/fact.py
rename to plugins/fact.py
index 1d48ae7..6ca9a64
--- a/disabled_stuff/fact.py
+++ b/plugins/fact.py
@@ -2,8 +2,8 @@ from util import hook, http, web
@hook.command(autohelp=False)
-def fact(inp):
- """fact -- Gets a random fact from OMGFACTS."""
+def fact(inp, say=False, nick=False):
+ "fact -- Gets a random fact from OMGFACTS."
attempts = 0
@@ -20,10 +20,10 @@ def fact(inp):
response = soup.find('a', {'class': 'surprise'})
link = response['href']
- fact_data = ''.join(response.find(text=True))
+ fact = ''.join(response.find(text=True))
- if fact_data:
- fact_data = fact_data.strip()
+ if fact:
+ fact = fact.strip()
break
else:
if attempts > 2:
@@ -32,6 +32,9 @@ def fact(inp):
attempts += 1
continue
- url = web.try_isgd(link)
+ try:
+ url = web.isgd(link)
+ except (web.ShortenError, http.HTTPError):
+ url = link
- return "{} - {}".format(fact_data, url)
+ return "%s - %s" % (fact, url)
diff --git a/disabled_stuff/factoids.py b/plugins/factoids.py
old mode 100644
new mode 100755
similarity index 54%
rename from disabled_stuff/factoids.py
rename to plugins/factoids.py
index 403e6f5..e3d2fbc
--- a/disabled_stuff/factoids.py
+++ b/plugins/factoids.py
@@ -1,34 +1,29 @@
# Written by Scaevolus 2010
+from util import hook, http, text, execute
import string
+import sqlite3
import re
-from util import hook, http, text, pyexec
-
-
re_lineends = re.compile(r'[\r\n]*')
-db_ready = False
-
# some simple "shortcodes" for formatting purposes
shortcodes = {
- '[b]': '\x02',
- '[/b]': '\x02',
- '[u]': '\x1F',
- '[/u]': '\x1F',
- '[i]': '\x16',
- '[/i]': '\x16'}
+'[b]': '\x02',
+'[/b]': '\x02',
+'[u]': '\x1F',
+'[/u]': '\x1F',
+'[i]': '\x16',
+'[/i]': '\x16'}
def db_init(db):
- global db_ready
- if not db_ready:
- db.execute("create table if not exists mem(word, data, nick,"
- " primary key(word))")
- db.commit()
- db_ready = True
+ db.execute("create table if not exists mem(word, data, nick,"
+ " primary key(word))")
+ db.commit()
def get_memory(db, word):
+
row = db.execute("select data from mem where word=lower(?)",
[word]).fetchone()
if row:
@@ -37,11 +32,11 @@ def get_memory(db, word):
return None
-@hook.command("r", permissions=["addfactoid"])
-@hook.command(permissions=["addfactoid"])
-def remember(inp, nick='', db=None, notice=None):
- """remember [+] -- Remembers with . Add +
- to to append."""
+@hook.command("r", adminonly=True)
+@hook.command(adminonly=True)
+def remember(inp, nick='', db=None, say=None, input=None, notice=None):
+ "remember [+] -- Remembers