Merge branch 'develop' into refresh

Luke Rogers 2013-12-01 20:44:24 +13:00
commit bdce09435a
8 changed files with 229 additions and 109 deletions

View file

@@ -5,6 +5,8 @@ import re
db_ready = False
CAN_DOWNVOTE = False
def db_init(db):
db.execute("""CREATE TABLE if not exists karma(
@@ -92,7 +94,7 @@ def karma_add(match, nick='', chan='', db=None, notice=None):
total_karma) values(?,?,?,?)""", (nick_vote.lower(), 0, 0, 0))
up(db, nick_vote)
notice("Gave {} 1 karma!".format(nick_vote))
if match.group(2) == '--':
if match.group(2) == '--' and CAN_DOWNVOTE:
db.execute("""INSERT or IGNORE INTO karma(
nick_vote,
up_karma,
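The karma hunk adds a module-level CAN_DOWNVOTE flag (False by default) and requires it on the '--' branch, so downvotes are dropped until the flag is switched on. A minimal sketch of that gating pattern outside the bot (the KARMA_RE pattern and handle_karma helper are hypothetical stand-ins for the plugin's hook.regex machinery):

import re

CAN_DOWNVOTE = False  # module flag from the diff; flip to True to allow '--'

# hypothetical stand-in for the plugin's hook.regex pattern
KARMA_RE = re.compile(r"^(\S+)(\+\+|--)$")

def handle_karma(line):
    match = KARMA_RE.match(line)
    if not match:
        return None
    nick_vote, op = match.group(1), match.group(2)
    if op == '++':
        return "Gave {} 1 karma!".format(nick_vote)
    if op == '--' and CAN_DOWNVOTE:
        return "Took 1 karma from {}!".format(nick_vote)
    return None  # '--' is silently ignored while CAN_DOWNVOTE is False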

View file

@@ -1,73 +1,89 @@
from util import hook, http, web
import time
import json
from urllib2 import HTTPError
import random
from os import path
base_url = "http://api.bukget.org/3/"
search_url = base_url + "search/plugin_name/like/{}"
details_url = base_url + "plugins/bukkit/{}"
@hook.command('randomplugin')
@hook.command(autohelp=False)
def randombukkitplugin(inp, reply=None):
if not path.exists("plugins/data/bukgetplugins"):
with open("plugins/data/bukgetplugins", "w") as f:
f.write(http.get("http://api.bukget.org/3/plugins/bukkit"))
jsahn = json.loads(open("plugins/data/bukgetplugins", "r").read())
pickslug = random.choice(jsahn)['slug']
data = getplugininfo(pickslug)
name = data['plugin_name']
description = data['description']
url = data['website']
authors = data['authors'][0]
authors = authors[0] + u"\u200b" + authors[1:]
stage = data['stage']
lastUpdate = time.strftime('%d %B %Y %H:%M',
time.gmtime(data['versions'][0]['date']))
lastVersion = data['versions'][0]['version']
bukkitver = ", ".join(data['versions'][0]['game_versions'])
link = web.isgd(data['versions'][0]['link'])
if description != "":
reply("\x02{}\x02, by \x02{}\x02 - {} - ({}) \x02{}".format(name, authors, description, stage, url))
else:
reply("\x02{}\x02, by \x02{}\x02 ({}) \x02{}".format(name, authors, stage, url))
reply("Last release: \x02v{}\x02 for \x02{}\x02 at {} \x02{}\x02".format(lastVersion, bukkitver, lastUpdate, link))
class BukgetError(Exception):
def __init__(self, code, text):
self.code = code
self.text = text
def __str__(self):
return self.text
def plugin_search(term):
""" searches for a plugin with the bukget API and returns the slug """
term = term.lower().strip()
search_term = http.quote_plus(term)
try:
results = http.get_json(search_url.format(search_term))
except (http.HTTPError, http.URLError) as e:
raise BukgetError(500, "Error Fetching Search Page: {}".format(e))
if not results:
raise BukgetError(404, "No Results Found")
for result in results:
if result["slug"] == term:
return result["slug"]
return results[0]["slug"]
def plugin_details(slug):
""" takes a plugin slug and returns details from the bukget API """
slug = slug.lower().strip()
try:
details = http.get_json(details_url.format(slug))
except (http.HTTPError, http.URLError) as e:
raise BukgetError(500, "Error Fetching Details: {}".format(e))
return details
@hook.command('bplugin')
@hook.command('plugin')
@hook.command
def bukkitplugin(inp, reply=None):
"""plugin <bukkit plugin slug> - Look up a plugin on dev.bukkit.org"""
data = getplugininfo(inp.lower())
def bukkitplugin(inp, reply=None, message=None):
"""plugin <slug/name> - Look up a plugin on dev.bukkit.org"""
# get the plugin slug using search
try:
name = data['plugin_name']
except ValueError:
return data
slug = plugin_search(inp)
except BukgetError as e:
return e
# get the plugin info using the slug
try:
data = plugin_details(slug)
except BukgetError as e:
return e
name = data["plugin_name"]
description = data['description']
url = data['website']
authors = data['authors'][0]
authors = authors[0] + u"\u200b" + authors[1:]
stage = data['stage']
lastUpdate = time.strftime('%d %B %Y %H:%M',
time.gmtime(data['versions'][0]['date']))
lastVersion = data['versions'][0]['version']
bukkitver = ", ".join(data['versions'][0]['game_versions'])
link = web.isgd(data['versions'][0]['link'])
if description != "":
reply("\x02{}\x02, by \x02{}\x02 - {} - ({}) \x02{}".format(name, authors, description, stage, url))
else:
reply("\x02{}\x02, by \x02{}\x02 ({}) \x02{}".format(name, authors, stage, url))
reply("Last release: \x02v{}\x02 for \x02{}\x02 at {} \x02{}\x02".format(lastVersion, bukkitver, lastUpdate, link))
current_version = data['versions'][0]
def getplugininfo(inp):
if len(inp.split(" ")) > 1:
slug = inp.split(" ")[0]
last_update = time.strftime('%d %B %Y %H:%M',
time.gmtime(current_version['date']))
version_number = data['versions'][0]['version']
bukkit_versions = ", ".join(current_version['game_versions'])
link = web.try_isgd(current_version['link'])
if description:
reply(u"\x02{}\x02, by \x02{}\x02 - {} - ({}) \x02{}".format(name, authors, description, stage, url))
else:
slug = inp
try:
data = http.get_json("http://api.bukget.org/3/plugins/bukkit/%s/"
% slug)
except HTTPError as e:
return "Got error: {}".format(e)
return data
reply(u"\x02{}\x02, by \x02{}\x02 ({}) \x02{}".format(name, authors, stage, url))
message(u"Last release: \x02v{}\x02 for \x02{}\x02 at {} \x02{}\x02".format(version_number, bukkit_versions, last_update, link))

View file

@@ -85,8 +85,10 @@ def newegg(inp):
)
# get the first result
item = r["ProductListItems"][0]
return format_item(item)
if r["ProductListItems"]:
item = r["ProductListItems"][0]
return format_item(item)
else:
return "No results found."

View file

@@ -14,11 +14,11 @@ def spell(inp):
return "Could not find dictionary: {}".format(locale)
if len(inp.split(" ")) > 1:
# input is a sentence
chkr = SpellChecker(locale)
chkr.set_text(inp)
offset = 0
for err in chkr:
# find the location of the incorrect word
start = err.wordpos + offset
@@ -31,9 +31,9 @@ def spell(inp):
offset = (offset + len(s_string)) - len(err.word)
# replace the word with the suggestions
inp = inp[:start] + s_string + inp[finish:]
return inp
else:
# input is a word
dictionary = enchant.Dict(locale)
is_correct = dictionary.check(inp)
suggestions = dictionary.suggest(inp)

View file

@@ -1,6 +1,4 @@
import random
from util import hook, http, web
from util import hook, web
@hook.command
@@ -13,7 +11,7 @@ def stock(inp):
# if we dont get a company name back, the symbol doesn't match a company
if quote['Change'] is None:
return "unknown ticker symbol %s" % inp
return "Unknown ticker symbol: {}".format(sym)
change = float(quote['Change'])
price = float(quote['LastTradePriceOnly'])
@@ -24,10 +22,9 @@ def stock(inp):
quote['color'] = "3"
quote['PercentChange'] = 100 * change / (price - change)
print quote
ret = "\x02%(Name)s\x02 (\x02%(symbol)s\x02) - %(LastTradePriceOnly)s " \
"\x03%(color)s%(Change)s (%(PercentChange).2f%%)\x03 " \
"Day Range: %(DaysRange)s " \
"MCAP: %(MarketCapitalization)s" % quote
return ret
return u"\x02{Name}\x02 (\x02{symbol}\x02) - {LastTradePriceOnly} " \
"\x03{color}{Change} ({PercentChange:.2f}%)\x03 " \
"Day Range: {DaysRange} " \
"MCAP: {MarketCapitalization}".format(**quote)

View file

@@ -4,13 +4,10 @@ import re
import random
from datetime import datetime
TWITTER_RE = (r"(?:(?:www.twitter.com|twitter.com)/(?:[-_a-zA-Z0-9]+)/status/)([0-9]+)", re.I)
@hook.command("tw")
@hook.command("twatter")
@hook.command
def twitter(inp, bot=None):
"twitter <user> [n] -- Gets last/[n]th tweet from <user>"
def get_api(bot):
consumer_key = bot.config.get("api_keys", {}).get("twitter_consumer_key")
consumer_secret = bot.config.get("api_keys", {}).get("twitter_consumer_secret")
@@ -18,12 +15,48 @@ def twitter(inp, bot=None):
oauth_secret = bot.config.get("api_keys", {}).get("twitter_access_secret")
if not consumer_key:
return "Error: No Twitter API details."
return False
auth = tweepy.OAuthHandler(consumer_key, consumer_secret)
auth.set_access_token(oauth_token, oauth_secret)
api = tweepy.API(auth)
return tweepy.API(auth)
@hook.regex(*TWITTER_RE)
def twitter_url(match, bot=None):
tweet_id = match.group(1)
api = get_api(bot)
if not api:
return
try:
tweet = api.get_status(tweet_id)
user = tweet.user
except tweepy.error.TweepError:
return
text = " ".join(tweet.text.split())
if user.verified:
prefix = u"\u2713"
else:
prefix = ""
time = timesince.timesince(tweet.created_at, datetime.utcnow())
return u"{}@\x02{}\x02 ({}): {} ({} ago)".format(prefix, user.screen_name, user.name, text, time)
@hook.command("tw")
@hook.command("twatter")
@hook.command
def twitter(inp, bot=None):
"twitter <user> [n] -- Gets last/[n]th tweet from <user>"
api = get_api(bot)
if not api:
return "Error: No Twitter API details."
if re.match(r'^\d+$', inp):
# user is getting a tweet by id
@@ -102,20 +135,10 @@ def twitter(inp, bot=None):
def twuser(inp, bot=None):
"""twuser <user> -- Get info on the Twitter user <user>"""
consumer_key = bot.config.get("api_keys", {}).get("twitter_consumer_key")
consumer_secret = bot.config.get("api_keys", {}).get("twitter_consumer_secret")
oauth_token = bot.config.get("api_keys", {}).get("twitter_access_token")
oauth_secret = bot.config.get("api_keys", {}).get("twitter_access_secret")
if not consumer_key:
api = get_api(bot)
if not api:
return "Error: No Twitter API details."
auth = tweepy.OAuthHandler(consumer_key, consumer_secret)
auth.set_access_token(oauth_token, oauth_secret)
api = tweepy.API(auth)
try:
# try to get user by username
user = api.get_user(inp)
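get_api is now the single place that turns the four api_keys config values into an authenticated tweepy.API object, and the twitter, twitter_url, and twuser hooks all call it and bail out when it returns False. A condensed sketch of that shared-constructor pattern (the config dict shape follows the diff; tweepy usage matches its OAuthHandler/API interface; lookup_tweet is a hypothetical caller):

import tweepy

def get_api(config):
    # build one authenticated API object from the bot's api_keys config
    keys = config.get("api_keys", {})
    consumer_key = keys.get("twitter_consumer_key")
    consumer_secret = keys.get("twitter_consumer_secret")
    oauth_token = keys.get("twitter_access_token")
    oauth_secret = keys.get("twitter_access_secret")
    if not consumer_key:
        return False  # no credentials configured
    auth = tweepy.OAuthHandler(consumer_key, consumer_secret)
    auth.set_access_token(oauth_token, oauth_secret)
    return tweepy.API(auth)

def lookup_tweet(config, tweet_id):
    # hypothetical caller showing the shared "get_api or bail" pattern
    api = get_api(config)
    if not api:
        return "Error: No Twitter API details."
    try:
        tweet = api.get_status(tweet_id)
    except tweepy.error.TweepError:
        return None
    return " ".join(tweet.text.split())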

View file

@@ -37,9 +37,9 @@ def wolframalpha(inp, bot=None):
if subpod:
results.append(subpod)
if results:
pod_texts.append(title + ': ' + ', '.join(results))
pod_texts.append(title + u': ' + u', '.join(results))
ret = ' - '.join(pod_texts)
ret = u' - '.join(pod_texts)
if not pod_texts:
return 'No results.'
@@ -56,4 +56,4 @@ def wolframalpha(inp, bot=None):
if not ret:
return 'No results.'
return "{} - {}".format(ret, short_url)
return u"{} - {}".format(ret, short_url)

View file

@@ -1,7 +1,7 @@
import re
import time
from util import hook, http
from util import hook, http, text
youtube_re = (r'(?:youtube.*?(?:v=|/v/)|youtu\.be/|yooouuutuuube.*?id=)'
@@ -14,7 +14,56 @@ video_url = "http://youtu.be/%s"
def plural(num=0, text=''):
return "%d %s%s" % (num, text, "s"[num==1:])
return "{:,} {}{}".format(num, text, "s"[num==1:])
def format_time(seconds, count=3, accuracy=6, simple=False):
if simple:
periods = [
('c', 60 * 60 * 24 * 365 * 100),
('de', 60 * 60 * 24 * 365 * 10),
('y', 60 * 60 * 24 * 365),
('m', 60 * 60 * 24 * 30),
('d', 60 * 60 * 24),
('h', 60 * 60),
('m', 60),
('s', 1)
]
else:
periods = [
(('century', 'centuries'), 60 * 60 * 24 * 365 * 100),
(('decade', 'decades'), 60 * 60 * 24 * 365 * 10),
(('year', 'years'), 60 * 60 * 24 * 365),
(('month', 'months'), 60 * 60 * 24 * 30),
(('day', 'days'), 60 * 60 * 24),
(('hour', 'hours'), 60 * 60),
(('minute', 'minutes'), 60),
(('second', 'seconds'), 1)
]
periods = periods[-accuracy:]
strings = []
i = 0
for period_name, period_seconds in periods:
if i < count:
if seconds > period_seconds:
period_value, seconds = divmod(seconds, period_seconds)
i += 1
if simple:
strings.append("{}{}".format(period_value, period_name))
else:
if period_value == 1:
strings.append("{} {}".format(period_value, period_name[0]))
else:
strings.append("{} {}".format(period_value, period_name[1]))
else:
break
if simple:
return " ".join(strings)
else:
return text.get_text_list(strings, "and")
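format_time is the new shared duration formatter: it slices the period table down to accuracy unit kinds, emits at most count non-zero units from largest to smallest, and joins them either as compact tokens (simple=True) or as an English list via the bot's text.get_text_list helper. A small usage sketch, assuming the implementation above (the prose join style comes from that helper):

print format_time(3902, simple=True)   # -> "1h 5m 2s"
print format_time(3902)                # -> roughly "1 hour, 5 minutes and 2 seconds"
print format_time(90062, count=2)      # -> roughly "1 day and 1 hour"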
def get_video_description(video_id):
@@ -25,38 +74,35 @@ def get_video_description(video_id):
data = request['data']
out = '\x02%s\x02' % data['title']
out = '\x02{}\x02'.format(data['title'])
if not data.get('duration'):
return out
out += ' - length \x02'
length = data['duration']
if length / 3600: # > 1 hour
out += '%dh ' % (length / 3600)
if length / 60:
out += '%dm ' % (length / 60 % 60)
out += "%ds\x02" % (length % 60)
out += ' - length \x02{}\x02'.format(format_time(length, simple=True))
if 'ratingCount' in data:
# format
likes = plural(int(data['likeCount']), "like")
dislikes = plural(data['ratingCount'] - int(data['likeCount']), "dislike")
percent = 100 * float(data['likeCount'])/float(data['ratingCount'])
out += ' - {}, {} (\x02{:.1f}\x02%)'.format(likes,
dislikes, percent)
dislikes, percent)
if 'viewCount' in data:
out += ' - \x02%s\x02 views' % format(data['viewCount'], ",d")
views = data['viewCount']
out += ' - \x02{:,}\x02 view{}'.format(views, "s"[views==1:])
try:
uploader = http.get_json(base_url + "users/{}?alt=json".format(data["uploader"]))["entry"]["author"][0]["name"]["$t"]
except:
uploader = data["uploader"]
upload_time = time.strptime(data['uploaded'], "%Y-%m-%dT%H:%M:%S.000Z")
out += ' - \x02%s\x02 on \x02%s\x02' % (uploader,
time.strftime("%Y.%m.%d", upload_time))
out += ' - \x02{}\x02 on \x02{}\x02'.format(uploader,
time.strftime("%Y.%m.%d", upload_time))
if 'contentRating' in data:
out += ' - \x034NSFW\x02'
@@ -74,7 +120,6 @@ def youtube_url(match):
@hook.command
def youtube(inp):
"""youtube <query> -- Returns the first YouTube search result for <query>."""
request = http.get_json(search_api_url, q=inp)
if 'error' in request:
@@ -88,6 +133,41 @@ def youtube(inp):
return get_video_description(video_id) + " - " + video_url % video_id
@hook.command('ytime')
@hook.command
def youtime(inp):
"""youtime <query> -- Gets the total run time of the first YouTube search result for <query>."""
request = http.get_json(search_api_url, q=inp)
if 'error' in request:
return 'error performing search'
if request['data']['totalItems'] == 0:
return 'no results found'
video_id = request['data']['items'][0]['id']
request = http.get_json(api_url.format(video_id))
if request.get('error'):
return
data = request['data']
if not data.get('duration'):
return
length = data['duration']
views = data['viewCount']
total = int(length * views)
length_text = format_time(length, simple=True)
total_text = format_time(total, accuracy=8)
return u'The video \x02{}\x02 has a length of {} and has been viewed {:,} times for ' \
'a total run time of {}!'.format(data['title'], length_text, views, \
total_text)
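youtime simply multiplies the clip length by its view count and renders both numbers through format_time. A rough worked example with made-up figures, assuming the code above:

length, views = 212, 1500000        # a 3m 32s video viewed 1.5 million times
total = int(length * views)         # 318,000,000 seconds of combined viewing
print format_time(length, simple=True)   # -> "3m 32s"
print format_time(total, accuracy=8)     # -> roughly "1 decade, 1 month and 13 hours"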
ytpl_re = (r'(.*:)//(www.youtube.com/playlist|youtube.com/playlist)(:[0-9]+)?(.*)', re.I)