2013-09-30 13:38:12 +02:00
|
|
|
from util import hook, http, text, timesince
|
|
|
|
from datetime import datetime
|
2012-07-12 14:55:10 +02:00
|
|
|
import re
|
2013-09-30 13:38:12 +02:00
|
|
|
import random
|
2012-07-12 14:55:10 +02:00
|
|
|
|
|
|
|
# Pattern + flags tuple handed to @hook.regex: fires whenever a message
# contains a reddit.com/r/... link (case-insensitive).
reddit_re = (r'.*((www\.)?reddit\.com/r[^ ]+)', re.I)

# JSON listing endpoint for a subreddit's front page.
base_url = "http://reddit.com/r/{}/.json"

# Canonical redd.it short-link, filled with a post id.
short_url = "http://redd.it/{}"
|
|
|
|
|
2013-09-04 12:30:04 +02:00
|
|
|
|
2012-07-12 14:55:10 +02:00
|
|
|
@hook.regex(*reddit_re)
def reddit_url(match):
    """Fetch a linked reddit thread page and return a one-line summary.

    Triggered by reddit_re; scrapes title, author, age, vote counts and
    comment count out of the thread's HTML.
    """
    page = http.get_html(match.group(0))

    def first_text(query):
        # every field below is the first text node of its xpath query
        return page.xpath(query)[0]

    fields = (
        first_text('//title/text()'),
        first_text("//div[@id='siteTable']//a[contains(@class,'author')]/text()"),
        first_text("//div[@id='siteTable']//p[@class='tagline']/time/text()"),
        first_text("//span[@class='upvotes']/span[@class='number']/text()"),
        first_text("//span[@class='downvotes']/span[@class='number']/text()"),
        first_text("//div[@id='siteTable']//a[@class='comments']/text()"),
    )

    return u'\x02{}\x02 - posted by \x02{}\x02 {} ago - {} upvotes, {} downvotes - {}'.format(*fields)
|
2013-09-30 13:38:12 +02:00
|
|
|
|
|
|
|
|
2013-09-30 23:03:46 +02:00
|
|
|
@hook.command(autohelp=False)
def reddit(inp):
    """reddit <subreddit> [n] -- Gets a random post from <subreddit>, or gets the [n]th post in the subreddit."""
    id_num = None

    if inp:
        # clean and split the input
        parts = inp.lower().strip().split()
        # split() tokens carry no whitespace, so no extra strip is needed;
        # the url is the same whether or not a post number was given
        url = base_url.format(parts[0])

        # find the requested post number (if any)
        if len(parts) > 1:
            try:
                # user-facing numbers are 1-based; data indices are 0-based
                id_num = int(parts[1]) - 1
            except ValueError:
                return "Invalid post number."
    else:
        # no subreddit given: use the front page listing
        url = "http://reddit.com/.json"

    try:
        data = http.get_json(url, user_agent=http.ua_chrome)
    except Exception as e:
        return "Error: " + str(e)
    data = data["data"]["children"]

    # get the requested/random post
    if id_num is not None:
        try:
            item = data[id_num]["data"]
        except IndexError:
            length = len(data)
            return "Invalid post number. Number must be between 1 and {}.".format(length)
    else:
        item = random.choice(data)["data"]

    # shorten the title and build the redd.it link for display
    item["title"] = text.truncate_str(item["title"], 50)
    item["link"] = short_url.format(item["id"])

    # human-readable age ("3 hours" etc.) from the UTC creation timestamp
    rawtime = datetime.fromtimestamp(int(item["created_utc"]))
    item["timesince"] = timesince.timesince(rawtime)

    item["warning"] = " \x02NSFW\x02" if item["over_18"] else ""

    return u'\x02{title} : {subreddit}\x02 - posted by \x02{author}\x02' \
           ' {timesince} ago - {ups} upvotes, {downs} downvotes -' \
           ' {link}{warning}'.format(**item)
|