2011-11-20 10:23:31 +01:00
|
|
|
import re
|
|
|
|
import time
|
|
|
|
|
2013-12-11 05:17:56 +01:00
|
|
|
from util import hook, http, timeformat
|
2011-11-20 10:23:31 +01:00
|
|
|
|
|
|
|
|
|
|
|
# (pattern, flags) tuple unpacked into @hook.regex below; the single capture
# group is the video id from watch?v=, /v/, youtu.be/ and "yooouuutuuube" URLs.
youtube_re = (r'(?:youtube.*?(?:v=|/v/)|youtu\.be/|yooouuutuuube.*?id=)'
              '([-_a-zA-Z0-9]+)', re.I)

# YouTube GData (v2) API endpoints, requesting the JSON-C response format.
base_url = 'http://gdata.youtube.com/feeds/api/'
# Single-video lookup; filled with the video id via str.format.
api_url = base_url + 'videos/{}?v=2&alt=jsonc'
# Search endpoint limited to the first result.
search_api_url = base_url + 'videos?v=2&alt=jsonc&max-results=1'
# Short-link template (old-style % formatting) used when echoing search results.
video_url = "http://youtu.be/%s"
|
2011-11-20 10:23:31 +01:00
|
|
|
|
|
|
|
|
2013-11-29 04:35:41 +01:00
|
|
|
def plural(num=0, text=''):
    """Return *num* comma-grouped, followed by *text* pluralized with 's'.

    E.g. plural(1, "like") -> "1 like", plural(2, "like") -> "2 likes".
    """
    suffix = "" if num == 1 else "s"
    return "{:,} {}{}".format(num, text, suffix)
|
2013-11-29 04:35:41 +01:00
|
|
|
|
|
|
|
|
2013-12-11 05:17:56 +01:00
|
|
|
|
2013-11-30 09:16:44 +01:00
|
|
|
|
|
|
|
|
2012-11-12 11:46:38 +01:00
|
|
|
def get_video_description(video_id):
    """Build a one-line IRC-formatted (\\x02 bold, \\x03 color) summary string
    for the given YouTube video id via the GData v2 JSON-C API.

    Returns None when the API reports an error; returns just the bolded title
    when the video has no duration yet (still processing); otherwise a unicode
    string with title, length, like/dislike ratio, view count, uploader,
    upload date and an NSFW marker where applicable.
    """
    request = http.get_json(api_url.format(video_id))

    # The API signals failure inside the JSON body rather than via HTTP status.
    if request.get('error'):
        return

    data = request['data']

    out = u'\x02{}\x02'.format(data['title'])

    # No duration means the video metadata is incomplete; stop after the title.
    if not data.get('duration'):
        return out

    length = data['duration']
    out += u' - length \x02{}\x02'.format(timeformat.format_time(length, simple=True))

    if 'ratingCount' in data:
        # ratingCount is likes + dislikes; likeCount needs an int() conversion.
        likes = plural(int(data['likeCount']), "like")
        dislikes = plural(data['ratingCount'] - int(data['likeCount']), "dislike")

        percent = 100 * float(data['likeCount'])/float(data['ratingCount'])
        out += u' - {}, {} (\x02{:.1f}\x02%)'.format(likes,
                                                     dislikes, percent)

    if 'viewCount' in data:
        views = data['viewCount']
        out += u' - \x02{:,}\x02 view{}'.format(views, "s"[views==1:])

    try:
        # Resolve the uploader's display name from their profile feed; fall
        # back to the raw uploader id if that lookup fails for any reason.
        uploader = http.get_json(base_url + "users/{}?alt=json".format(data["uploader"]))["entry"]["author"][0]["name"]["$t"]
    except Exception:
        # Was a bare `except:` — narrowed so SystemExit/KeyboardInterrupt
        # are no longer swallowed; the best-effort fallback is preserved.
        uploader = data["uploader"]

    upload_time = time.strptime(data['uploaded'], "%Y-%m-%dT%H:%M:%S.000Z")
    out += u' - \x02{}\x02 on \x02{}\x02'.format(uploader,
                                                 time.strftime("%Y.%m.%d", upload_time))

    if 'contentRating' in data:
        out += u' - \x034NSFW\x02'

    return out
|
2011-11-20 10:23:31 +01:00
|
|
|
|
2012-02-29 06:58:38 +01:00
|
|
|
|
2011-11-20 10:23:31 +01:00
|
|
|
@hook.regex(*youtube_re)
def youtube_url(match):
    """Triggered by YouTube links in chat; echo the linked video's summary."""
    video_id = match.group(1)
    return get_video_description(video_id)
|
|
|
|
|
2012-02-29 06:58:38 +01:00
|
|
|
|
2013-12-03 10:54:38 +01:00
|
|
|
@hook.command('you')
@hook.command('yt')
@hook.command('y')
@hook.command
def youtube(inp):
    """youtube <query> -- Returns the first YouTube search result for <query>."""
    response = http.get_json(search_api_url, q=inp)

    # Guard clauses: API-level error, then an empty result set.
    if 'error' in response:
        return 'error performing search'

    results = response['data']
    if results['totalItems'] == 0:
        return 'no results found'

    video_id = results['items'][0]['id']
    return get_video_description(video_id) + u" - " + video_url % video_id
|
2013-06-27 12:42:18 +02:00
|
|
|
|
2013-09-04 12:30:04 +02:00
|
|
|
|
2013-11-30 09:16:44 +01:00
|
|
|
|
|
|
|
@hook.command('ytime')
@hook.command
def youtime(inp):
    """youtime <query> -- Gets the total run time of the first YouTube search result for <query>."""
    search = http.get_json(search_api_url, q=inp)

    if 'error' in search:
        return 'error performing search'
    if search['data']['totalItems'] == 0:
        return 'no results found'

    # Second request: fetch full metadata for the top search hit.
    video_id = search['data']['items'][0]['id']
    lookup = http.get_json(api_url.format(video_id))

    if lookup.get('error'):
        return
    data = lookup['data']
    if not data.get('duration'):
        return

    length = data['duration']
    views = data['viewCount']
    # Cumulative watch time assuming every view played the whole video.
    total = int(length * views)

    length_text = timeformat.format_time(length, simple=True)
    total_text = timeformat.format_time(total, accuracy=8)

    return (u'The video \x02{}\x02 has a length of {} and has been viewed {:,} times for '
            'a total run time of {}!').format(data['title'], length_text, views,
                                              total_text)
|
|
|
|
|
|
|
|
|
2013-06-27 12:42:18 +02:00
|
|
|
# (pattern, flags) tuple for playlist URLs; group 4 captures the path/query
# tail, from which the trailing list id is extracted in ytplaylist_url.
ytpl_re = (r'(.*:)//(www.youtube.com/playlist|youtube.com/playlist)(:[0-9]+)?(.*)', re.I)
|
|
|
|
|
2013-09-04 12:30:04 +02:00
|
|
|
|
2013-06-27 12:42:18 +02:00
|
|
|
@hook.regex(*ytpl_re)
def ytplaylist_url(match):
    """Triggered by YouTube playlist links; scrape and echo title, view count,
    video count and channel author from the playlist page."""
    # The list id is the last value in the captured query-string tail.
    playlist_id = match.group(4).split("=")[-1]

    try:
        soup = http.get_soup("https://www.youtube.com/playlist?list=" + playlist_id)
    except Exception:
        return "\x034\x02Invalid response."

    title = soup.find('title').text.split('-')[0].strip()
    author = soup.find('img', {'class': 'channel-header-profile-image'})['title']

    # Both stats live in the same <ul>; look it up once.
    stats = soup.find('ul', {'class': 'header-stats'}).findAll('li')
    numvideos = stats[0].text.split(' ')[0]
    views = stats[1].text.split(' ')[0]

    return u"\x02%s\x02 - \x02%s\x02 views - \x02%s\x02 videos - \x02%s\x02" % (title, views, numvideos, author)
|