Better formatting, 4-space indents, use http.get_soup instead of BeautifulSoup directly
parent 0100f1d071
commit e042aa1bbe

1 changed file with 30 additions and 18 deletions
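The new version fetches and parses the page through http.get_soup() from the bot's util package rather than calling urllib2 and BeautifulSoup by hand. As a rough sketch of what such a helper might look like, assuming a thin urllib2 + BeautifulSoup 3 wrapper (the names and signatures below are illustrative, not the repository's actual util/http code):

    import urllib2
    from BeautifulSoup import BeautifulSoup

    def get_html(url):
        # Fetch the raw page body; urllib2 raises HTTPError/URLError on failure.
        return urllib2.urlopen(url).read()

    def get_soup(url):
        # Parse the fetched HTML so callers can use soup.find(...) directly
        # instead of juggling urllib2 and BeautifulSoup themselves.
        return BeautifulSoup(get_html(url))

With a wrapper like that, the plugin no longer needs its own urllib2 fetch and HTTPError handling, which is what several of the removed lines below were doing.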
@@ -1,32 +1,44 @@
 import re
 from util import hook, http
-import json
-from BeautifulSoup import BeautifulSoup
-import urllib2
 
 newgrounds_re = (r'(.*:)//(www.newgrounds.com|newgrounds.com)(:[0-9]+)?(.*)', re.I)
 valid = set('0123456789')
 
+
 def test(s):
     return set(s) <= valid
 
+
 @hook.regex(*newgrounds_re)
 def newgrounds_url(match):
     location = match.group(4).split("/")[-1]
     if not test(location):
-        return "Not a valid Newgrounds portal ID. Example: http://www.newgrounds.com/portal/view/593993"
-    try:
-        urlobj = urllib2.urlopen("http://www.newgrounds.com/portal/view/" + location)
-    except urllib2.HTTPError:
-        return "\x034\x02Invalid response. Maybe Newgrounds is down for maintenance?"
-    soup = BeautifulSoup(urlobj.read())
-    try:
-        title = soup.find('title').text
-        author = soup.find('ul', {'class': 'authorlinks'}).find('img')['alt']
-        rating = u"\x02%s\x02/\x025.0\x02" % soup.find('dd', {'class': 'star-variable'})['title'].split("Stars –")[0].strip()
-        numofratings = soup.find('dd', {'class': 'star-variable'})['title'].split("Stars –")[1].replace("Votes", "").strip()
-        views = soup.find('dl', {'class': 'contentdata'}).findAll('dd')[1].find('strong').text
-        date = soup.find('dl', {'class': 'sidestats'}).find('dd').text
-    except Exception:
-        return "\x034\x02Could not find item information."
-    return u"\x02%s\x02 - rated %s (%s) - \x02%s\x02 views - \x02%s\x02 on \x02%s\x02" % (title, rating, numofratings, views, author, date)
+        print "Not a valid Newgrounds portal ID. Example: http://www.newgrounds.com/portal/view/593993"
+        return None
+
+    soup = http.get_soup("http://www.newgrounds.com/portal/view/" + location)
+
+    title = "\x02{}\x02".format(soup.find('title').text)
+
+    try:
+        author = " - \x02{}\x02".format(soup.find('ul', {'class': 'authorlinks'}).find('img')['alt'])
+    except:
+        author = ""
+
+    try:
+        rating = u" - rated \x02%s\x02/\x025.0\x02" % soup.find('dd', {'class': 'star-variable'})['title'].split("Stars –")[0].strip()
+    except:
+        rating = ""
+
+    try:
+        numofratings = " ({})".format(soup.find('dd', {'class': 'star-variable'})['title'].split("Stars –")[1].replace("Votes", "").strip())
+    except:
+        numofratings = ""
+
+    try:
+        views = " - \x02{}\x02 views".format(soup.find('dl', {'class': 'contentdata'}).findAll('dd')[1].find('strong').text)
+    except:
+        views = ""
+
+    try:
+        date = "on \x02{}\x02".format(soup.find('dl', {'class': 'sidestats'}).find('dd').text)
+    except:
+        date = ""
+
+    return title + rating + numofratings + views + author + date
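For reference, the ID extraction at the top of the hook can be checked on its own; this snippet just exercises the regex and split from the plugin above, using the example URL from its error message:

    import re

    newgrounds_re = (r'(.*:)//(www.newgrounds.com|newgrounds.com)(:[0-9]+)?(.*)', re.I)

    match = re.match(newgrounds_re[0], "http://www.newgrounds.com/portal/view/593993", newgrounds_re[1])
    location = match.group(4).split("/")[-1]
    print location  # prints '593993'; all digits, so test(location) passes and the page is fetched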