2013-09-05 12:17:05 +02:00
|
|
|
import urllib2
|
|
|
|
import re
|
|
|
|
import json
|
|
|
|
from xml.dom import minidom
|
2013-09-04 12:30:04 +02:00
|
|
|
from util import hook, http, web, text
|
2013-07-31 15:10:48 +02:00
|
|
|
from bs4 import BeautifulSoup
|
2013-09-05 12:17:05 +02:00
|
|
|
|
2012-11-08 09:56:56 +01:00
|
|
|
|
2013-07-31 15:10:48 +02:00
|
|
|
# Flipped to True by db_init() once the steam table is known to exist.
db_ready = False

# Pattern + flags for the store-link regex hook; group 4 captures the URL path.
steam_re = (r'(.*:)//(store.steampowered.com)(:[0-9]+)?(.*)', re.I)

# Friendly currency/region words -> steamdb.info country codes,
# grouped by target code so aliases are easy to spot.
currencies = {
    'USD': 'us', 'dollars': 'us', 'us': 'us',
    'euro1': "de", 'german': 'de', 'de': 'de',
    'euro2': 'no', 'no': 'no',
    'pound': 'uk', 'pounds': 'uk', 'uk': 'uk',
    'rubles': 'ru', 'russian': 'ru', 'ru': 'ru',
    'real': 'br', 'brazil': 'br', 'br': 'br',
    'yen': 'jp', 'japan': 'jp', 'jp': 'jp',
}
|
2013-07-22 20:53:17 +02:00
|
|
|
|
2013-07-22 21:07:25 +02:00
|
|
|
|
2012-11-10 16:47:12 +01:00
|
|
|
def db_init(db):
    """Create the `steam` saved-account table if needed and mark the DB ready.

    :param db: sqlite database connection supplied by the bot framework.
    """
    # BUG FIX: without `global`, the assignment below created a *local*
    # db_ready, so the module-level flag never became True and steamcalc()
    # re-ran this init on every single call.
    global db_ready
    db.execute("create table if not exists steam(nick primary key, acc)")
    db.commit()
    db_ready = True
|
2012-11-08 09:56:56 +01:00
|
|
|
|
2013-07-22 16:01:47 +02:00
|
|
|
|
2013-09-05 12:17:05 +02:00
|
|
|
def get_steam_info(name):
    """Return owned-games/playtime stats for a SteamCommunity user via the Steam Web API.

    NOTE(review): this function appears to be dead code -- it is shadowed by
    the later ``get_steam_info(url)`` defined further down in this file, and it
    reads a global ``bot`` (``bot.config``) that is never defined in this
    module, so calling it would raise NameError. Presumably ``bot`` was meant
    to be injected by the plugin framework -- confirm before reviving this.

    :param name: SteamCommunity profile username (steamcommunity.com/id/<name>)
    :return: dict with keys 'playtime' (total hours, int), 'played' (games with
             nonzero playtime, most-played first), 'games' (all games),
             '%played' (percentage of owned games actually played)
    """
    # Fetch the community profile XML to resolve the 64-bit Steam ID.
    dom = minidom.parse(urllib2.urlopen(re.sub("{USER}", name, "http://steamcommunity.com/id/{USER}/?xml=1")))
    ID = int(dom.getElementsByTagName("steamID64")[0].firstChild.data)
    # NOTE(review): `bot` is not defined anywhere in this module -- see docstring.
    key = bot.config.get("api_keys", {}).get("steam_key")
    url = "http://api.steampowered.com/IPlayerService/GetOwnedGames/v0001/?key={}&steamid={}&format=json".format(key, ID)
    data = json.load(urllib2.urlopen(url))
    useable = data['response']['games']
    games = []     # every owned game entry
    played = []    # only entries with nonzero playtime
    data = {}      # name reused: becomes the result dict
    playtime = 0   # running total of minutes across all games
    for x in useable:
        games.append(x)
        if x['playtime_forever'] > 0:
            played.append(x)
            playtime += x['playtime_forever']
    # sort ascending then reverse: most-played game first
    played.sort(key=lambda x: x['playtime_forever'])
    played.reverse()
    data['playtime'] = int(playtime / 60.0)  # minutes -> whole hours
    data['played'] = played
    data['games'] = games
    data['%played'] = round(float(len(played)) / len(games) * 100, 2)
    return data
|
|
|
|
|
|
|
|
|
2013-07-22 16:01:47 +02:00
|
|
|
@hook.command('sc', autohelp=False)
|
|
|
|
@hook.command(autohelp=False)
|
2013-07-22 20:53:17 +02:00
|
|
|
def steamcalc(inp, nick='', db=None):
|
|
|
|
"""steamcalc <username> [currency] - Gets value of steam account and
|
|
|
|
total hours played. Uses steamcommunity.com/id/<nickname>. Uses
|
|
|
|
IRC nickname if none provided. """
|
2013-07-31 15:10:48 +02:00
|
|
|
|
|
|
|
if not db_ready:
|
|
|
|
db_init(db)
|
|
|
|
|
2013-07-22 16:01:47 +02:00
|
|
|
currency = None
|
|
|
|
dontsave = False
|
|
|
|
if not inp:
|
2013-07-22 20:53:17 +02:00
|
|
|
user = db.execute("select acc from steam where nick=lower(?)", (nick,)).fetchone()
|
|
|
|
if not user:
|
|
|
|
inp = nick
|
2013-07-22 16:01:47 +02:00
|
|
|
else:
|
2013-07-22 20:53:17 +02:00
|
|
|
inp = user[0]
|
|
|
|
dontsave = True
|
|
|
|
else:
|
|
|
|
if len(inp.split(" ")) > 1:
|
|
|
|
if inp.split(" ")[1] in currencies:
|
|
|
|
currency = currencies[inp.split(" ")[1]]
|
2013-07-22 21:07:25 +02:00
|
|
|
dontsave = False
|
|
|
|
elif inp.split(" ")[1] == "dontsave":
|
|
|
|
dontsave = True
|
2013-07-22 20:53:17 +02:00
|
|
|
else:
|
|
|
|
return "Invalid currency!"
|
|
|
|
inp = inp.split(" ")[0]
|
2013-07-22 21:07:25 +02:00
|
|
|
if len(inp.split(" ")) > 2:
|
|
|
|
if inp.split(" ")[2] == "dontsave":
|
|
|
|
dontsave = True
|
2013-07-31 15:10:48 +02:00
|
|
|
|
2013-09-04 12:30:04 +02:00
|
|
|
url = http.prepare_url("http://steamdb.info/calculator/",
|
|
|
|
{"player": inp, "currency": currency if currency else "us"})
|
2013-07-31 15:10:48 +02:00
|
|
|
soup = http.get_soup(url)
|
|
|
|
|
2013-08-14 10:20:24 +02:00
|
|
|
out = u""
|
|
|
|
|
2012-11-08 09:56:56 +01:00
|
|
|
try:
|
2013-08-14 10:20:24 +02:00
|
|
|
out += soup.findAll('h1', {'class': 'header-title'})[1].text.strip()
|
2013-07-22 20:53:17 +02:00
|
|
|
except Exception as e:
|
|
|
|
print e
|
2013-09-05 04:36:25 +02:00
|
|
|
return u"\x02Unable to retrieve info for {}!\x02 Is it a valid SteamCommunity profile username ({})? " \
|
|
|
|
"Check if your profile is private, or go here to search: {}".format(
|
2013-09-05 12:17:05 +02:00
|
|
|
inp, web.try_isgd("http://steamcommunity.com/id/%s" % inp), web.try_isgd(url))
|
2013-07-31 15:10:48 +02:00
|
|
|
|
2013-08-14 10:20:24 +02:00
|
|
|
nextone = False
|
|
|
|
status = "Unknown"
|
|
|
|
for i in soup.findAll('td'):
|
|
|
|
if nextone:
|
|
|
|
status = i.text
|
|
|
|
break
|
|
|
|
elif i.text == "Status":
|
2013-09-04 12:30:04 +02:00
|
|
|
nextone = True
|
2013-07-22 20:53:17 +02:00
|
|
|
if status == "Online":
|
2013-07-22 16:01:47 +02:00
|
|
|
status = "\x033\x02Online\x02\x0f"
|
2013-07-22 20:53:17 +02:00
|
|
|
elif status == "Offline":
|
2013-07-22 16:01:47 +02:00
|
|
|
status = "\x034\x02Offline\x02\x0f"
|
2013-07-22 20:53:17 +02:00
|
|
|
elif status == "Away":
|
2013-07-22 16:01:47 +02:00
|
|
|
status = "\x038\x02Away\x02\x0f"
|
2013-07-22 20:53:17 +02:00
|
|
|
elif status == "Busy":
|
2013-07-22 16:01:47 +02:00
|
|
|
status = "\x035\x02Busy\x02\x0f"
|
2013-07-22 20:53:17 +02:00
|
|
|
elif "Looking to" in status:
|
2013-07-22 16:01:47 +02:00
|
|
|
status = "\x036\x02%s\x02\x0f" % status
|
2013-08-14 10:20:24 +02:00
|
|
|
out += " (%s)" % status
|
|
|
|
|
|
|
|
for i in soup.findAll('div', {'class': 'panel'}):
|
|
|
|
if str(i.find('div', {'class': 'panel-heading'})) == '<div class="panel-heading">Markdown</div>':
|
|
|
|
data = i
|
|
|
|
data = data.findAll('p')[1:]
|
2013-09-05 10:50:41 +02:00
|
|
|
print data
|
2013-08-14 10:20:24 +02:00
|
|
|
money = data[0].text.split(" ")[-1]
|
|
|
|
totalgames = data[1].text.split(" ")[-1]
|
2013-09-05 10:50:41 +02:00
|
|
|
time = data[2].text.split(" ")[-1].replace("h", "").replace(",", "")
|
|
|
|
time = str(int(round(float(time))))
|
|
|
|
out += " This account is worth \x02{}\x02, and they've spent \x02{}\x02 hour(s) playing games! ".format(money, time)
|
|
|
|
out += "They have \x02{} games\x02 - {}".format(totalgames, web.try_isgd(url))
|
2013-07-31 15:10:48 +02:00
|
|
|
|
2013-07-22 20:53:17 +02:00
|
|
|
if not dontsave:
|
2013-07-22 16:01:47 +02:00
|
|
|
db.execute("insert or replace into steam(nick, acc) values (?,?)", (nick.lower(), inp))
|
|
|
|
db.commit()
|
2013-07-31 15:10:48 +02:00
|
|
|
|
2013-09-05 10:50:41 +02:00
|
|
|
return out
|
2013-07-31 15:10:48 +02:00
|
|
|
|
|
|
|
|
2013-08-01 12:07:20 +02:00
|
|
|
def get_steam_info(url):
    """Return a one-line summary of a Steam store page: name, description
    snippet, genre, release date and (when not free-to-play) price."""
    # Fetch the raw page and build the soup manually because the steam
    # pages have some odd encoding troubles.
    markup = http.get(url)
    doc = BeautifulSoup(markup, 'lxml', from_encoding="utf-8")

    title = doc.find('div', {'class': 'apphub_AppName'}).text
    blurb = ": " + text.truncate_str(doc.find('div', {'class': 'game_description_snippet'}).text.strip())

    # The page has a ton of returns and tabs separating the detail fields.
    info = doc.find('div', {'class': 'glance_details'}).text.strip().split(u"\n\n\r\n\t\t\t\t\t\t\t\t\t")
    genre = " - Genre: " + info[0].replace(u"Genre: ", u"")
    released = " - Release date: " + info[1].replace(u"Release Date: ", u"")

    if "Free to Play" in genre:
        cost = ""
    else:
        cost = " - Price: " + doc.find('div', {'class': 'game_purchase_price price'}).text.strip()

    return title + blurb + genre + released + cost
|
2013-08-01 12:07:20 +02:00
|
|
|
|
|
|
|
|
|
|
|
@hook.regex(*steam_re)
def steam_url(match):
    """Regex hook: summarize any pasted store.steampowered.com link."""
    # Group 4 of steam_re is the path portion of the matched URL.
    path = match.group(4)
    return get_steam_info("http://store.steampowered.com" + path)
|
|
|
|
|
|
|
|
|
|
|
|
@hook.command
def steam(inp):
    """steam [search] - Search for specified game/trailer/DLC"""
    listing = http.get("http://store.steampowered.com/search/?term=" + inp)
    results = BeautifulSoup(listing, 'lxml', from_encoding="utf-8")
    # Only the first search hit is reported.
    top_hit = results.find('a', {'class': 'search_result_row'})
    link = top_hit['href']
    return get_steam_info(link) + " - " + web.isgd(link)
|