remove time limit and fix exception handling
This commit is contained in: parent ada9acfcd0, commit c634e9dfc9

u2helper

@@ -23,9 +23,13 @@ target_dir_parent = config["target_dir_parent"]
target_dir = {"Lossless Music": target_dir_parent + u"/音乐/",
              "BDISO": target_dir_parent + u"/动漫/",
              "BDrip": target_dir_parent + u"/动漫/",
              "U2-RBD": target_dir_parent + u"/动漫/",
              "U2-Rip": target_dir_parent + u"/动漫/",
              u"加流重灌": target_dir_parent + u"/动漫/",
              u"外挂结构": target_dir_parent + u"/字幕/",
              "Others": target_dir_parent + u"/其他/",
              "DVDrip": target_dir_parent + u"/动漫/",
              "HDTVrip": target_dir_parent + u"/动漫/",
              "DVDISO": target_dir_parent + u"/动漫/"}

headers = {'X-Transmission-Session-Id': '',
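
This map is later indexed directly as target_dir[seeding["catalog"]], so any torrent whose catalog is missing from it raises KeyError and aborts the run. A minimal defensive sketch, assuming the same target_dir and, hypothetically, routing unknown catalogs to the "Others" directory:

# Hypothetical fallback: send unknown catalogs to the "Others" directory
# instead of letting target_dir[...] raise KeyError.
def resolve_target_dir(catalog):
    return target_dir.get(catalog, target_dir["Others"])
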
@@ -36,7 +40,7 @@ list_payload = '''{"method": "torrent-get", "arguments": {

r = requests.post(url, headers=headers, data=list_payload, verify=False)

-soup = BeautifulSoup(r.text)
+soup = BeautifulSoup(r.text, "html.parser")
code = soup.find("code")
headers['X-Transmission-Session-Id'] = code.text.split(': ')[1]
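
This hunk scrapes Transmission's 409 error page for a <code> element to recover the CSRF token. Transmission also sends the token back in the X-Transmission-Session-Id response header of that same 409 response, which avoids parsing HTML at all; a minimal sketch, assuming the script's url, headers, and list_payload:

# On a 409, Transmission echoes the expected session id as a response
# header, so no HTML parsing is needed to recover it.
r = requests.post(url, headers=headers, data=list_payload, verify=False)
if r.status_code == 409:
    headers['X-Transmission-Session-Id'] = r.headers['X-Transmission-Session-Id']
    r = requests.post(url, headers=headers, data=list_payload, verify=False)
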
@@ -62,10 +66,11 @@ for torrent in result["arguments"]["torrents"]:
    else:
        location_payload = '''{"method": "torrent-set-location", "arguments": {"move": true, "location": "''' + \
            target_dir[seeding["catalog"]].encode('utf8') + \
-           seeding["name"].encode('utf8').replace('/', u'／') + '''", "ids": [''' + \
+           seeding["name"].encode('utf8').replace('/', u'／').replace(':', u'：') \
+           + '''", "ids": [''' + \
            str(torrent["id"]) + ''']}}'''
        print location_payload
        r = requests.post(url, headers=headers, data=location_payload, verify=False)
        print r.text
-       time.sleep(1)
+       # time.sleep(1)
        break
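
Concatenating raw strings into the RPC body breaks as soon as a torrent name contains a double quote or backslash; the hand-rolled replace() calls only cover path-unsafe characters. A sketch of the same torrent-set-location request built with json.dumps, assuming the script's seeding, target_dir, torrent, url, and headers variables:

import json

# json.dumps escapes quotes and backslashes in arbitrary torrent names;
# only path-unsafe characters still need the manual full-width replacement.
safe_name = seeding["name"].replace(u'/', u'／').replace(u':', u'：')
location_payload = json.dumps({
    "method": "torrent-set-location",
    "arguments": {
        "move": True,
        "location": target_dir[seeding["catalog"]] + safe_name,
        "ids": [torrent["id"]],
    },
})
r = requests.post(url, headers=headers, data=location_payload, verify=False)
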
@@ -25,7 +25,7 @@ cookies = dict(

r = requests.get(url, cookies=cookies)

-soup = BeautifulSoup(r.text)
+soup = BeautifulSoup(r.text, "html.parser")

td_list = soup.find_all('td', {'class': 'rowfollow nowrap'})
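
The same parser fix appears in both files. Naming the parser explicitly does more than silence bs4's "no parser was explicitly specified" warning: without it, BeautifulSoup picks the best parser installed (lxml, html5lib, or the stdlib html.parser), and different parsers can build different trees for malformed markup. A small illustration:

from bs4 import BeautifulSoup

# Explicit parser: the tree no longer depends on which optional
# parsers (lxml, html5lib) happen to be installed on the machine.
soup = BeautifulSoup("<td class='rowfollow nowrap'>x", "html.parser")
print soup.find("td").text
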
@@ -52,10 +52,11 @@ for td, table in itertools.izip(td_list, table_list):

    u2torrent.id = int(table.find('a').get('href').split('&')[0].split('=')[1])

    info_r = requests.get(info_url + str(u2torrent.id), cookies=cookies)
    print info_url + str(u2torrent.id)

    try:
-       info_soup = BeautifulSoup(info_r.text)
+       info_r = requests.get(info_url + str(u2torrent.id), cookies=cookies)
+       info_soup = BeautifulSoup(info_r.text, "html.parser")
        info_name = info_soup.find("span", {'class': 'title'}, text="[name]").parent.find("span", {'class': 'value'})
        u2torrent.folder = info_name.text
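
The torrent id is pulled out of the href by chaining split('&') and split('='), which silently returns the wrong value if the tracker reorders or prepends query parameters. A sketch using the standard library's query-string parser (urlparse in Python 2); the parameter name 'id' is an assumption, since the split chain above never names it:

from urlparse import urlparse, parse_qs  # urllib.parse in Python 3

# Parse the query string properly so parameter order does not matter.
# 'id' is assumed to be the parameter the split chain was extracting.
href = table.find('a').get('href')
u2torrent.id = int(parse_qs(urlparse(href).query)['id'][0])
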
@@ -66,9 +67,10 @@ for td, table in itertools.izip(td_list, table_list):

        torrents.append(json.JSONDecoder().decode(u2torrent.json()))
        count += 1
-   except AttributeError:
+   except Exception as e:
+       print str(e)
        print "Fetch folder name failed: " + u2torrent.title
-       time.sleep(3)
+       # time.sleep(3)

torrents_dict["count"] = count
torrents_dict["torrents"] = torrents
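
Widening except AttributeError to except Exception keeps the loop alive through network failures, but it also swallows genuine bugs. A middle-ground sketch, assuming the same loop body, that names the two failure modes this code actually hits: a failed request (requests.RequestException) and a missing [name] span (AttributeError from calling .parent on None):

    try:
        info_r = requests.get(info_url + str(u2torrent.id), cookies=cookies)
        info_soup = BeautifulSoup(info_r.text, "html.parser")
        info_name = info_soup.find("span", {'class': 'title'}, text="[name]").parent.find("span", {'class': 'value'})
        u2torrent.folder = info_name.text
        torrents.append(json.JSONDecoder().decode(u2torrent.json()))
        count += 1
    except requests.RequestException as e:
        print "Request failed: " + str(e)
    except AttributeError:
        # find() returned None: the detail page has no [name] span.
        print "Fetch folder name failed: " + u2torrent.title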