app.py

from bs4 import BeautifulSoup
from dotenv import load_dotenv
import requests
from typing import List, TypeAlias
from dataclasses import dataclass
import os
import re
import pickle
import urllib.parse

@dataclass
class Torrent:
    """A single matched release: series name, href, SxxExx tag, and season tag."""
    name: str
    link: str
    season_ep: str
    season: str


load_dotenv()

# Auth/session cookies loaded from the environment (.env via python-dotenv);
# "cf_clearance" is the Cloudflare clearance cookie.
cookies = {
    "cf_clearance": os.getenv("CF_CLEARANCE") or "",
    "pass": os.getenv("PASS") or "",
    "uid": os.getenv("UID") or "",
}
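
# Expected .env layout (illustrative placeholders only, not real values):
#
#   CF_CLEARANCE=<cloudflare clearance cookie>
#   PASS=<iptorrents pass cookie>
#   UID=<iptorrents uid cookie>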


def search_series(serie_name: str) -> List[Torrent] | None:
    """Page through the site's search results and collect 1080p episode torrents."""
    try:
        torrents = []
        page = 1
        valid = True
        # Release names use dots instead of spaces, so match against a dotted
        # copy while keeping the original name for the search query.
        dotted_name = serie_name.replace(' ', '.')
        expr = re.compile(f"{dotted_name}.*((S[0-9]+)E[0-9]+).*(1080p).*", re.I)
        base_url = "https://iptorrents.com/"
        while valid:
            valid = False
            search_url = f"{base_url}t?q={urllib.parse.quote(serie_name)};p={page}"
            response = requests.get(search_url, cookies=cookies)
            response.raise_for_status()
            soup = BeautifulSoup(response.content, 'html.parser')
            results = soup.find_all('a', {'class': 'tTipWrap'})
            for result in results:
                link = result['href']
                m = expr.search(link)
                if link.endswith('.torrent') and m is not None:
                    torrents.append(Torrent(dotted_name, link, m.group(1), season=m.group(2)))
                    valid = True
            # Only advance to the next page while matches keep turning up.
            if valid:
                page += 1
        return torrents
    except Exception as e:
        print(f"ERROR: {e}")
        return None
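
# For reference, expr in search_series is meant to match .torrent hrefs that
# contain the dotted series name, an SxxExx tag, and "1080p", e.g. a
# hypothetical Show.Name.S01E02.1080p.WEB.x264-GROUP.torrent, capturing
# "S01E02" as season_ep and "S01" as season.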


OK: TypeAlias = int


def download_torrent(url: str, file_name: str) -> OK | None:
    """Stream a .torrent file to disk; return 1 on success, None on failure."""
    try:
        read = requests.get(url, cookies=cookies, stream=True)
        read.raise_for_status()
        with open(file_name, 'wb') as f:
            for chunk in read.iter_content(chunk_size=512):
                if chunk:
                    f.write(chunk)
        return 1
    except Exception as e:
        print(f"ERROR: {e}")
        return None
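
# Example call (hypothetical link and file name):
#   download_torrent("https://iptorrents.com/t/<relative-link>", "Show.S01E02.1080p.torrent")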


def main() -> None:
    # Load the persisted download history so already-fetched episodes are skipped.
    if os.path.exists("downloaded.bin"):
        with open("downloaded.bin", "rb") as f:
            downloaded: List[Torrent] = pickle.load(f)
    else:
        downloaded = []

    serie_name = input("Name serie: ")
    torrents = search_series(serie_name)
    if torrents is None:
        save_downloaded(downloaded)
        return
    if len(torrents) <= 0:
        print("torrent not found")
        save_downloaded(downloaded)
        return

    # Offer a deterministic, ordered season menu.
    seasons = sorted({to.season for to in torrents})
    for idx, s in enumerate(seasons):
        print(f"{idx + 1}. {s}")
    choice = int(input("What season? "))
    if 0 <= choice - 1 < len(seasons):
        season = seasons[choice - 1]
        print(f"Selected season: {season}")
        for torrent in torrents:
            if torrent.link.find(season) >= 0:
                name = os.path.basename(torrent.link)
                already = [x for x in downloaded
                           if x.name == torrent.name and x.season_ep == torrent.season_ep]
                if len(already) == 0:
                    print(f"Downloading {torrent.season} - {torrent.season_ep}")
                    if download_torrent(f"https://iptorrents.com/t/{torrent.link}", name) is None:
                        print(f"Failed to download {torrent.link}")
                    else:
                        # Only record successful downloads so failures are retried next run.
                        downloaded.append(torrent)
    else:
        print("Invalid choice")
    save_downloaded(downloaded)


def save_downloaded(downloaded: List[Torrent]) -> None:
    # Persist the download history for the next run.
    with open("downloaded.bin", 'wb') as f:
        pickle.dump(downloaded, f, protocol=pickle.HIGHEST_PROTOCOL)


if __name__ == '__main__':
    main()