# amazon.py — Amazon.com.br price watcher
import random
import time

import requests
from bs4 import BeautifulSoup

from bot_buy import Messenger
### list-amazon.txt format (one product per line):
### url,expected price,product name
  8. def get_user_agent():
  9. agents = []
  10. agents.append('Mozilla/5.0 (iPhone; CPU iPhone OS 12_2 like Mac OS X) AppleWebKit/605.1.15 (KHTML, like Gecko) Mobile/15E148')
  11. agents.append('Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/74.0.3729.169 Safari/537.36')
  12. agents.append('Mozilla/5.0 (iPad; CPU OS 12_2 like Mac OS X) AppleWebKit/605.1.15 (KHTML, like Gecko) Mobile/15E148')
  13. agents.append('Mozilla/4.0 (compatible; MSIE 6.0; Windows NT 5.1; SV1; .NET CLR 1.1.4322)')
  14. agents.append('Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/64.0.3282.140 Safari/537.36 Edge/17.17134')
  15. agents.append('Mozilla/5.0 (Windows NT 6.1; WOW64; Trident/7.0; rv:11.0) like Gecko')
  16. agents.append('Mozilla/5.0 (Macintosh; Intel Mac OS X 10_14_5) AppleWebKit/605.1.15 (KHTML, like Gecko)')
  17. agents.append('Mozilla/5.0 (Windows NT 5.1; rv:7.0.1) Gecko/20100101 Firefox/7.0.1')
  18. return random.choice(agents)
  19. url = ""
  20. m = Messenger()
  21. while (1):
  22. with open('list-amazon.txt', 'r') as f:
  23. lines = f.readlines()
  24. for l in lines:
  25. url = l.split(',')[0].rstrip()
  26. price = float(l.split(',')[1].rstrip())
  27. prod = l.split(',')[2].rstrip()
  28. headers = {
  29. "User-Agent": get_user_agent(),
  30. "Connection": "keep-alive",
  31. 'Accept':'text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8',
  32. "Accept-Encoding": "gzip, deflate, br",
  33. "Accept-Language": "en-US,en;q=0.5",
  34. "Host": "www.amazon.com.br"
  35. }
  36. try:
  37. s = requests.Session()
  38. amaz = s.get("https://www.amazon.com.br", headers=headers)
  39. soup = BeautifulSoup(amaz.text, 'html.parser')
  40. except:
  41. print('failed to request')
  42. continue
  43. a_last = soup.findAll('p', class_="a-last")
  44. skip = False
  45. for s in a_last:
  46. text = s.get_text()
  47. print(text)
  48. if text > "Our servers are gettin":
  49. print("Skipping try")
  50. skip = True
  51. if skip:
  52. continue
  53. try:
  54. r = s.get(url, headers=headers, cookies=amaz.cookies)
  55. except:
  56. print('failed to request')
  57. continue
  58. soup = BeautifulSoup(r.text, 'html.parser')
  59. #<p class="a-last">
  60. a_last = soup.findAll('p', class_="a-last")
  61. skip = False
  62. for s in a_last:
  63. text = s.get_text()
  64. print(text)
  65. if text > "Our servers are gettin":
  66. print("Skipping try")
  67. skip = True
  68. if skip:
  69. continue
  70. spans = []
  71. price_normal = soup.find(id="priceblock_ourprice")
  72. price_offer = soup.find(id="priceblock_dealprice")
  73. spans.append(price_normal)
  74. spans.append(price_offer)
  75. for s in spans:
  76. if s:
  77. new_price = float(s.get_text().split('$')[1].replace(".","").replace(',', '.'))
  78. print(f"Produto: {prod} -> Esperado: {price} -> Agora: {new_price}")
  79. if new_price <= price:
  80. m.sendMessage(f"Got it! {prod} -> {new_price}")
  81. time.sleep(10)
  82. time.sleep(120)