- from bs4 import BeautifulSoup
- import requests
- import time
- from bot_buy import Messenger
- import random
### list-amazon.txt format (one product per line):
### url,expected price,product name
def get_user_agent():
    """Return a randomly chosen browser User-Agent string.

    Rotating the User-Agent between requests makes the scraper look less
    like a single automated client to Amazon.

    Returns:
        str: one of a fixed pool of real-browser User-Agent strings.
    """
    agents = (
        'Mozilla/5.0 (iPhone; CPU iPhone OS 12_2 like Mac OS X) AppleWebKit/605.1.15 (KHTML, like Gecko) Mobile/15E148',
        'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/74.0.3729.169 Safari/537.36',
        'Mozilla/5.0 (iPad; CPU OS 12_2 like Mac OS X) AppleWebKit/605.1.15 (KHTML, like Gecko) Mobile/15E148',
        'Mozilla/4.0 (compatible; MSIE 6.0; Windows NT 5.1; SV1; .NET CLR 1.1.4322)',
        'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/64.0.3282.140 Safari/537.36 Edge/17.17134',
        'Mozilla/5.0 (Windows NT 6.1; WOW64; Trident/7.0; rv:11.0) like Gecko',
        'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_14_5) AppleWebKit/605.1.15 (KHTML, like Gecko)',
        'Mozilla/5.0 (Windows NT 5.1; rv:7.0.1) Gecko/20100101 Firefox/7.0.1',
    )
    return random.choice(agents)
- url = ""
- m = Messenger()
- while (1):
- with open('list-amazon.txt', 'r') as f:
- lines = f.readlines()
- for l in lines:
- url = l.split(',')[0].rstrip()
- price = float(l.split(',')[1].rstrip())
- prod = l.split(',')[2].rstrip()
-
- headers = {
- "User-Agent": get_user_agent(),
- "Connection": "keep-alive",
- 'Accept':'text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8',
- "Accept-Encoding": "gzip, deflate, br",
- "Accept-Language": "en-US,en;q=0.5",
- "Host": "www.amazon.com.br"
- }
- try:
- s = requests.Session()
- amaz = s.get("https://www.amazon.com.br", headers=headers)
- soup = BeautifulSoup(amaz.text, 'html.parser')
- except:
- print('failed to request')
- continue
- a_last = soup.findAll('p', class_="a-last")
- skip = False
- for s in a_last:
- text = s.get_text()
- print(text)
- if text > "Our servers are gettin":
- print("Skipping try")
- skip = True
- if skip:
- continue
- try:
- r = s.get(url, headers=headers, cookies=amaz.cookies)
- except:
- print('failed to request')
- continue
-
- soup = BeautifulSoup(r.text, 'html.parser')
- #<p class="a-last">
- a_last = soup.findAll('p', class_="a-last")
- skip = False
- for s in a_last:
- text = s.get_text()
- print(text)
- if text > "Our servers are gettin":
- print("Skipping try")
- skip = True
- if skip:
- continue
-
-
- spans = []
- price_normal = soup.find(id="priceblock_ourprice")
- price_offer = soup.find(id="priceblock_dealprice")
- spans.append(price_normal)
- spans.append(price_offer)
-
- for s in spans:
- if s:
- new_price = float(s.get_text().split('$')[1].replace(".","").replace(',', '.'))
- print(f"Produto: {prod} -> Esperado: {price} -> Agora: {new_price}")
- if new_price <= price:
- m.sendMessage(f"Got it! {prod} -> {new_price}")
- time.sleep(10)
- time.sleep(120)
|