6 Commits

Author SHA1 Message Date
df1a60034c try cookies 2025-12-07 18:57:34 +01:00
7094801549 minimum 2025-12-07 18:48:19 +01:00
389d10615e try debug 2025-12-07 18:17:10 +01:00
62b16f1dbf try2 2025-12-07 18:13:15 +01:00
09662e656e try1 2025-12-07 17:59:36 +01:00
b26a6a2d0a Better versionning 2025-12-05 16:46:44 +01:00
5 changed files with 77 additions and 23 deletions

2
.gitignore vendored Normal file
View File

@@ -0,0 +1,2 @@
.venv
__pycache__/

Dockerfile
View File

@@ -4,6 +4,7 @@ RUN apk add --no-cache ca-certificates
WORKDIR /app
COPY VERSION /VERSION
COPY /app/ /app/
RUN pip install --no-cache-dir -r requirements.txt
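
A quick sketch of why the COPY destination is /VERSION rather than /app/VERSION: the env_config.py change further down walks two directory levels up from the module file before appending "VERSION". Assuming the module ends up at /app/env_config.py (per WORKDIR /app above) — an assumption, since the image layout is not fully shown — the path resolves like this:

import os

# Hypothetical module location inside the image (not taken from the diff).
module_path = "/app/env_config.py"

# Same expression as in env_config.py, with __file__ replaced by module_path.
version_path = os.path.join(os.path.dirname(os.path.dirname(module_path)), "VERSION")
print(version_path)  # -> /VERSION, i.e. the target of "COPY VERSION /VERSION"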

1
VERSION Normal file
View File

@@ -0,0 +1 @@
4.0.3

env_config.py
View File

@@ -4,7 +4,9 @@ import logging
import json
import sys
VERSION = "4.0.2"
# Read version from VERSION file
with open(os.path.join(os.path.dirname(os.path.dirname(__file__)), "VERSION"), "r", encoding="utf-8") as f:
    VERSION = f.read().strip()
# Logger setup
logging.basicConfig(
@@ -77,22 +79,23 @@ if match:
else:
    wh_masked_url = "[Invalid webhook URL]"
# HTTP headers
# HTTP headers - Firefox working set
HEADERS = {
    "User-Agent": "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/131.0.0.0 Safari/537.36",
    "Accept": "application/json, text/plain, */*",
    "Accept-Language": "fr-FR,fr;q=0.9,en-US;q=0.8,en;q=0.7",
    "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64; rv:145.0) Gecko/20100101 Firefox/145.0",
    "Accept": "text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8",
    "Accept-Language": "fr,fr-FR;q=0.8,en-US;q=0.5,en;q=0.3",
    "Accept-Encoding": "gzip, deflate, br, zstd",
    "Referer": "https://partners.nvidia.com/",
    "Origin": "https://partners.nvidia.com",
    "Connection": "keep-alive",
    "Sec-Fetch-Dest": "empty",
    "Sec-Fetch-Mode": "cors",
    "Sec-Ch-Ua": "\"Google Chrome\";v=\"131\", \"Chromium\";v=\"131\", \"Not.A/Brand\";v=\"24\"",
    "Sec-Ch-Ua-Platform": "\"macOS\"",
    "Cache-Control": "no-cache, no-store, must-revalidate",
    "Cache-Control": "no-cache",
    "Pragma": "no-cache",
    "Expires": "0"
    "Sec-GPC": "1",
    "Connection": "keep-alive",
    "Upgrade-Insecure-Requests": "1",
    "Sec-Fetch-Dest": "document",
    "Sec-Fetch-Mode": "navigate",
    "Sec-Fetch-Site": "none",
    "Sec-Fetch-User": "?1",
    "Priority": "u=0, i",
    "TE": "trailers"
}
}
# Load country setting and localization config
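
The HEADERS dict above is applied once per session in the checker module below (session.headers.update(HEADERS)), so every request carries the browser-style headers. A minimal sketch of how that plays out with requests — the URL is a placeholder and only a subset of the headers is shown, for brevity:

import requests

HEADERS = {
    # subset of the header set above
    "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64; rv:145.0) Gecko/20100101 Firefox/145.0",
    "Accept-Language": "fr,fr-FR;q=0.8,en-US;q=0.5,en;q=0.3",
}

session = requests.Session()
session.headers.update(HEADERS)  # merged into requests' defaults, sent on every request

# Inspect what would actually go on the wire without sending anything.
prepared = session.prepare_request(requests.Request("GET", "https://example.com/"))
print(prepared.headers["User-Agent"])         # the Firefox string above
print("Accept-Encoding" in prepared.headers)  # True: defaults not overridden by HEADERS remain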

View File

@@ -1,14 +1,16 @@
import requests
import logging
import time
import random
from env_config import HEADERS, PRODUCT_NAMES, API_URL_SKU, API_URL_STOCK, PRODUCT_URL
from notifier import send_discord_notification, send_out_of_stock_notification, send_sku_change_notification
from requests.adapters import HTTPAdapter, Retry
# HTTP session
# HTTP session with stealth configuration
session = requests.Session()
retries = Retry(total=5, backoff_factor=1, status_forcelist=[500, 502, 503, 504])
session.mount('https://', HTTPAdapter(max_retries=retries))
retries = Retry(total=2, backoff_factor=3, status_forcelist=[500, 502, 503, 504, 429])
adapter = HTTPAdapter(max_retries=retries, pool_connections=1, pool_maxsize=1)
session.mount('https://', adapter)
session.headers.update(HEADERS)
# Keeping memory of last run
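
For reference, the retry policy configured above acts at the transport level, before the application code ever sees an error. A condensed sketch of the same setup, with comments on what the parameters do (backoff timing is approximate and depends on the urllib3 version):

import requests
from requests.adapters import HTTPAdapter, Retry

retries = Retry(
    total=2,                                    # at most 2 retries per request
    backoff_factor=3,                           # sleep roughly backoff_factor * 2**n seconds between attempts
    status_forcelist=[500, 502, 503, 504, 429]  # also retry on these HTTP status codes
)
adapter = HTTPAdapter(max_retries=retries, pool_connections=1, pool_maxsize=1)  # single keep-alive connection

session = requests.Session()
session.mount('https://', adapter)  # every https:// request made through this session uses the adapter
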
@@ -20,15 +22,53 @@ first_run_dict = {name: True for name in PRODUCT_NAMES}
def check_rtx_50_founders():
    global last_sku_dict, global_stock_status_dict, first_run_dict
    # First get Akamai cookie by visiting main site
    try:
        logging.info("Getting Akamai protection cookie...")
        session.get("https://marketplace.nvidia.com/fr-fr/consumer/graphics-cards/", timeout=10)
        time.sleep(1)  # Let the session establish
    except Exception as e:
        logging.warning(f"Failed to get initial cookie: {e}")
    # Fetching nvidia API data
    try:
        cache_buster = int(time.time() * 1000)
        sku_url = f"{API_URL_SKU}&_t={cache_buster}"
        sku_url = API_URL_SKU
        response = session.get(sku_url, timeout=10)
        logging.info(f"SKU API response: {response.status_code}")
        if response.status_code == 429:
            logging.warning("Rate limited, waiting longer...")
            time.sleep(random.uniform(10, 20))
            return
        response.raise_for_status()
        # Debug response content
        logging.info(f"Content-Type: {response.headers.get('Content-Type')}")
        logging.info(f"Content-Length: {response.headers.get('Content-Length')}")
        logging.info(f"Response text length: {len(response.text)}")
        logging.info(f"Response content (first 300 chars): {response.text[:300]}")
        # Check if content looks like JSON
        if not response.text.strip().startswith('{'):
            logging.error("Response doesn't start with '{' - not JSON!")
            logging.error(f"Full response: {response.text}")
            return
        try:
            data = response.json()
        except Exception as e:
            logging.error(f"JSON decode error: {e}")
            logging.error(f"Full response text: {response.text}")
            return
    except requests.exceptions.ReadTimeout:
        logging.error("Read timeout - IP may be rate limited/blocked. Try changing IP or wait several hours.")
        return
    except requests.exceptions.ConnectionError as e:
        if "Failed to resolve" in str(e):
            logging.error("DNS resolution failed - IP may be DNS blacklisted. Try VPN or different DNS servers.")
        else:
            logging.error(f"Connection error: {e}")
        return
    except requests.exceptions.RequestException as e:
        logging.error(f"SKU API error: {e}")
        return
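
The block above boils down to: prime the session with the Akamai cookie from the storefront page, fetch the SKU endpoint, back off on 429, and refuse to parse anything that is not JSON. A condensed sketch of that fetch-and-validate pattern as a standalone helper (the function name and the generic error handling are mine, not from the diff):

import logging
import random
import time
import requests

def fetch_json(session, url, timeout=10):
    """Return parsed JSON from url, or None if the response is missing, rate limited or not JSON."""
    try:
        response = session.get(url, timeout=timeout)
        if response.status_code == 429:
            logging.warning("Rate limited, backing off...")
            time.sleep(random.uniform(10, 20))
            return None
        response.raise_for_status()
        if not response.text.strip().startswith('{'):
            logging.error("Response is not JSON: %s", response.text[:300])
            return None
        return response.json()
    except requests.exceptions.RequestException as e:
        logging.error("Request failed: %s", e)
        return None
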
@@ -39,12 +79,17 @@ def check_rtx_50_founders():
    for product_name in PRODUCT_NAMES:
        product_details = None
        for p in all_products:
            if p.get("gpu", "").strip() == product_name:
            gpu_name = p.get("gpu", "").strip()
            # Flexible matching: exact match or partial match
            if gpu_name == product_name or product_name in gpu_name:
                product_details = p
                break
        if not product_details:
            logging.warning(f"⚠️ No product with GPU '{product_name}' found.")
            # Debug: show available GPU names for troubleshooting
            available_gpus = set(p.get("gpu", "") for p in all_products if p.get("gpu"))
            logging.info(f"Available GPUs: {sorted(list(available_gpus))[:10]}")  # Show first 10
            continue
        product_sku = product_details['productSKU']
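
The matching change above accepts either an exact GPU name or a product whose "gpu" field merely contains the configured name. A small sketch of that rule in isolation, with made-up example strings (the real PRODUCT_NAMES values are not shown in this diff):

def gpu_matches(gpu_name: str, product_name: str) -> bool:
    """Exact match, or the configured name appearing inside the API's GPU label."""
    gpu_name = gpu_name.strip()
    return gpu_name == product_name or product_name in gpu_name

print(gpu_matches("RTX 5090", "RTX 5090"))                 # True (exact)
print(gpu_matches("NVIDIA GeForce RTX 5090", "RTX 5090"))  # True (partial)
print(gpu_matches("NVIDIA GeForce RTX 5080", "RTX 5090"))  # False
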
@@ -62,13 +107,15 @@ def check_rtx_50_founders():
            first_run_dict[product_name] = False
        # Check product availability in API_URL_STOCK for each SKU
        cache_buster = int(time.time() * 1000)
        api_stock_url = f"{API_URL_STOCK}{product_sku}&_t={cache_buster}"
        api_stock_url = f"{API_URL_STOCK}{product_sku}"
        logging.info(f"[{product_name}] Checking stock: {api_stock_url}")
        try:
            response = session.get(api_stock_url, timeout=10)
            logging.info(f"[{product_name}] Stock API response: {response.status_code}")
            if response.status_code == 429:
                logging.warning(f"[{product_name}] Rate limited, skipping...")
                continue
            response.raise_for_status()
            stock_data = response.json()
        except requests.exceptions.RequestException as e:
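
The final hunk applies the same idea to the per-SKU stock endpoint: the _t cache-buster is dropped from the URL and a 429 now skips just that product instead of failing the whole pass. A minimal sketch of that polling loop shape (the stock response fields are not interpreted here, since the rest of the handler is not shown in this diff):

import logging
import requests

def poll_stock(session, api_url_stock, skus, timeout=10):
    """Fetch the stock payload for each SKU, skipping entries that are rate limited or error out."""
    results = {}
    for sku in skus:
        url = f"{api_url_stock}{sku}"
        try:
            response = session.get(url, timeout=timeout)
            if response.status_code == 429:
                logging.warning("[%s] Rate limited, skipping...", sku)
                continue
            response.raise_for_status()
            results[sku] = response.json()
        except requests.exceptions.RequestException as e:
            logging.error("[%s] Stock API error: %s", sku, e)
    return results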