Skip to content

Instantly share code, notes, and snippets.

@zone559
Created July 23, 2025 01:15
Show Gist options
  • Select an option

  • Save zone559/f9c1491739d42bacf9e53a674e978643 to your computer and use it in GitHub Desktop.

Select an option

Save zone559/f9c1491739d42bacf9e53a674e978643 to your computer and use it in GitHub Desktop.
import requests
import json
import time
# Target profile on leakedzone.com to scrape.
base_url = "https://leakedzone.com/voulezj"

# Pagination state: start at page 1; stop after a few consecutive
# empty pages so a site that keeps answering doesn't loop us forever.
page = 1
empty_response_count = 0
max_empty_responses = 3

# Headers mimicking a browser XHR request (presumably needed for the
# server to answer with JSON instead of HTML — confirm against the site).
headers = {
    "authority": "leakedzone.com",
    "accept": "*/*",
    "accept-encoding": "identity",
    "accept-language": "en-US,en;q=0.9",
    "referer": "https://leakedzone.com/",
    "sec-fetch-site": "same-origin",
    "user-agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/138.0.0.0 Safari/537.36",
    "x-requested-with": "XMLHttpRequest",
}

# Fixed query parameters; "page" is added per request in the loop below.
params = {"type": "all", "order": "0"}
# Fetch successive pages until we hit repeated empty responses, a non-JSON
# response, or a request failure. Each page's JSON payload is dumped to stdout.
while True:
    params["page"] = str(page)
    try:
        # timeout= prevents the scraper from hanging forever on a stalled
        # connection (the original call had none).
        response = requests.get(base_url, headers=headers, params=params, timeout=30)
        response.raise_for_status()

        # Keep the try body minimal: only .json() raises ValueError here.
        try:
            json_data = response.json()
        except ValueError:
            print(f"Page {page}: Response is not JSON")
            print("Raw response:", response.text)
            break

        if not json_data:  # Empty list/dict means the gallery is exhausted
            print(f"Page {page}: Empty response received")
            empty_response_count += 1
            if empty_response_count >= max_empty_responses:
                print(f"Stopping after {max_empty_responses} empty responses")
                break
        else:
            print(f"Page {page}: Received {len(json_data)} items")
            print(json.dumps(json_data, indent=2))
            empty_response_count = 0  # Reset counter if we get data
    except requests.exceptions.RequestException as e:
        # Covers connection errors, timeouts, and HTTP error statuses
        # raised by raise_for_status() above.
        print(f"Page {page}: Request failed - {e}")
        break

    page += 1
    time.sleep(1)  # Be polite with delay between requests

print("Finished scraping")
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment