@rxerium
Created February 17, 2025 11:10
This script performs bulk reverse WHOIS lookups using the WhoisFreaks API to retrieve domain registration details for a list of companies. Results are saved to a JSON file, and a fixed delay between requests keeps the script under the API's rate limits.
import requests
import json
import time

# Configuration
API_KEY = "API_KEY"           # Replace with your actual API key
INPUT_FILE = "companies.txt"  # File containing company names (one per line)
OUTPUT_FILE = "output2.json"  # JSON output file
RATE_LIMIT_SECONDS = 60       # Wait time between requests to avoid rate limiting

# Query the WhoisFreaks reverse WHOIS API for a single company name
def get_whois_data(company_name):
    url = "https://api.whoisfreaks.com/v1.0/whois"
    params = {"apiKey": API_KEY, "whois": "reverse", "company": company_name}
    response = requests.get(url, params=params)  # params are URL-encoded automatically
    if response.status_code == 200:
        return response.json()
    else:
        return {"error": f"Failed to fetch data for {company_name}", "status_code": response.status_code}

# Read company names from file, skipping blank lines
with open(INPUT_FILE, "r") as file:
    companies = [line.strip() for line in file if line.strip()]

# Query the API for each company and store the results
results = {}
for company in companies:
    results[company] = get_whois_data(company)
    print(f"Processed: {company}, waiting {RATE_LIMIT_SECONDS} seconds before next request...")
    time.sleep(RATE_LIMIT_SECONDS)  # Wait to avoid rate limiting

# Save results to JSON file
with open(OUTPUT_FILE, "w") as json_file:
    json.dump(results, json_file, indent=4)

print(f"Whois data saved to {OUTPUT_FILE}")