serververify-tampermonkey-script
// ==UserScript==
// @name         ServerVerify Value Index Scraper
// @namespace    http://tampermonkey.net/
// @version      1.0
// @description  Scrape ServerVerify Value Index data across all pages
// @author       You
// @match        https://serververify.com/benchmarks/value-index*
// @grant        none
// ==/UserScript==
(function() {
    'use strict';

    // Add scraper button to the page
    function addScraperButton() {
        const button = document.createElement('button');
        button.innerHTML = '📊 Scrape All Pages';
        button.style.cssText = `
            position: fixed;
            top: 20px;
            right: 20px;
            z-index: 10000;
            padding: 12px 24px;
            background: #16A34A;
            color: white;
            border: none;
            border-radius: 8px;
            font-weight: bold;
            cursor: pointer;
            box-shadow: 0 4px 6px rgba(0,0,0,0.1);
        `;
        button.onmouseover = () => button.style.background = '#15803D';
        button.onmouseout = () => button.style.background = '#16A34A';
        button.onclick = startScraping;
        document.body.appendChild(button);
    }
    // Extract table rows into plain objects; `root` defaults to the live
    // document and also accepts a DOMParser document for fetched pages
    function extractPageData(root = document) {
        const data = [];
        const rows = root.querySelectorAll('table tbody tr');
        rows.forEach(row => {
            const cells = row.querySelectorAll('td');
            if (cells.length >= 8) {
                const entry = {
                    hostingProvider: cells[0].textContent.trim(),
                    cpu: cells[1].textContent.trim(),
                    storage: cells[2].textContent.trim(),
                    ram: cells[3].textContent.trim(),
                    monthlyPrice: cells[4].textContent.trim(),
                    valueScore: cells[5].textContent.trim().replace(/\s+/g, ''),
                    performanceScore: cells[6].textContent.trim(),
                    benchmarkUrl: cells[7].querySelector('a')?.href || ''
                };
                data.push(entry);
            }
        });
        return data;
    }
    // Convert data to CSV
    function convertToCSV(data) {
        if (data.length === 0) return '';
        const headers = Object.keys(data[0]);
        const csvRows = [headers.join(',')];
        data.forEach(row => {
            const values = headers.map(header => {
                const value = row[header].toString();
                // Escape embedded quotes, and quote the field whenever it
                // contains a comma, quote, or newline so the CSV stays valid
                const escaped = value.replace(/"/g, '""');
                return /[",\n]/.test(value) ? `"${escaped}"` : escaped;
            });
            csvRows.push(values.join(','));
        });
        return csvRows.join('\n');
    }
    // Download CSV file
    function downloadCSV(csv, filename) {
        const blob = new Blob([csv], { type: 'text/csv;charset=utf-8;' });
        const link = document.createElement('a');
        const url = URL.createObjectURL(blob);
        link.setAttribute('href', url);
        link.setAttribute('download', filename);
        link.style.visibility = 'hidden';
        document.body.appendChild(link);
        link.click();
        document.body.removeChild(link);
    }
    // Show progress indicator
    function showProgress(current, total) {
        let progressDiv = document.getElementById('scraper-progress');
        if (!progressDiv) {
            progressDiv = document.createElement('div');
            progressDiv.id = 'scraper-progress';
            progressDiv.style.cssText = `
                position: fixed;
                top: 80px;
                right: 20px;
                z-index: 10000;
                padding: 16px 24px;
                background: white;
                border: 2px solid #16A34A;
                border-radius: 8px;
                box-shadow: 0 4px 6px rgba(0,0,0,0.1);
                font-weight: bold;
                min-width: 200px;
            `;
            document.body.appendChild(progressDiv);
        }
        const percentage = Math.round((current / total) * 100);
        progressDiv.innerHTML = `
            <div style="margin-bottom: 8px;">Scraping Progress</div>
            <div style="color: #16A34A;">Page ${current} of ${total}</div>
            <div style="margin-top: 8px; background: #e5e7eb; height: 8px; border-radius: 4px; overflow: hidden;">
                <div style="background: #16A34A; height: 100%; width: ${percentage}%;"></div>
            </div>
        `;
    }

    function removeProgress() {
        const progressDiv = document.getElementById('scraper-progress');
        if (progressDiv) progressDiv.remove();
    }
    // Main scraping function
    async function startScraping() {
        const totalPages = 44; // hard-coded page count; adjust if the listing grows
        const allData = [];
        const baseUrl = 'https://serververify.com/benchmarks/value-index';

        // Get type parameter from current URL
        const urlParams = new URLSearchParams(window.location.search);
        const type = urlParams.get('type') || '1';

        console.log('Starting scrape...');

        for (let page = 1; page <= totalPages; page++) {
            showProgress(page, totalPages);
            try {
                let pageData;
                if (page === 1) {
                    // Use the rows already rendered on the current page
                    pageData = extractPageData();
                } else {
                    // Fetch and parse subsequent pages, then reuse the same extractor
                    const url = `${baseUrl}?type=${type}&page=${page}`;
                    const response = await fetch(url);
                    const html = await response.text();
                    const parser = new DOMParser();
                    const doc = parser.parseFromString(html, 'text/html');
                    pageData = extractPageData(doc);
                }
                allData.push(...pageData);
                console.log(`Page ${page}: ${pageData.length} entries scraped`);

                // Small delay to avoid overwhelming the server
                await new Promise(resolve => setTimeout(resolve, 500));
            } catch (error) {
                console.error(`Error scraping page ${page}:`, error);
            }
        }

        removeProgress();

        // Generate CSV and download
        const csv = convertToCSV(allData);
        const timestamp = new Date().toISOString().split('T')[0];
        const filename = `serververify-value-index-type${type}-${timestamp}.csv`;
        downloadCSV(csv, filename);

        console.log(`Scraping complete! Total entries: ${allData.length}`);
        alert(`Scraping complete!\nTotal entries: ${allData.length}\nFile downloaded: ${filename}`);

        // Also save as JSON for reference
        const json = JSON.stringify(allData, null, 2);
        const jsonBlob = new Blob([json], { type: 'application/json' });
        const jsonLink = document.createElement('a');
        jsonLink.href = URL.createObjectURL(jsonBlob);
        jsonLink.download = `serververify-value-index-type${type}-${timestamp}.json`;
        jsonLink.click();
    }
    // Initialize when page loads
    if (document.readyState === 'loading') {
        document.addEventListener('DOMContentLoaded', addScraperButton);
    } else {
        addScraperButton();
    }
})();