Reddit Comment Dataset
fts_benchmark.py
#!/usr/bin/env python3
"""
Multi-process FTS (Full-Text Search) benchmark for LanceDB
Benchmarks FTS queries on the 600M row Reddit comments dataset.

Requires two text files:
- words.txt: list of words for FTS queries (one per line)
- subreddits.txt: list of subreddits for filters (one per line)

Each query uses exactly the specified number of words and filters.
Filters are applied sequentially in a fixed order for consistency.
"""
import argparse
import multiprocessing as mp
import time
import random
from typing import List, Dict, Optional, Tuple
from dataclasses import dataclass, asdict
import numpy as np
import lancedb

# Force the spawn start method for multiprocessing
mp.set_start_method("spawn", force=True)
# LanceDB connection details (from 0810/0812)
LANCEDB_HOST = "http://internal-k8s-lancedb-lancedbq-09d85d35db-379805588.us-west-2.elb.amazonaws.com:80"
DATABASE = "internet"
TABLE_NAME = "reddit_comments"
API_KEY = "sk_reevo_prod_5a4d838fe66bbef2b"
REGION = "us-west-2"

# Filterable columns based on the dataset documentation (23 in total)
FILTER_COLUMNS = {
    # Temporal columns (Bitmap indexes)
    'year': {'type': 'int', 'values': [2012, 2013, 2014], 'index': 'Bitmap'},  # Only 2012-2014 data
    'month': {'type': 'int', 'values': list(range(1, 13)), 'index': 'Bitmap'},
    'day_of_week': {'type': 'int', 'values': list(range(0, 7)), 'index': 'Bitmap'},
    'hour': {'type': 'int', 'values': list(range(0, 24)), 'index': 'Bitmap'},
    'is_weekend': {'type': 'bool', 'values': [True, False], 'index': 'Bitmap'},
    # Categorical columns (Bitmap indexes)
    'subreddit': {'type': 'string', 'sample_values': None, 'index': 'Bitmap'},  # Fetched dynamically
    'score_category': {'type': 'string', 'values': ['negative', 'low', 'medium', 'high', 'very_high'], 'index': 'Bitmap'},
    'engagement_level': {'type': 'string', 'values': ['low', 'medium', 'high'], 'index': 'Bitmap'},
    'is_controversial': {'type': 'bool', 'values': [True, False], 'index': 'Bitmap'},
    'is_viral': {'type': 'bool', 'values': [True, False], 'index': 'Bitmap'},
    'is_deleted': {'type': 'bool', 'values': [True, False], 'index': 'Bitmap'},
    'is_removed': {'type': 'bool', 'values': [True, False], 'index': 'Bitmap'},
    'has_url': {'type': 'bool', 'values': [True, False], 'index': 'Bitmap'},
    'has_emoji': {'type': 'bool', 'values': [True, False], 'index': 'Bitmap'},
    # Numeric columns (BTree indexes)
    'score': {'type': 'int', 'range': (-1000, 10000), 'index': 'BTree'},
    'controversiality': {'type': 'int', 'values': [0, 1], 'index': 'BTree'},
    'comment_length': {'type': 'int', 'range': (0, 10000), 'index': 'BTree'},
    'word_count': {'type': 'int', 'range': (0, 2000), 'index': 'BTree'},
    # Fake columns for testing different cardinalities
    'fake_category_small': {'type': 'string', 'cardinality': 10, 'index': 'Bitmap'},     # category_0 to category_9
    'fake_category_medium': {'type': 'string', 'cardinality': 100, 'index': 'Bitmap'},   # category_0 to category_99
    'fake_category_large': {'type': 'string', 'cardinality': 1000, 'index': 'BTree'},    # category_0 to category_999
    'fake_category_xlarge': {'type': 'string', 'cardinality': 10000, 'index': 'BTree'},  # category_0 to category_9999
    'fake_sentiment_score': {'type': 'float', 'range': (-1.0, 1.0), 'index': 'BTree'},   # Normal distribution, mean=0.1, std=0.4
}
# Common query terms (will be populated from actual data)
QUERY_TERMS_CACHE = []

@dataclass
class QueryMetrics:
    """Metrics for a single query"""
    query_id: int
    process_id: int
    query_text: str
    num_words: int
    num_filters: int
    filter_types: List[str]
    latency_ms: float
    rows_returned: int
    error: Optional[str] = None

    def to_dict(self):
        return asdict(self)

@dataclass
class BenchmarkResults:
    """Aggregated benchmark results"""
    total_queries: int
    successful_queries: int
    failed_queries: int
    total_time_seconds: float
    queries_per_second: float
    avg_latency_ms: float
    p50_latency_ms: float
    p90_latency_ms: float
    p95_latency_ms: float
    p99_latency_ms: float
    avg_rows_returned: float
    filter_distribution: Dict[str, int]
    word_count_distribution: Dict[int, int]

    def to_dict(self):
        return asdict(self)

def connect_lancedb():
    """Connect to remote LanceDB"""
    return lancedb.connect(
        uri=f"db://{DATABASE}",
        host_override=LANCEDB_HOST,
        api_key=API_KEY,
        region=REGION
    )

def fetch_query_terms() -> List[str]:
    """Load all query terms from the words.txt file"""
    global QUERY_TERMS_CACHE
    if QUERY_TERMS_CACHE:
        return QUERY_TERMS_CACHE
    import os
    # Load from simple text file
    if os.path.exists('words.txt'):
        try:
            with open('words.txt', 'r') as f:
                words = [line.strip() for line in f if line.strip()]
            if words:
                QUERY_TERMS_CACHE = words
                print(f"Loaded {len(words)} query words from words.txt")
                return words
        except Exception as e:
            print(f"Could not load words.txt: {e}")
    # Fall back to predefined terms if the file is not found
    print("Warning: words.txt not found, using fallback word list")
    fallback_words = ['reddit', 'people', 'think', 'really', 'would', 'something', 'probably',
                      'actually', 'pretty', 'though', 'better', 'different', 'problem', 'question',
                      'interesting', 'understand', 'example', 'exactly', 'reason', 'comment']
    QUERY_TERMS_CACHE = fallback_words
    return fallback_words

def fetch_subreddit_samples() -> List[str]:
    """Load all subreddits from the subreddits.txt file"""
    import os
    # Load from simple text file
    if os.path.exists('subreddits.txt'):
        try:
            with open('subreddits.txt', 'r') as f:
                subreddits = [line.strip() for line in f if line.strip()]
            if subreddits:
                print(f"Loaded {len(subreddits)} subreddits from subreddits.txt")
                return subreddits
        except Exception as e:
            print(f"Could not load subreddits.txt: {e}")
    # Fall back to common subreddits if the file is not found
    print("Warning: subreddits.txt not found, using fallback subreddit list")
    fallback_subreddits = ['AskReddit', 'funny', 'pics', 'gaming', 'worldnews',
                           'todayilearned', 'science', 'movies', 'videos', 'music']
    return fallback_subreddits

def generate_fts_query(num_words: int, query_terms: List[str]) -> str:
    """Generate an FTS query with the specified number of words - pure random selection"""
    if not query_terms:
        query_terms = fetch_query_terms()
    # Pure random selection of words (with replacement if needed)
    if num_words <= len(query_terms):
        selected_words = random.sample(query_terms, num_words)
    else:
        # If we need more words than are available, allow repetition
        selected_words = [random.choice(query_terms) for _ in range(num_words)]
    # Simple space-separated query (FTS handles this automatically)
    return ' '.join(selected_words)
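# Illustrative only: with num_words=3, the returned query string might be
#   "people really comment"
# (actual words depend on words.txt and the RNG state).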
def generate_filter_conditions(num_filters: int, subreddit_samples: List[str], sequential: bool = True) -> Tuple[str, List[str]]:
    """Generate filter conditions (sequential or random)"""
    if num_filters == 0:
        return "", []
    available_filters = list(FILTER_COLUMNS.keys())
    if sequential:
        # Use filters in order: temporal first, then categorical, then numeric
        filter_order = [
            'year', 'month', 'day_of_week', 'hour', 'is_weekend',   # Temporal
            'subreddit', 'score_category', 'engagement_level',      # Categorical
            'is_controversial', 'is_viral', 'is_deleted', 'is_removed',  # Boolean
            'has_url', 'has_emoji',                                 # Boolean
            'score', 'controversiality', 'comment_length', 'word_count',  # Numeric
            'fake_sentiment_score',                                 # Numeric
            'fake_category_small', 'fake_category_medium',          # Fake categories
            'fake_category_large', 'fake_category_xlarge'
        ]
        # Take the first N filters in order
        selected_filters = [f for f in filter_order if f in available_filters][:num_filters]
    else:
        # Random selection (original behavior)
        selected_filters = random.sample(available_filters, min(num_filters, len(available_filters)))
    conditions = []
    filter_types = []
    for filter_col in selected_filters:
        col_info = FILTER_COLUMNS[filter_col]
        filter_types.append(f"{filter_col}({col_info['index']})")
        if col_info['type'] == 'bool':
            value = random.choice(col_info['values'])
            conditions.append(f"{filter_col} = {str(value).lower()}")
        elif col_info['type'] == 'int':
            if 'values' in col_info:
                value = random.choice(col_info['values'])
                conditions.append(f"{filter_col} = {value}")
            elif 'range' in col_info:
                min_val, max_val = col_info['range']
                # Generate range query
                if random.random() > 0.5:
                    # Single value comparison
                    value = random.randint(min_val, max_val)
                    op = random.choice(['>', '<', '>=', '<=', '='])
                    conditions.append(f"{filter_col} {op} {value}")
                else:
                    # Range query
                    val1 = random.randint(min_val, max_val)
                    val2 = random.randint(min_val, max_val)
                    if val1 > val2:
                        val1, val2 = val2, val1
                    conditions.append(f"{filter_col} >= {val1} AND {filter_col} <= {val2}")
        elif col_info['type'] == 'float':
            min_val, max_val = col_info['range']
            value = random.uniform(min_val, max_val)
            op = random.choice(['>', '<', '>=', '<='])
            conditions.append(f"{filter_col} {op} {value:.2f}")
        elif col_info['type'] == 'string':
            if filter_col == 'subreddit' and subreddit_samples:
                value = random.choice(subreddit_samples)
                conditions.append(f"{filter_col} = '{value}'")
            elif 'values' in col_info:
                value = random.choice(col_info['values'])
                conditions.append(f"{filter_col} = '{value}'")
            elif 'cardinality' in col_info:
                # For fake categories - use the correct format: category_0, category_1, etc.
                value = f"category_{random.randint(0, col_info['cardinality'] - 1)}"
                conditions.append(f"{filter_col} = '{value}'")
    # Combine conditions with AND
    where_clause = " AND ".join(conditions)
    return where_clause, filter_types
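# Illustrative only: with num_filters=3 in sequential mode, the first three
# filters in the fixed order (year, month, day_of_week) are used, e.g.
#   where_clause = "year = 2013 AND month = 7 AND day_of_week = 2"
#   filter_types = ['year(Bitmap)', 'month(Bitmap)', 'day_of_week(Bitmap)']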
def run_single_fts_query(table, query_id: int, process_id: int, num_words: int,
                         num_filters: int, query_terms: List[str],
                         subreddit_samples: List[str]) -> QueryMetrics:
    """Execute a single FTS query with filters"""
    # Generate query text
    fts_query = generate_fts_query(num_words, query_terms)
    # Generate filter conditions
    filter_clause, filter_types = generate_filter_conditions(num_filters, subreddit_samples)
    start_time = time.time()
    error_msg = None
    rows_returned = 0
    try:
        # Build the query
        query = table.search(fts_query, query_type="fts", fts_columns=["body"])  # FTS on the body column
        # Add filters if any
        if filter_clause:
            query = query.where(filter_clause)
        # Execute query with limit
        result = query.limit(10).to_arrow()
        rows_returned = len(result)
    except Exception as e:
        error_msg = str(e)
    latency_ms = (time.time() - start_time) * 1000
    return QueryMetrics(
        query_id=query_id,
        process_id=process_id,
        query_text=f"FTS: '{fts_query}'" + (f" WHERE {filter_clause}" if filter_clause else ""),
        num_words=num_words,
        num_filters=num_filters,
        filter_types=filter_types,
        latency_ms=latency_ms,
        rows_returned=rows_returned,
        error=error_msg
    )
def run_process_benchmark(args_tuple) -> List[QueryMetrics]:
    """Run the benchmark in a single process"""
    process_id, queries_per_process, word_count, filter_count, warmup_queries = args_tuple
    print(f"Process {process_id}: Starting with {queries_per_process} queries")
    try:
        # Connect to database
        db = connect_lancedb()
        table = db.open_table(TABLE_NAME)
        # Fetch query terms and subreddit samples
        query_terms = fetch_query_terms()
        subreddit_samples = fetch_subreddit_samples()
        print(f"Process {process_id}: Loaded {len(query_terms)} query terms and {len(subreddit_samples)} subreddits")
        # Warmup queries
        if warmup_queries > 0:
            print(f"Process {process_id}: Running {warmup_queries} warmup queries...")
            for _ in range(warmup_queries):
                try:
                    run_single_fts_query(
                        table, -1, process_id,
                        word_count,    # Fixed word count
                        filter_count,  # Fixed filter count
                        query_terms, subreddit_samples
                    )
                except Exception:
                    pass  # Warmup errors are ignored
        # Benchmark queries
        results = []
        print(f"Process {process_id}: Starting benchmark queries...")
        benchmark_start_time = time.time()
        for i in range(queries_per_process):
            # Use the fixed word count and filter count for all queries
            metrics = run_single_fts_query(
                table, i, process_id,
                word_count, filter_count,
                query_terms, subreddit_samples
            )
            results.append(metrics)
            # Progress reporting
            if (i + 1) % 100 == 0:
                successful = sum(1 for r in results if r.error is None)
                avg_latency = np.mean([r.latency_ms for r in results if r.error is None])
                elapsed_time = time.time() - benchmark_start_time
                current_qps = len(results) / elapsed_time if elapsed_time > 0 else 0
                print(f"Process {process_id}: Completed {i+1}/{queries_per_process} queries "
                      f"(Success: {successful}, Avg latency: {avg_latency:.1f}ms, QPS: {current_qps:.1f})")
        print(f"Process {process_id}: Completed all {queries_per_process} queries")
        return results
    except Exception as e:
        print(f"Process {process_id}: Failed with error: {e}")
        return []
def aggregate_results(all_metrics: List[QueryMetrics]) -> BenchmarkResults:
    """Aggregate metrics from all processes"""
    successful_metrics = [m for m in all_metrics if m.error is None]
    failed_metrics = [m for m in all_metrics if m.error is not None]
    latencies = [m.latency_ms for m in successful_metrics]
    # Calculate filter and word-count distributions
    filter_distribution = {}
    word_count_distribution = {}
    for m in all_metrics:
        # Count filter usage
        for filter_type in m.filter_types:
            filter_distribution[filter_type] = filter_distribution.get(filter_type, 0) + 1
        # Count word usage
        word_count_distribution[m.num_words] = word_count_distribution.get(m.num_words, 0) + 1
    # Placeholder total time (the max single-query latency); main() overwrites
    # this with the actual wall-clock time after all processes finish
    if all_metrics:
        total_time = max(m.latency_ms for m in all_metrics) / 1000.0
    else:
        total_time = 0
    return BenchmarkResults(
        total_queries=len(all_metrics),
        successful_queries=len(successful_metrics),
        failed_queries=len(failed_metrics),
        total_time_seconds=total_time,
        queries_per_second=len(all_metrics) / total_time if total_time > 0 else 0,
        avg_latency_ms=np.mean(latencies) if latencies else 0,
        p50_latency_ms=np.percentile(latencies, 50) if latencies else 0,
        p90_latency_ms=np.percentile(latencies, 90) if latencies else 0,
        p95_latency_ms=np.percentile(latencies, 95) if latencies else 0,
        p99_latency_ms=np.percentile(latencies, 99) if latencies else 0,
        avg_rows_returned=np.mean([m.rows_returned for m in successful_metrics]) if successful_metrics else 0,
        filter_distribution=filter_distribution,
        word_count_distribution=word_count_distribution
    )
def main():
    parser = argparse.ArgumentParser(
        description="Multi-process FTS benchmark for LanceDB",
        formatter_class=argparse.RawDescriptionHelpFormatter,
        epilog="""
This benchmark tests Full-Text Search (FTS) performance on the 600M row Reddit dataset.

Setup:
  1. Ensure words.txt exists (list of query words, one per line)
  2. Ensure subreddits.txt exists (list of subreddits, one per line)
  3. Run the benchmark with the desired parameters

Filter Order (sequential):
  Temporal: year, month, day_of_week, hour, is_weekend
  Categorical: subreddit, score_category, engagement_level
  Boolean: is_controversial, is_viral, is_deleted, is_removed, has_url, has_emoji
  Numeric: score, controversiality, comment_length, word_count, fake_sentiment_score
  Fake: fake_category_small, fake_category_medium, fake_category_large, fake_category_xlarge

Examples:
  # Default: 3 words, 4 filters, 4 processes
  python fts_benchmark.py

  # Pure FTS (no filters)
  python fts_benchmark.py --words 5 --filters 0

  # Heavy filtering (8 filters)
  python fts_benchmark.py -p 4 -q 500 --words 3 --filters 8

  # High throughput test
  python fts_benchmark.py -p 8 -q 1000 --words 2 --filters 4

  # Test different complexities
  python fts_benchmark.py --words 1 --filters 2   # Simple
  python fts_benchmark.py --words 5 --filters 10  # Complex
"""
    )
    parser.add_argument("-p", "--processes", type=int, default=4,
                        help="Number of processes (default: 4)")
    parser.add_argument("-q", "--queries", type=int, default=100,
                        help="Total number of queries (default: 100)")
    parser.add_argument("-w", "--warmup", type=int, default=10,
                        help="Warmup queries per process (default: 10)")
    parser.add_argument("--words", type=int, default=3,
                        help="Number of words in each FTS query (default: 3)")
    parser.add_argument("--filters", type=int, default=4,
                        help="Number of filters to apply to each query (default: 4)")
    parser.add_argument("-o", "--output", type=str, default="fts_benchmark_results.csv",
                        help="Output file for results (default: fts_benchmark_results.csv)")
    args = parser.parse_args()
    # Fixed word and filter counts used for every query
    word_count = args.words
    filter_count = args.filters
    print("="*70)
    print("FTS Benchmark Configuration")
    print("="*70)
    print(f"Processes: {args.processes}")
    print(f"Total queries: {args.queries}")
    print(f"Queries per process: {args.queries // args.processes}")
    print(f"Warmup queries per process: {args.warmup}")
    print(f"Words per query: {word_count}")
    print(f"Filters per query: {filter_count}")
    print(f"Output file: {args.output}")
    print("="*70)
    # Test connection
    print("\nTesting connection...")
    try:
        db = connect_lancedb()
        table = db.open_table(TABLE_NAME)
        row_count = table.count_rows()
        print(f"✓ Connected to dataset with {row_count:,} rows")
    except Exception as e:
        print(f"✗ Failed to connect: {e}")
        return 1
    # Prepare per-process arguments with the fixed values
    queries_per_process = args.queries // args.processes
    process_args = [
        (i, queries_per_process, word_count, filter_count, args.warmup)
        for i in range(args.processes)
    ]
    # Run benchmark
    print(f"\nStarting benchmark with {args.processes} processes...")
    start_time = time.time()
    if args.processes == 1:
        # Single process
        all_metrics = run_process_benchmark(process_args[0])
    else:
        # Multi-process
        with mp.Pool(processes=args.processes) as pool:
            results = pool.map(run_process_benchmark, process_args)
        all_metrics = [m for process_results in results for m in process_results]
    total_time = time.time() - start_time
    # Aggregate results
    benchmark_results = aggregate_results(all_metrics)
    benchmark_results.total_time_seconds = total_time  # Use actual wall time
    benchmark_results.queries_per_second = len(all_metrics) / total_time if total_time > 0 else 0
    # Print summary
    print("\n" + "="*70)
    print("Benchmark Results")
    print("="*70)
    print(f"Total queries: {benchmark_results.total_queries}")
    print(f"Successful: {benchmark_results.successful_queries}")
    print(f"Failed: {benchmark_results.failed_queries}")
    print(f"Total time: {benchmark_results.total_time_seconds:.2f} seconds")
    print(f"Queries per second: {benchmark_results.queries_per_second:.2f}")
    print(f"\nLatency percentiles (ms):")
    print(f"  Average: {benchmark_results.avg_latency_ms:.2f}")
    print(f"  P50: {benchmark_results.p50_latency_ms:.2f}")
    print(f"  P90: {benchmark_results.p90_latency_ms:.2f}")
    print(f"  P95: {benchmark_results.p95_latency_ms:.2f}")
    print(f"  P99: {benchmark_results.p99_latency_ms:.2f}")
    print(f"\nAverage rows returned: {benchmark_results.avg_rows_returned:.1f}")
    # Save results to CSV
    import csv
    import os
    from datetime import datetime
    # Prepare row data
    row_data = {
        'timestamp': datetime.now().strftime('%Y-%m-%d %H:%M:%S'),
        'processes': args.processes,
        'total_queries': args.queries,
        'warmup_queries': args.warmup,
        'words_per_query': word_count,
        'filters_per_query': filter_count,
        'queries_per_second': benchmark_results.queries_per_second,
        'successful_queries': benchmark_results.successful_queries,
        'failed_queries': benchmark_results.failed_queries,
        'avg_latency_ms': benchmark_results.avg_latency_ms,
        'p50_latency_ms': benchmark_results.p50_latency_ms,
        'p90_latency_ms': benchmark_results.p90_latency_ms,
        'p95_latency_ms': benchmark_results.p95_latency_ms,
        'p99_latency_ms': benchmark_results.p99_latency_ms,
        'avg_rows_returned': benchmark_results.avg_rows_returned,
        'total_time_seconds': benchmark_results.total_time_seconds
    }
    # Check whether the file exists to determine if we need to write a header
    file_exists = os.path.exists(args.output)
    # Write or append to CSV
    with open(args.output, 'a', newline='') as f:
        writer = csv.DictWriter(f, fieldnames=row_data.keys())
        # Write header if the file is new
        if not file_exists:
            writer.writeheader()
        # Write the results
        writer.writerow(row_data)
    print(f"\n✓ Results {'appended to' if file_exists else 'saved to'} {args.output}")
    print("="*70)
    return 0

if __name__ == "__main__":
    exit(main())
prewarm_by_month_columns.py
#!/usr/bin/env python3
"""
Prewarm the Reddit dataset by month and column group using the LanceDB remote API.
Breaking the prewarm into smaller chunks gives better performance; the goal is
to warm the data in the plan executor for take operations.
"""
import lancedb
import time
import logging
from concurrent.futures import ThreadPoolExecutor, as_completed
from typing import List, Dict
from dataclasses import dataclass
from datetime import datetime

# Create a log filename with a timestamp
log_filename = f"prewarm_{datetime.now().strftime('%Y%m%d_%H%M%S')}.log"

# Configure logging to both console and file
logging.basicConfig(
    level=logging.INFO,
    format='%(asctime)s - %(levelname)s - %(message)s',
    handlers=[
        logging.StreamHandler(),           # Console output
        logging.FileHandler(log_filename)  # File output
    ]
)
logger = logging.getLogger(__name__)

# Log the start
logger.info(f"Logging to both console and file: {log_filename}")

# LanceDB connection details from 0810
DATABASE = "internet"
TABLE_NAME = "reddit_comments"
LANCEDB_HOST = "http://internal-k8s-lancedb-lancedbq-09d85d35db-379805588.us-west-2.elb.amazonaws.com:80"
API_KEY = "sk_reevo_prod_5a4d838fe66bbef2b"
REGION = "us-west-2"

# Column groups based on data type and access patterns
COLUMN_GROUPS = {
    'identifiers': ['id', 'author', 'link_id', 'subreddit_id'],
    'content': ['body', 'subreddit'],
    'metrics': ['score', 'controversiality', 'created_utc'],
    'temporal': ['year', 'month', 'day_of_week', 'hour', 'is_weekend'],
    'derived_text': ['comment_length', 'word_count', 'has_url', 'has_emoji'],
    'status': ['is_deleted', 'is_removed', 'is_controversial', 'is_viral'],
    'categories': ['score_category', 'engagement_level'],
    'fake_small': ['fake_sentiment_score', 'fake_category_small'],
    'fake_large': ['fake_category_medium', 'fake_category_large', 'fake_category_xlarge']
}
@dataclass
class PrewarmTask:
    """Represents a single prewarm task"""
    year: int
    month: int
    column_group: str
    columns: List[str]

def connect_lancedb():
    """Connect to LanceDB remote"""
    return lancedb.connect(
        uri=f"db://{DATABASE}",
        host_override=LANCEDB_HOST,
        api_key=API_KEY,
        region=REGION
    )

def prewarm_task(task: PrewarmTask) -> Dict:
    """Execute a single prewarm task for specific columns in a month"""
    start_time = time.time()
    try:
        # Connect and open table
        db = connect_lancedb()
        table = db.open_table(TABLE_NAME)
        # Query specific columns for this month
        result = table.search() \
            .where(f"year = {task.year} AND month = {task.month}") \
            .select(task.columns) \
            .limit(50000000) \
            .to_arrow()
        rows_read = len(result)
        elapsed = time.time() - start_time
        return {
            'year_month': f"{task.year}-{task.month:02d}",
            'column_group': task.column_group,
            'columns': len(task.columns),
            'rows_read': rows_read,
            'elapsed_time': elapsed,
            'rows_per_second': rows_read / elapsed if elapsed > 0 else 0
        }
    except Exception as e:
        logger.error(f"Error prewarming {task.year}-{task.month:02d} [{task.column_group}]: {e}")
        return {
            'year_month': f"{task.year}-{task.month:02d}",
            'column_group': task.column_group,
            'rows_read': 0,
            'elapsed_time': time.time() - start_time,
            'error': str(e)
        }
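# Illustrative only (timing values made up): a successful task result looks like
#   {'year_month': '2012-01', 'column_group': 'content', 'columns': 2,
#    'rows_read': 15330487, 'elapsed_time': 42.0, 'rows_per_second': 365011.6}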
def generate_prewarm_tasks() -> List[PrewarmTask]:
    """Generate all prewarm tasks (month x column_group combinations)"""
    tasks = []
    # All months in the dataset
    year_months = [
        (2012, 1), (2012, 2), (2012, 3), (2012, 4), (2012, 5), (2012, 6),
        (2012, 7), (2012, 8), (2012, 9), (2012, 10), (2012, 11), (2012, 12),
        (2013, 1), (2013, 2), (2013, 3), (2013, 4), (2013, 5), (2013, 6),
        (2013, 7), (2013, 8), (2013, 9), (2013, 10), (2013, 11), (2013, 12),
        (2014, 1), (2014, 2), (2014, 3), (2014, 4)
    ]
    # Create a task for each month x column_group combination
    for year, month in year_months:
        for group_name, columns in COLUMN_GROUPS.items():
            tasks.append(PrewarmTask(year, month, group_name, columns))
    return tasks
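# Illustrative only: the first generated task is
#   PrewarmTask(year=2012, month=1, column_group='identifiers',
#               columns=['id', 'author', 'link_id', 'subreddit_id'])
# and len(generate_prewarm_tasks()) == 28 * len(COLUMN_GROUPS) == 252.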
def parallel_prewarm(max_workers: int = 12):
    """Prewarm all months and columns in parallel"""
    # Generate all tasks
    tasks = generate_prewarm_tasks()
    logger.info(f"\n{'='*70}")
    logger.info(f"Starting Parallel Prewarm")
    logger.info(f"Total tasks: {len(tasks)} (28 months × {len(COLUMN_GROUPS)} column groups)")
    logger.info(f"Max parallel workers: {max_workers}")
    logger.info(f"Column groups: {', '.join(COLUMN_GROUPS.keys())}")
    logger.info(f"{'='*70}\n")
    overall_start = time.time()
    results = []
    total_rows = 0
    last_progress_time = time.time()
    progress_interval = 10  # Print a progress summary every 10 seconds
    with ThreadPoolExecutor(max_workers=max_workers) as executor:
        # Submit all tasks
        future_to_task = {
            executor.submit(prewarm_task, task): task
            for task in tasks
        }
        completed = 0
        successful = 0
        failed = 0
        # Process results as they complete
        for future in as_completed(future_to_task):
            task = future_to_task[future]
            completed += 1
            # Compute progress and ETA up front so the periodic summary below
            # can use them even when a task fails
            progress_pct = (completed / len(tasks)) * 100
            elapsed = time.time() - overall_start
            eta_seconds = (elapsed / completed) * (len(tasks) - completed)
            eta_minutes = eta_seconds / 60
            try:
                result = future.result()
                results.append(result)
                if 'error' not in result:
                    successful += 1
                    total_rows += result['rows_read']
                    logger.info(f"✓ [{completed:3d}/{len(tasks)}] ({progress_pct:5.1f}%) "
                                f"{result['year_month']} [{result['column_group']:12s}]: "
                                f"{result['rows_read']:,} rows in {result['elapsed_time']:.1f}s "
                                f"(ETA: {eta_minutes:.1f}min)")
                else:
                    failed += 1
                    logger.error(f"✗ [{completed:3d}/{len(tasks)}] ({progress_pct:5.1f}%) "
                                 f"{result['year_month']} [{result['column_group']}]: {result['error']}")
                # Print a progress summary every N seconds
                current_time = time.time()
                if current_time - last_progress_time >= progress_interval:
                    elapsed_total = current_time - overall_start
                    rate = total_rows / elapsed_total if elapsed_total > 0 else 0
                    logger.info(f"\n📊 Progress Update: {completed}/{len(tasks)} tasks "
                                f"({successful} success, {failed} failed)")
                    logger.info(f"   Total rows processed: {total_rows:,} "
                                f"(Rate: {rate:,.0f} rows/sec)")
                    logger.info(f"   Elapsed: {elapsed_total/60:.1f} min, "
                                f"ETA: {eta_minutes:.1f} min\n")
                    last_progress_time = current_time
            except Exception as e:
                failed += 1
                logger.error(f"✗ [{completed:3d}/{len(tasks)}] Task failed: {e}")
    # Summary statistics
    overall_elapsed = time.time() - overall_start
    # Group results by month and by column group for the summary
    month_stats = {}
    column_stats = {}
    for result in results:
        if 'error' not in result:
            ym = result['year_month']
            cg = result['column_group']
            # Month stats
            if ym not in month_stats:
                month_stats[ym] = {'groups': 0, 'rows': 0, 'time': 0}
            month_stats[ym]['groups'] += 1
            month_stats[ym]['rows'] += result['rows_read']
            month_stats[ym]['time'] += result['elapsed_time']
            # Column group stats
            if cg not in column_stats:
                column_stats[cg] = {'months': 0, 'rows': 0, 'time': 0}
            column_stats[cg]['months'] += 1
            column_stats[cg]['rows'] += result['rows_read']
            column_stats[cg]['time'] += result['elapsed_time']
    logger.info(f"\n{'='*70}")
    logger.info(f"✅ PREWARM COMPLETED")
    logger.info(f"{'='*70}")
    logger.info(f"📈 Overall Statistics:")
    logger.info(f"   Tasks: {successful} successful, {failed} failed (out of {len(tasks)} total)")
    logger.info(f"   Success rate: {(successful/len(tasks)*100):.1f}%")
    logger.info(f"   Total rows warmed: {total_rows:,}")
    logger.info(f"   Total time: {overall_elapsed/60:.1f} minutes ({overall_elapsed:.1f} seconds)")
    logger.info(f"   Average rate: {total_rows/overall_elapsed:,.0f} rows/second")
    logger.info(f"   Average time per task: {overall_elapsed/completed:.1f} seconds")
    # Show month summary (top 5)
    if month_stats:
        logger.info(f"\n📅 Top Months by Rows:")
        sorted_months = sorted(month_stats.items(), key=lambda x: x[1]['rows'], reverse=True)[:5]
        for month, stats in sorted_months:
            completion = (stats['groups'] / len(COLUMN_GROUPS)) * 100
            logger.info(f"   {month}: {stats['rows']:,} rows, "
                        f"{stats['groups']}/{len(COLUMN_GROUPS)} groups ({completion:.0f}% complete)")
    # Show column group performance
    if column_stats:
        logger.info(f"\n📊 Column Group Performance:")
        for group in sorted(column_stats.keys()):
            stats = column_stats[group]
            avg_time = stats['time'] / stats['months'] if stats['months'] > 0 else 0
            logger.info(f"   {group:15s}: {stats['months']}/28 months, "
                        f"avg {avg_time:.1f}s per month")
    logger.info(f"\n{'='*70}")
    logger.info(f"🎉 Prewarm finished in {overall_elapsed/60:.1f} minutes!")
    logger.info(f"📝 Full log saved to: {log_filename}")
    logger.info(f"{'='*70}\n")
    return results
def main():
    """Main entry point"""
    import argparse
    parser = argparse.ArgumentParser(
        description="Prewarm Reddit dataset by month and column groups",
        formatter_class=argparse.RawDescriptionHelpFormatter,
        epilog="""
This script prewarms the dataset by:
  1. Breaking data into 28 months (2012-01 to 2014-04)
  2. Breaking columns into 9 logical groups
  3. Creating 252 total tasks (28 × 9)
  4. Running tasks in parallel with configurable workers

Column Groups:
  - identifiers: id, author, link_id, subreddit_id
  - content: body, subreddit
  - metrics: score, controversiality, created_utc
  - temporal: year, month, day_of_week, hour, is_weekend
  - derived_text: comment_length, word_count, has_url, has_emoji
  - status: is_deleted, is_removed, is_controversial, is_viral
  - categories: score_category, engagement_level
  - fake_small: fake_sentiment_score, fake_category_small
  - fake_large: fake_category_medium/large/xlarge

Examples:
  # Default 12 workers
  python prewarm_by_month_columns.py

  # Use more workers for a faster prewarm
  python prewarm_by_month_columns.py --workers 24

  # Use fewer workers for less load
  python prewarm_by_month_columns.py --workers 6
"""
    )
    parser.add_argument("--workers", type=int, default=12,
                        help="Maximum number of parallel workers (default: 12)")
    args = parser.parse_args()
    try:
        # Test the connection first
        logger.info("Testing LanceDB connection...")
        db = connect_lancedb()
        table = db.open_table(TABLE_NAME)
        count = table.count_rows()
        logger.info(f"✓ Connected! Dataset has {count:,} total rows")
        # Show configuration
        logger.info(f"\n📋 Configuration:")
        logger.info(f"   Max workers: {args.workers}")
        logger.info(f"   Months to prewarm: 28 (2012-01 to 2014-04)")
        logger.info(f"   Column groups: {len(COLUMN_GROUPS)}")
        logger.info(f"   Total tasks: {28 * len(COLUMN_GROUPS)}")
        # Run prewarm
        parallel_prewarm(max_workers=args.workers)
        return 0
    except Exception as e:
        logger.error(f"Failed: {e}")
        return 1

if __name__ == "__main__":
    exit(main())
process_parquet_to_lance_parallel.py
#!/usr/bin/env python3
"""
Parallel processor for Parquet to Lance conversion with memory optimization
Uses multiple workers to speed up S3 uploads while keeping memory usage low
"""
import os
import sys
import time
import gc
import glob
import random
import argparse
import logging
from datetime import datetime, timedelta
from typing import Dict, Tuple
from multiprocessing import Pool

import numpy as np
import pyarrow as pa
import pyarrow.parquet as pq
import lance

logging.basicConfig(level=logging.INFO, format='%(asctime)s - Worker-%(process)d - %(levelname)s - %(message)s')
logger = logging.getLogger(__name__)

# Global memory limits per worker
MAX_BATCH_SIZE = 5000  # Process only 5k rows at a time per worker
MAX_MEMORY_MB = 100    # Each worker tries to stay under 100MB
def estimate_batch_memory(num_rows: int, avg_row_size: int = 525) -> float:
    """Estimate memory usage for a batch in MB"""
    return (num_rows * avg_row_size * 2) / (1024 * 1024)
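# Worked example: with the default 525-byte average row and the 2x overhead
# factor above, a 5,000-row batch is estimated at
#   5000 * 525 * 2 / 1024**2 ≈ 5.0 MB per worker.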
def transform_row(row_dict: Dict, global_idx: int) -> Dict:
    """Transform a single row to our schema - memory efficient"""
    # Extract values with defaults
    body = str(row_dict.get('body', '')) if row_dict.get('body') is not None else ''
    score = int(row_dict.get('score', 0)) if row_dict.get('score') is not None else 0
    created_utc = int(row_dict.get('created_utc', 0)) if row_dict.get('created_utc') is not None else 0
    author = str(row_dict.get('author', '')) if row_dict.get('author') is not None else ''
    controversiality = int(row_dict.get('controversiality', 0)) if row_dict.get('controversiality') is not None else 0
    # Create transformed row
    transformed = {}
    # Copy original fields
    for field in ['id', 'author', 'body', 'controversiality', 'created_utc',
                  'link_id', 'score', 'subreddit', 'subreddit_id']:
        transformed[field] = row_dict.get(field)
    # Derived temporal columns
    if created_utc > 0:
        try:
            dt = datetime.fromtimestamp(created_utc)
            transformed['year'] = dt.year
            transformed['month'] = dt.month
            transformed['day_of_week'] = dt.weekday()
            transformed['hour'] = dt.hour
            transformed['is_weekend'] = dt.weekday() >= 5
        except Exception:
            # Invalid timestamp - fall back to fixed defaults
            transformed['year'] = 2012
            transformed['month'] = 1
            transformed['day_of_week'] = 0
            transformed['hour'] = 0
            transformed['is_weekend'] = False
    else:
        transformed['year'] = 2012
        transformed['month'] = 1
        transformed['day_of_week'] = 0
        transformed['hour'] = 0
        transformed['is_weekend'] = False
    # Text-based columns
    transformed['comment_length'] = len(body)
    transformed['word_count'] = len(body.split()) if body else 0
    transformed['has_url'] = 'http' in body.lower() or 'www' in body.lower()
    # Approximation: treats any non-ASCII character in the first 100 chars as an emoji
    transformed['has_emoji'] = any(ord(c) > 127 for c in body[:100]) if body else False
    # Status columns
    transformed['is_deleted'] = author == '[deleted]'
    transformed['is_removed'] = body == '[removed]'
    # Score category
    if score < 0:
        transformed['score_category'] = 'negative'
    elif score < 10:
        transformed['score_category'] = 'low'
    elif score < 100:
        transformed['score_category'] = 'medium'
    elif score < 1000:
        transformed['score_category'] = 'high'
    else:
        transformed['score_category'] = 'viral'
    transformed['is_controversial'] = bool(controversiality)
    transformed['engagement_level'] = 'low' if score < 10 else 'medium' if score < 100 else 'high'
    transformed['is_viral'] = score > 1000
    # Fake columns (deterministic per global row index)
    random.seed(global_idx)
    np.random.seed(global_idx)
    transformed['fake_sentiment_score'] = float(np.clip(np.random.normal(0.1, 0.4), -1.0, 1.0))
    transformed['fake_category_small'] = f'category_{global_idx % 10}'
    transformed['fake_category_medium'] = f'category_{global_idx % 100}'
    transformed['fake_category_large'] = f'category_{global_idx % 1000}'
    transformed['fake_category_xlarge'] = f'category_{global_idx % 10000}'
    return transformed
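# Illustrative only: a row with body="Great post!", score=15, and a
# created_utc that falls on Monday 2012-03-05 at 14:00 would gain, among
# other derived fields:
#   year=2012, month=3, hour=14, is_weekend=False,
#   comment_length=11, word_count=2, score_category='medium',
#   engagement_level='medium', is_viral=False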
def process_batch_chunk(args: Tuple[str, int, int, int, int, str, int, bool]) -> Tuple[bool, int, str]:
    """Process a single batch chunk - designed to be run in parallel"""
    parquet_path, batch_start, batch_end, file_start_row, global_batch_idx, s3_uri, batch_size, is_first_batch = args
    try:
        # Open Parquet file
        parquet_file = pq.ParquetFile(parquet_path)
        num_row_groups = parquet_file.num_row_groups
        # Read the specific batch
        if num_row_groups == 1:
            table = parquet_file.read_row_group(0)
            batch_table = table.slice(batch_start, batch_end - batch_start)
        else:
            # Multiple row groups - find the right one(s)
            batch_tables = []
            current_pos = 0
            for rg_idx in range(num_row_groups):
                rg_metadata = parquet_file.metadata.row_group(rg_idx)
                rg_num_rows = rg_metadata.num_rows
                if current_pos + rg_num_rows > batch_start and current_pos < batch_end:
                    rg_table = parquet_file.read_row_group(rg_idx)
                    slice_start = max(0, batch_start - current_pos)
                    slice_end = min(rg_num_rows, batch_end - current_pos)
                    slice_length = slice_end - slice_start
                    if slice_length > 0:
                        batch_tables.append(rg_table.slice(slice_start, slice_length))
                current_pos += rg_num_rows
                if current_pos >= batch_end:
                    break
            if batch_tables:
                batch_table = pa.concat_tables(batch_tables)
            else:
                return False, 0, f"No data found for batch {batch_start}-{batch_end}"
        # Transform the batch
        batch_dict = batch_table.to_pydict()
        batch_data = []
        for i in range(len(batch_dict.get('id', []))):
            row_dict = {key: values[i] if i < len(values) else None
                        for key, values in batch_dict.items()}
            global_idx = file_start_row + batch_start + i
            transformed_row = transform_row(row_dict, global_idx)
            batch_data.append(transformed_row)
        # Convert to table
        column_data = {}
        for key in batch_data[0].keys():
            column_data[key] = [row[key] for row in batch_data]
        table = pa.table(column_data)
        # Clear memory
        del batch_data
        del column_data
        del batch_dict
        del batch_table
        gc.collect()
        # Upload to S3 with retries
        mode = "create" if is_first_batch else "append"
        for retry in range(3):
            try:
                lance.write_dataset(
                    table, s3_uri, mode=mode,
                    storage_options={
                        "timeout": "120s",
                        "connect_timeout": "30s",
                        "aws_region": "us-west-2"
                    }
                )
                rows_uploaded = len(table)
                del table
                gc.collect()
                return True, rows_uploaded, f"Batch {global_batch_idx} uploaded {rows_uploaded} rows"
            except Exception as e:
                if retry < 2:
                    time.sleep(2 ** retry)
                else:
                    return False, 0, f"Batch {global_batch_idx} failed after 3 attempts: {e}"
    except Exception as e:
        return False, 0, f"Batch {global_batch_idx} error: {e}"
def process_parquet_file_parallel(parquet_path: str, start_row: int, end_row: int,
                                  file_start_row: int, file_end_row: int,
                                  s3_uri: str, batch_size: int, num_workers: int) -> int:
    """Process a Parquet file using parallel workers"""
    logger.info(f"\nProcessing: {os.path.basename(parquet_path)}")
    logger.info(f"  File rows: {file_start_row:,} to {file_end_row:,}")
    logger.info(f"  Target rows: {start_row:,} to {end_row:,}")
    logger.info(f"  Batch size: {batch_size:,} rows per worker")
    logger.info(f"  Workers: {num_workers}")
    # Determine overlap
    overlap_start = max(start_row, file_start_row)
    overlap_end = min(end_row, file_end_row)
    if overlap_start >= overlap_end:
        logger.info("  No overlap, skipping")
        return 0
    # Calculate file indices
    start_idx_in_file = overlap_start - file_start_row
    end_idx_in_file = overlap_end - file_start_row
    rows_to_process = end_idx_in_file - start_idx_in_file
    logger.info(f"  Processing {rows_to_process:,} rows from this file")
    # Create batch tasks
    batch_tasks = []
    is_first_batch = (overlap_start == start_row)
    for batch_idx, batch_start in enumerate(range(start_idx_in_file, end_idx_in_file, batch_size)):
        batch_end = min(batch_start + batch_size, end_idx_in_file)
        task = (
            parquet_path,
            batch_start,
            batch_end,
            file_start_row,
            batch_idx,
            s3_uri,
            batch_size,
            is_first_batch and batch_idx == 0
        )
        batch_tasks.append(task)
    logger.info(f"  Created {len(batch_tasks)} batch tasks")
    rows_processed = 0
    failed_batches = []
    # If the first batch creates the dataset, run it synchronously so the
    # parallel append batches never race against dataset creation
    if batch_tasks and batch_tasks[0][7]:
        success, rows, message = process_batch_chunk(batch_tasks.pop(0))
        if success:
            rows_processed += rows
            logger.info(f"  ✓ {message}")
        else:
            failed_batches.append(message)
            logger.error(f"  ✗ {message}")
    # Process the remaining batches in parallel
    with Pool(processes=num_workers) as pool:
        results = pool.map(process_batch_chunk, batch_tasks)
    for success, rows, message in results:
        if success:
            rows_processed += rows
            logger.info(f"  ✓ {message}")
        else:
            failed_batches.append(message)
            logger.error(f"  ✗ {message}")
    if failed_batches:
        logger.warning(f"  {len(failed_batches)} batches failed")
    return rows_processed
def get_file_ranges() -> Dict[str, tuple]:
    """Return known file ranges for the Reddit dataset"""
    return {
        "data_RC_2012-01.parquet": (0, 15330487),
        "data_RC_2012-02.parquet": (15330487, 30590767),
        "data_RC_2012-03.parquet": (30590767, 47552099),
        "data_RC_2012-04.parquet": (47552099, 65520187),
        "data_RC_2012-05.parquet": (65520187, 84869121),
        "data_RC_2012-06.parquet": (84869121, 105592811),
        "data_RC_2012-07.parquet": (105592811, 128198572),
        "data_RC_2012-08.parquet": (128198572, 152106087),
        "data_RC_2012-09.parquet": (152106087, 173808807),
        "data_RC_2012-10.parquet": (173808807, 196788107),
        "data_RC_2012-11.parquet": (196788107, 219725517),
        "data_RC_2012-12.parquet": (219725517, 243738578),
        "data_RC_2013-01.parquet": (243738578, 271715127),
        "data_RC_2013-02.parquet": (271715127, 296959822),
        "data_RC_2013-03.parquet": (296959822, 325289787),
        "data_RC_2013-04.parquet": (325289787, 355530453),
        "data_RC_2013-05.parquet": (355530453, 386330256),
        "data_RC_2013-06.parquet": (386330256, 416799008),
        "data_RC_2013-07.parquet": (416799008, 449561130),
        "data_RC_2013-08.parquet": (449561130, 482335529),
        "data_RC_2013-09.parquet": (482335529, 512447614),
        "data_RC_2013-10.parquet": (512447614, 545889425),
        "data_RC_2013-11.parquet": (545889425, 580020960),
        "data_RC_2013-12.parquet": (580020960, 616275351),
    }
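# Note: the ranges above are cumulative [start, end) global row offsets (each
# start equals the previous file's end), covering rows 0 through 616,275,351
# for 2012-01 through 2013-12; files outside this table are skipped by main().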
def main():
    parser = argparse.ArgumentParser(
        description="Parallel Parquet to Lance processor with memory optimization",
        formatter_class=argparse.RawDescriptionHelpFormatter,
        epilog="""
Parallel processing with memory optimization.

Examples:
  # Process with 16 workers
  python process_parquet_to_lance_parallel.py \\
      --start-row 400000000 \\
      --end-row 600000000 \\
      --workers 16 \\
      --batch-size 5000

  # More workers with smaller batches
  python process_parquet_to_lance_parallel.py \\
      --start-row 400000000 \\
      --end-row 600000000 \\
      --workers 20 \\
      --batch-size 2000
"""
    )
    parser.add_argument("--start-row", type=int, required=True,
                        help="Starting row index")
    parser.add_argument("--end-row", type=int, required=True,
                        help="Ending row index")
    parser.add_argument("--workers", type=int, default=16,
                        help="Number of parallel workers (default: 16)")
    parser.add_argument("--local-dir", default="/tmp/reddit_parquet",
                        help="Directory containing downloaded Parquet files")
    parser.add_argument("--s3-uri",
                        default="s3://lancedb-datasets-prod-us-west-2-reevo/internet/reddit_comments.dataset",
                        help="S3 URI for Lance dataset output")
    parser.add_argument("--batch-size", type=int, default=5000,
                        help="Rows per batch per worker (default: 5000)")
    parser.add_argument("--no-cleanup", action="store_true",
                        help="Don't delete Parquet files after processing")
    args = parser.parse_args()
    # Estimate memory usage
    memory_per_worker = estimate_batch_memory(args.batch_size)
    total_memory = memory_per_worker * args.workers
    logger.info("="*70)
    logger.info("Parallel Reddit Parquet to Lance Processor")
    logger.info(f"Row range: {args.start_row:,} to {args.end_row:,}")
    logger.info(f"Workers: {args.workers}")
    logger.info(f"Batch size: {args.batch_size:,} rows per worker")
    logger.info(f"Est. memory: {memory_per_worker:.1f} MB per worker, {total_memory:.1f} MB total")
    logger.info("="*70)
    # Find Parquet files
    parquet_files = sorted(glob.glob(os.path.join(args.local_dir, "*.parquet")))
    if not parquet_files:
        logger.error(f"No Parquet files found in {args.local_dir}")
        return 1
    logger.info(f"\nFound {len(parquet_files)} Parquet files")
    # Get file ranges
    file_ranges = get_file_ranges()
    # Process each file
    total_rows_processed = 0
    start_time = time.time()
    files_processed = 0
    for parquet_path in parquet_files:
        basename = os.path.basename(parquet_path)
        # Find the matching range
        file_start = file_end = None
        for key, (start, end) in file_ranges.items():
            if key.replace('data_', '').replace('.parquet', '') in basename:
                file_start, file_end = start, end
                break
        if file_start is None:
            logger.warning(f"Unknown file range for {basename}, skipping")
            continue
        # Check whether this file overlaps with our range
        if file_end <= args.start_row or file_start >= args.end_row:
            logger.info(f"\nSkipping {basename} (no overlap)")
            continue
        # Process this file with parallel workers
        logger.info(f"\nProcessing file {files_processed + 1}: {basename}")
        rows = process_parquet_file_parallel(
            parquet_path,
            args.start_row,
            args.end_row,
            file_start,
            file_end,
            args.s3_uri,
            args.batch_size,
            args.workers
        )
        total_rows_processed += rows
        files_processed += 1
        # Clean up the file if requested
        if not args.no_cleanup and rows > 0:
            try:
                os.remove(parquet_path)
                logger.info(f"  ✓ Deleted {basename} to free disk space")
                gc.collect()
            except Exception as e:
                logger.warning(f"  Failed to delete {basename}: {e}")
        # Progress report
        elapsed = time.time() - start_time
        if elapsed > 0:
            rate = total_rows_processed / elapsed
            remaining = args.end_row - args.start_row - total_rows_processed
            eta = remaining / rate if rate > 0 else 0
            logger.info(f"\nOverall progress: {total_rows_processed:,}/{args.end_row - args.start_row:,} rows")
            logger.info(f"Speed: {rate:.0f} rows/sec")
            logger.info(f"ETA: {timedelta(seconds=int(eta))}")
            logger.info(f"Files processed: {files_processed}")
    # Final summary
    total_time = time.time() - start_time
    logger.info("\n" + "="*70)
    logger.info("✓ Processing complete!")
    logger.info(f"  Total rows: {total_rows_processed:,}")
    logger.info(f"  Total time: {timedelta(seconds=int(total_time))}")
    logger.info(f"  Average speed: {total_rows_processed/total_time:.0f} rows/sec")
    logger.info(f"  Files processed: {files_processed}")
    logger.info(f"  Output: {args.s3_uri}")
    logger.info("="*70)
    return 0

if __name__ == "__main__":
    sys.exit(main())
subreddits.txt
creepyPMs
movies
guns
Calgary
fffffffuuuuuuuuuuuu
tf2
pics
DotA2
AskReddit
aww
fantasyfootball
nba
Frugal
motorcycles
OkCupid
toronto
comicbooks
funny
LadyBoners
shittyadvice
todayilearned
battlefield_4
dogemarket
pokemon
Graffiti
malefashionadvice
AdviceAnimals
CFB
books
buildapc
youtube
xboxone
harrypotter
leagueoflegends
DebateAnAtheist
4chan
Autos
mythbusters
Gunners
gonewildcurvy
politics
3dsFCswap
Advice
fatpeoplestories
GreekLife
skyrim
AskWomen
Thailand
diablo3
nosleep
DnD
WTF
hcfactions
intj
atheism
steelers
zelda
news
SFGiants
gamegrumps
FIFA
TrueArt
teenagers
PipeTobacco
Fitness
Conservatives_R_Us
computerforensics
RedditLaqueristas
changemyview
gifs
nfl
runescape
friendsafari
milf
gaybros
MonsterHunter
MINI
explainlikeimfive
minerapocalypse
BabyBumps
videos
IAmA
SubredditDrama
SanJoseSharks
Judaism
trees
PS4
travel
PostCollapse
electronic_cigarette
creepy
bestof
sharedota2
DCcomics
TomHardy
philadelphia
worldnews
science
Barcelona
linux
Fallout
Diablo
exjw
gaming
soccer
Dexter
DunderMifflin
technology
gonewild
mindcrack
humor
Dodgers
JusticePorn
golf
GoneWildPlus
olympia
SimCity
apple
BigBrother
AutoDetailing
flying
conspiracy
SiouxFalls
relationships
sex
photography
trance
skeptic
Android
dogecoin
mildlyinteresting
Christianity
CoinedUp
Guildwars2
picrequests
doctorwho
footballmanagergames
motorcitykitties
baseball
capoeira
Smite
Miami
gameofthrones
sportsbook
Hunting
booksuggestions
short
programming
canada
ArtisanVideos
Eve
DarkSouls2
CODGhosts
yugioh
lincoln
lesbros
PopularOutcasts
ChurchofZhakey
Watches
pokemontrades
SampleSize
hardwareswap
PercyJacksonRP
writing
mexico
photoshopbattles
exmormon
sydney
Ladybonersgonecuddly
DaftPunk
wallpaper
hiphopheads
mylittlepony
rit
Random_Acts_Of_Amazon
Avengers
TheLastAirbender
GetMotivated
tattoos
coheedandcambria
brisbane
LinuxActionShow
pianocovers
cringepics
DesignMyRoom
Assistance
rpg
TwoXChromosomes
buildapcsalesuk
sgsflair
postprocessing
cars
CrazyIdeas
Austin
MLPLounge
magicTCG
Wellington
montageparodies
Firefighting
brandnew
iphone
musictheory
Drugs
FeMRADebates
hockey
RandomActsOfGaming
windowsphone
personalfinance
Petloss
sports
YAwriters
jailbreak
truegaming
gamingpc
SquaredCircle
nsfw_gifs
starcraft
bigdickproblems
Miata
sysadmin
Connecticut
Wicca
mymorningjacket
chubby
Planetside
Minecraft
Music
microgrowery
magicskyfairy
AdoptMyVillager
Conservative
ShinyPokemon
lifehacks
ffxiv
CHIBears
Firearms
Battlefield
formula1
firstimpression
OutreachHPG
dirtypenpals
answers
minnesotatwins
redsox
drums
Bluegrass
playstation
longboarding
photocritique
CanadaPolitics
socialskills
LGBTeens
nintendo
JRPG
WorldofTanks
Parenting
MechanicalKeyboards
woahdude
raisedbynarcissists
math
Dota2Trade
phish
gentlemanboners
Gear4Sale
Unexpected
darksouls
dogs
Vassal40k
sharehearthstone
Justrolledintotheshop
airsoft
Metal
ireland
asoiaf
linux_gaming
television
TrueAtheism
RoomPorn
AskMen
ainbow
Random_Acts_Of_Pizza
fullmoviesonyoutube
drunkvapes
startrek
ladybonersgw
abletonlive
progresspics
jmu
newfoundland
NYGiants
washingtondc
promos
GrandTheftAutoV
Rowing
grool
britishproblems
CollegeBasketball
AskHistorians
mildlyamusing
montreal
ns2
offmychest
asstastic
woodworking
fatlogic
AsianHotties
PrettyGirls
raspberry_pi
SRSsucks
civ
dubai
FTH
ACTrade
ReefTank
breakingbad
Hammers
britpics
NetflixBestOf
dontstarve
gadgets
Ska
islam
gaymers
thewalkingdead
pathofexile
pcmasterrace
Jeep
MtF
anonymous
Boxing
cringe
nature
wow
VictoriaBC
vita
keto
doublespeakprivilege
dwarffortress
Feminism
portugal
JustStemThings
wicked_edge
TheTransphobiaSquad
polyamory
twerking
ravens
IWantToLearn
ShitPoliticsSays
forwardsfromgrandma
swtor
OnePiece
web_design
ExposurePorn
mcservers
loseit
SteamGameSwap
bostontrees
acturnips
GlobalOffensive
phoenix
sandiego
latterdaysaints
legacyfps
india
RotMG
LifeProTips
geocaching
HogwartsRP
depression
moderatepolitics
Awesomenauts
hearthstone
Games
Cigarettes
vancouver
manchester
Ubuntu
altgonewild
police
offbeat
Portland
chile
ecigclassifieds
minecraftsuggestions
miamidolphins
techsupport
Saints
Silverbugs
counting
hookah
trueMusic
cigars
TheRedPill
gaymersgonewild
amiugly
UIUC
smashbros
NoFap
nasa
circlejerk
HeroesofNewerth
tf2trade
batman
BDSMGW
eagles
GifSound
KerbalSpaceProgram
youtubehaiku
WoT
nova
cripplingalcoholism
Louisville
skateboarding
Jokes
MMA
ImGoingToHellForThis
Bass
Anarcho_Capitalism
GekkoukanHigh
pettyrevenge
CoDCompetitive
BipolarReddit
casualiama
confession
dayz
ManchesterNH
Bonsai
Military
MakeupAddiction
anime
watercooling
RealRedditFC
SRSDiscussion
halo
TinyTits
redditcommentates
EnoughLibertarianSpam
bindingofisaac
hwstartups
oculus
philosophy
lgbt
disney
starbound
speedrun
PostHardcore
gameswap
conspiratard
DWMA
fairytail
saplings
bostonceltics
collapse
ucla
fitnesscirclejerk
ShitRedditSays
haikuOS
bisexual
italy
CrossfitGirls
LiverpoolFC
IrishHistory
ar15
49ers
Warhammer
MilitaryPorn
terriblefacebookmemes
Filmmakers
Homebrewing
Volkswagen
PoliticalDiscussion
redditgetsdrawn
DIY
australia
vinyl
entgaming
Augusta
fantasybball
StarWars
chicago
crochet
bicycling
wowraf
translator
bubbling
gonewildaudio
TrollXChromosomes
SF4
Israel
tall
FuckYouImAShark
engineering
Cynicalbrit
Archery
japan
Paleo
CODZombies
TheBluePill
rawdenim
redditblack
unitedkingdom
fifthworldproblems
altnewz
Finland
AskScienceFiction
learndota2
SVExchange
ABraThatFits
Seireitei
MorbidReality
boardgames
arresteddevelopment
PotterPlayRP
languagelearning
TalesFromRetail
glassheads
sneakermarket
| mac | |
| business | |
| Scotch | |
| hardware | |
| Reds | |
| chelseafc | |
| GBr4r | |
| totalwar | |
| kpop | |
| TrueReddit | |
| hometheater | |
| NASCAR | |
| Browns | |
| worldpolitics | |
| elderscrollsonline | |
| argentina | |
| happy | |
| summonerschool | |
| SourceFed | |
| Yogscast | |
| snowboarding | |
| food | |
| Gunpla | |
| polandball | |
| chillmusic | |
| 24hoursupport | |
| blog | |
| SocialEngineering | |
| Cricket | |
| DippingTobacco | |
| streetwear | |
| Art | |
| Playboy | |
| vegetarian | |
| Warthunder | |
| boston | |
| pcgaming | |
| MineZ | |
| nerdcubed | |
| MLS | |
| facepalm | |
| HouseOfLegacy | |
| whatsthatbook | |
| linguistics | |
| StopSelfHarm | |
| AnimalPorn | |
| Terraria | |
| xxfitness | |
| Metalcore | |
| mflb | |
| battlefield3 | |
| battlestations | |
| aggies | |
| TechNewsToday | |
| saudiarabia | |
| smallbusiness | |
| GaymersGoneMild | |
| Pets | |
| DebateReligion | |
| olympics | |
| networking | |
| Illustration | |
| college | |
| Unashamed | |
| EngineeringStudents | |
| beyondthebump | |
| AtheismComingOut | |
| modhelp | |
| talesfromtechsupport | |
| playrust | |
| HuntsvilleAlabama | |
| Flyers | |
| KitchenConfidential | |
| self | |
| asktransgender | |
| Sneakers | |
| CookieClicker | |
| askseddit | |
| thatHappened | |
| belgium | |
| FixedGearBicycle | |
| askscience | |
| katawashoujo | |
| homestuck | |
| history | |
| craigslist | |
| socialism | |
| sweden | |
| climbing | |
| firewater | |
| DeadBedrooms | |
| PrettyLittleLiars | |
| MMFB | |
| Bestof2011 | |
| Showerthoughts | |
| AskACountry | |
| acting | |
| Barca | |
| MosinNagant | |
| PandR | |
| gta5 | |
| Aquariums | |
| MTB | |
| LibertarianLeft | |
| childfree | |
| running | |
| RandomActsofMakeup | |
| ottawa | |
| doublespeakclique | |
| Anarchism | |
| freehugsmc | |
| FantasyPL | |
| norge | |
| visualnovels | |
| stopsmoking | |
| askgaybros | |
| ABDL | |
| Python | |
| Borderlands | |
| FIFA12 | |
| firefall | |
| tipofmytongue | |
| watchpeopledie | |
| Stance | |
| Antitheism | |
| kindle | |
| daddit | |
| pitbulls | |
| MachinePorn | |
| psychology | |
| Pokemongiveaway | |
| 52weeksofbaking | |
| Civcraft | |
| discgolf | |
| occupywallstreet | |
| OnOff | |
| WhatsInThisThing | |
| utdallas | |
| headphones | |
| Poetry | |
| bonnaroo | |
| Mommit | |
| cincinnati | |
| simracing | |
| HackEx | |
| applehelp | |
| CrohnsDisease | |
| Seattle | |
| dubstep | |
| ouya | |
| AssassinOrder | |
| FiftyFifty | |
| mildlyinfuriating | |
| DogeCoinPIF | |
| baltimore | |
| nsfw | |
| TagPro | |
| LondonGaymers | |
| minimalism | |
| femsub | |
| bartenders | |
| uofmn | |
| auslaw | |
| birthcontrol | |
| radiohead | |
| Libertarian | |
| Guitar | |
| Roadcam | |
| BarefootRunning | |
| SteamTradingCards | |
| webcomics | |
| 3DS | |
| FortCollins | |
| HermitCraft | |
| secretsanta | |
| scifi | |
| shittingadvice | |
| DJs | |
| occult | |
| marketing | |
| gopro | |
| AllThingsTerran | |
| Jazz | |
| syriancivilwar | |
| arma | |
| conspiro | |
| needamod | |
| reactiongifs | |
| feedthebeast | |
| CampKronosRP | |
| DragonsDogma | |
| LGBTrees | |
| aviation | |
| AnimalCrossing | |
| londonreal | |
| teslore | |
| foxes | |
| newzealand | |
| BitcoinMining | |
| Motocross | |
| forhire | |
| nuzlocke | |
| stopdrinking | |
| VegRecipes | |
| GoneWildCD | |
| cinematography | |
| Habs | |
| dogemining | |
| AMA | |
| MLPvids | |
| excel | |
| snes | |
| worldbuilding | |
| IAmAFiction | |
| masseffect | |
| greatNWside | |
| halloween | |
| Switzerland | |
| Juicing | |
| northtexasents | |
| YouShouldKnow | |
| Fantasy | |
| LANL_German | |
| shutupandtakemymoney | |
| DarkNetMarkets | |
| TheAwesomeSubreddit | |
| Steam | |
| Throwers | |
| FloridaMan | |
| SilkRoad | |
| Reformed | |
| solotravel | |
| ArcherFX | |
| mtgcube | |
| firefly | |
| Nexus7 | |
| TheSimpsons | |
| madmen | |
| Needafriend | |
| Health | |
| gamedev | |
| europe | |
| malehairadvice | |
| SMITEbug | |
| whiskey | |
| AnnArbor | |
| wifesharing | |
| Ingress | |
| r4r | |
| vexillology | |
| Trucks | |
| EQNext | |
| sto | |
| PS3 | |
| Fishing | |
| SVgiveaway | |
| fo3 | |
| lego | |
| Cardinals | |
| gallifrey | |
| redditgetsdrawnbadly | |
| SRSBusiness | |
| Dentistry | |
| USMCboot | |
| MetalMemes | |
| gamecollecting | |
| RandomActsOfPolish | |
| CFBUploads | |
| socialmedia | |
| orioles | |
| Sonsofanarchy | |
| Accounting | |
| diabetes | |
| dayzlol | |
| LongDistance | |
| community | |
| GiftofGames | |
| killingfloor | |
| windows | |
| xxketo | |
| futurebeats | |
| Paramore | |
| CatalogCrossing | |
| furry | |
| rugbyunion | |
| assassinscreed | |
| progun | |
| Marvel | |
| learnprogramming | |
| netflix | |
| ThriftStoreHauls | |
| damselsindistress | |
| discexchange | |
| gardening | |
| guineapigs | |
| okcupidcirclejerk | |
| homelab | |
| custommagic | |
| Frisson | |
| truetf2 | |
| PostgreSQL | |
| shortscarystories | |
| vaporents | |
| itookapicture | |
| SuicideWatch | |
| wikipedia | |
| Economics | |
| AskFeminists | |
| pakistan | |
| Denver | |
| roosterteeth | |
| TimAndEric | |
| PolishGauntlet | |
| Sprint | |
| FancyFollicles | |
| sharks | |
| InteriorDesign | |
| DowntonAbbey | |
| Objectivism | |
| Nootropics | |
| CanadianPolitics | |
| GoForGold | |
| Munich | |
| StateOfDecay | |
| popping | |
| KarakuraTown | |
| legaladvice | |
| wemetonline | |
| circlebroke | |
| tifu | |
| nostalgia | |
| OFWGKTA | |
| techsnap | |
| CampingandHiking | |
| Lollapalooza | |
| progmetal | |
| houston | |
| PoliticalHumor | |
| FoodPorn | |
| weightroom | |
| NYCapartments | |
| Coachella | |
| TheWire | |
| fountainpens | |
| blackops2 | |
| cats | |
| nexus4 | |
| fantasybaseball | |
| WildStar | |
| SuggestALaptop | |
| buffy | |
| poketradereferences | |
| pkmntcgtrades | |
| CollegeLPT | |
| MensRights | |
| TheHobbit | |
| nyc | |
| M43 | |
| Bitcoin | |
| opiates | |
| Bioshock | |
| dawngate | |
| mindcrackcirclejerk | |
| composer | |
| Team_Awesome | |
| thehungergames | |
| Hiphopcirclejerk | |
| RealGirls | |
| tappedout | |
| WritingPrompts | |
| bigbangtheory | |
| Rateme | |
| techsupportgore | |
| StockMarket | |
| gravityfalls | |
| femalefashionadvice | |
| supremeclothing | |
| palegirls | |
| incremental_games | |
| minecraftROK | |
| DoesAnybodyElse | |
| MechanicAdvice | |
| MST3K | |
| beards | |
| AskAcademia | |
| GYBB | |
| Parkour | |
| Brewers | |
| halifax | |
| surfing | |
| bigboobproblems | |
| COents | |
| ADHD | |
| FryeMadden | |
| Lesbients | |
| Redskins | |
| YGOBinders | |
| NSFW_GIF | |
| KDRAMA | |
| medicine | |
| awwnime | |
| inthenews | |
| 240sx | |
| GTAV | |
| drawing | |
| HIMYM | |
| earthbound | |
| navy | |
| croatia | |
| Debt | |
| FixMyUniversity | |
| explainlikeIAmA | |
| dbz | |
| howtonotgiveafuck | |
| JonTron | |
| Bravenewbies | |
| ploungeafterdark | |
| DenverBroncos | |
| premed | |
| AndroidQuestions | |
| computers | |
| gtamarketplace | |
| HPMOR | |
| MovieSuggestions | |
| stunfisk | |
| orangecounty | |
| TheWarZ | |
| gorillaz | |
| mylittleandysonic1 | |
| gonewildstories | |
| tea | |
| subaru | |
| indiegameswap | |
| spiral_knights | |
| AsianChicks | |
| rage | |
| scuba | |
| starbucks | |
| adventuretime | |
| Epilepsy | |
| Sherlock | |
| bodybuilding | |
| MECoOp | |
| mashups | |
| girls_smiling | |
| Physics | |
| Charlottesville | |
| thenukeandturtle | |
| PurplePillDebate | |
| TumblrInAction | |
| BuyItForLife | |
| desktops | |
| privacy | |
| cheatatmathhomework | |
| OutsideLands | |
| crossfit | |
| QuotesPorn | |
| BronyWeapons | |
| hardbodies | |
| thinspo | |
| chess | |
| Michigan | |
| Hypothyroidism | |
| ucf | |
| SaltLakeCity | |
| TalesFromYourServer | |
| TrueAskReddit | |
| sanfrancisco | |
| electricdaisycarnival | |
| horror | |
| cumsluts | |
| wedding | |
| Heroclix | |
| manga | |
| Sissies | |
| kittens | |
| BHOInfo | |
| Diablo3witchdoctors | |
| kungfu | |
| PHP | |
| opieandanthony | |
| lowendgaming | |
| classicalmusic | |
| beer | |
| minnesotavikings | |
| dragonage | |
| spartanrace | |
| LosAngeles | |
| NorseMythRP | |
| dogpictures | |
| zen | |
| screenshots | |
| GalaxyNexus | |
| GetStudying | |
| actuallesbians | |
| dcpu16 | |
| SRSMen | |
| FictionBrawl | |
| OldSchoolCool | |
| Quebec | |
| southpark | |
| uscg | |
| penpals | |
| buffalobills | |
| drumcorps | |
| SNSDrequest | |
| RedditBrigade | |
| porn | |
| comics | |
| aspergers | |
| RatchetAndClank | |
| Clarinet | |
| Boise | |
| Charlotte | |
| edmproduction | |
| UniversityofReddit | |
| VirtualWDCPC | |
| shittyfoodporn | |
| SWORDS | |
| gamingsuggestions | |
| LiveFromNewYork | |
| windows8 | |
| compsci | |
| melbourne | |
| freebies | |
| litecoin | |
| Luthier | |
| CourtroomJustice | |
| glitch_art | |
| bravelydefault | |
| auburn | |
| mining | |
| Eugene | |
| SkincareAddiction | |
| CanadianForces | |
| INTP | |
| neopets | |
| ifyoulikeblank | |
| cardfightvanguard | |
| Greyhounds | |
| shrooms | |
| FordPiDayChallenge | |
| cosplay | |
| nononono | |
| crafts | |
| Seahawks | |
| TrueBlood | |
| standupshots | |
| borussiadortmund | |
| Degrassi | |
| HistoryPorn | |
| AbandonedPorn | |
| democrats | |
| TheStopGirl | |
| needadvice | |
| LasNoches | |
| ProtectAndServe | |
| Warframe | |
| firstworldproblems | |
| Thetruthishere | |
| Teachers | |
| GameDeals | |
| ClashOfClans | |
| redditdynasty2 | |
| NCIS | |
| BDSMcommunity | |
| LeagueofLegendsMeta | |
| architecture | |
| Supernatural | |
| cscareerquestions | |
| jerktalkdiamond | |
| NewsOfTheWeird | |
| Slender_Man | |
| HITsWorthTurkingFor | |
| nottheonion | |
| askponies | |
| ProgrammerHumor | |
| france | |
| ps3bf3 | |
| CalgaryFlames | |
| Disney_Infinity | |
| barista | |
| Vocaloid | |
| kratom | |
| Scotland | |
| HalfLife | |
| autism | |
| mechwarrior | |
| AskEngineers | |
| csshelp | |
| gainit | |
| ultrahardcore | |
| flashlight | |
| leangains | |
| MyLittleFriends | |
| manhwa | |
| makemychoice | |
| feet | |
| Rockband | |
| DMB | |
| archlinux | |
| paradoxplaza | |
| coys | |
| wiiu | |
| Pegging | |
| fakeid | |
| ClopClop | |
| Columbus | |
| NewYorkMets | |
| litecoinmining | |
| forza | |
| BabyExchange | |
| threesongs | |
| MustangTech | |
| LibSocMC_Talk | |
| gamernews | |
| Coffee | |
| Edinburgh | |
| LoLCodeTrade | |
| French | |
| Naruto | |
| psbattleroyale | |
| Diablo3Strategy | |
| Denmark | |
| rollerderby | |
| dvdcollection | |
| SecurityAnalysis | |
| AnimalCollective | |
| Catholicism | |
| C_Programming | |
| sociology | |
| Entrepreneur | |
| latin | |
| reddevils | |
| timetravel | |
| emulation | |
| Cuckold | |
| ssbbw | |
| mtred | |
| pacers | |
| RWF | |
| Cooking | |
| androidcirclejerk | |
| metanarchism | |
| environmental_science | |
| steampunk | |
| chastity | |
| InternetAMA | |
| blender | |
| BeautyBoxes | |
| Whatcouldgowrong | |
| xkcd | |
| MCFC | |
| devils | |
| christianyouth | |
| MotoX | |
| MapPorn | |
| DaystromInstitute | |
| Spore | |
| SHHHHHEEEEEEEEIIIITT | |
| coins | |
| bikewrench | |
| IWantOut | |
| avengedsevenfold | |
| CrappyDesign | |
| FreeKarma | |
| mead | |
| triangle | |
| musicproduction | |
| punk | |
| geology | |
| seduction | |
| Sat | |
| geek | |
| Dogtraining | |
| buildapcforme | |
| superman | |
| mctourney | |
| thegdstudio | |
| randomactsofamazon | |
| camping | |
| paintball | |
| lingerie | |
| userbattles | |
| ECE | |
| TeacherTales | |
| SketchDaily | |
| mycology | |
| publichealth | |
| mw3 | |
| GTA | |
| neuro | |
| InternetIsBeautiful | |
| Hawaii | |
| 2XLookbook | |
| guildrecruitment | |
| htcone | |
| southpaws | |
| WeAreTheMusicMakers | |
| MineZtradingpost | |
| dykesgonewild | |
| ChivalryGame | |
| Arx_Republicus | |
| insanityworkout | |
| learnmath | |
| buildapcsales | |
| electronics | |
| VivillonExchange | |
| Enhancement | |
| Insurance | |
| starcitizen | |
| ColoradoAvalanche | |
| space | |
| stencils | |
| Edmonton | |
| makinghiphop | |
| SelfSufficiency | |
| mylittlehuman | |
| dpdr | |
| metaTSG | |
| Colts | |
| rance | |
| comedy | |
| vertcoin | |
| entertainment | |
| TheRedLion | |
| dykesgonemild | |
| Demotivational | |
| drunk | |
| 90daysgoal | |
| atletico | |
| EarthPorn | |
| LawSchool | |
| upstate_new_york | |
| Megaten | |
| NHLHUT | |
| Miniswap | |
| knitting | |
| TombRaider | |
| Norway | |
| buccos | |
| TheoryOfReddit | |
| TeenMFA | |
| Hotchickswithtattoos | |
| rant | |
| carcrash | |
| Braveryjerk | |
| FL_Studio | |
| Vivillon | |
| BannedFrom4chan | |
| Mountaineering | |
| shroomers | |
| ALBERTHA | |
| thelastofus | |
| sandwaterfalls | |
| ontario | |
| dailyprogrammer | |
| BurningMan | |
| listentothis | |
| spaceporn | |
| Minecraft360 | |
| investing | |
| RapeSquadKillas | |
| general | |
| Psychonaut | |
| heraldry | |
| TheGirlSurvivalGuide | |
| Neverwinter | |
| lehighvalley | |
| sixers | |
| iOSthemes | |
| athiesm | |
| EDC | |
| circlejerkbreakingbad | |
| SEGAGENESIS | |
| tallfashionadvice | |
| ShotBow | |
| androiddev | |
| KarmaCourt | |
| RandomGoodCompanies | |
| cocktails | |
| germanshepherds | |
| gif | |
| audiophile | |
| ratemygains | |
| NoStupidQuestions | |
| video | |
| Miscarriage | |
| Sabermetrics | |
| gay | |
| DOTA | |
| Watercolor | |
| UVA | |
| starforge | |
| mileycyrus | |
| AmISexy | |
| relationship_advice | |
| Patriots | |
| Supplements | |
| redstatereds | |
| Crainn | |
| ferrets | |
| MMORPG | |
| spotters | |
| RATS | |
| portlandtrees | |
| singapore | |
| turning | |
| festivals | |
| Screenwriting | |
| sustainability | |
| DenverCirclejerk | |
| UBC | |
| AnimeFigures | |
| sailormoon | |
| stocks | |
| AskPhysics | |
| whitewater | |
| POLITIC | |
| theworldnews | |
| hsxc | |
| cantsleep | |
| unt | |
| CrusaderKings | |
| ronpaul | |
| bmx | |
| touhou | |
| SacramentoKings | |
| environment | |
| SkyrimPorn | |
| HVAC | |
| ConnectedCareers | |
| LatvianJokes | |
| Atlanta | |
| guitarlessons | |
| mwo | |
| ExpectationVsReality | |
| DebateAChristian | |
| dreamcraftMC | |
| BeardedDragons | |
| ipad | |
| RustiaTownWars | |
| beertrade | |
| linux4noobs | |
| LSD | |
| RedditDayOf | |
| SoccerBetting | |
| burial | |
| supermoto | |
| Stacked | |
| tdu2 | |
| AlbumArtPorn | |
| auto | |
| ShouldIbuythisgame | |
| incest | |
| CableManagement | |
| nin | |
| askdrugs | |
| xmen | |
| NewOrleans | |
| canucks | |
| titanfall | |
| TransMLP | |
| hentai | |
| Dodge | |
| brocourt | |
| OneY | |
| BreakUps | |
| Breadit | |
| Maplestory | |
| eastboundanddown | |
| Chinacoin | |
| pickling | |
| hockeyquestionmark | |
| findareddit | |
| gaybrosgonemild | |
| nerdfighters | |
| Podiatry | |
| budgetfood | |
| trap | |
| losangeleskings | |
| TheNarutoWorld | |
| nutrition | |
| singularity | |
| goprodiy | |
| TAS | |
| regularshow | |
| GuerrillaGardening | |
| startups | |
| futurebeatproducers | |
| treedibles | |
| glutenfree | |
| TwinCities | |
| AndroidGaming | |
| Standup | |
| learnanimation | |
| norfolk | |
| Republican | |
| HPfanfiction | |
| detroitlions | |
| bjj | |
| WhereDoIStart | |
| farming | |
| chinabookclub | |
| jeddit | |
| uspolitics | |
| CubeWorld | |
| Glitch_in_the_Matrix | |
| TalesofPrivilege | |
| bestofbronze | |
| Welding | |
| dirtysmall | |
| t:yesterday | |
| donaldglover | |
| androidapps | |
| homeland | |
| Futurology | |
| FinalFantasy | |
| suits | |
| marchingband | |
| LasVegas | |
| geologyporn | |
| qotsa | |
| CastleTV | |
| ukpolitics | |
| caps | |
| mcpublic | |
| antisrs | |
| Dallas | |
| WeAreTheFilmMakers | |
| CompetitiveHalo | |
| hodor | |
| alternativeart | |
| Fencing | |
| RedPillWomen | |
| providence | |
| hawkthorne | |
| ems | |
| RPI | |
| literature | |
| Buddhism | |
| RatRod | |
| lost | |
| slashdiablo | |
| LawEnforcement | |
| badhistory | |
| BitcoinMarkets | |
| neverwet | |
| thesims | |
| OpiatesRecovery | |
| bloodlinechampions | |
| sanantonio | |
| Hairporn | |
| martialarts | |
| penguins | |
| DrugNerds | |
| trianglegamers | |
| roblox | |
| webdev | |
| memes | |
| JoeRogan | |
| Glocks | |
| memphisgrizzlies | |
| Pathfinder_RPG | |
| FRAGCAKECHALLENGE | |
| KindVoice | |
| byu | |
| Dreadlocks | |
| Georgia | |
| GameDealsMeta | |
| MusicCirclejerk | |
| law | |
| PS2Ceres | |
| arrow | |
| asianpeoplegifs | |
| Destiny | |
| Hungercraft | |
| chemicalreactiongifs | |
| smnc | |
| ACDuplication | |
| wildhockey | |
| marvelstudios | |
| CastleStory | |
| AskElectronics | |
| blunderyears | |
| futuregarage | |
| LucidDreaming | |
| BostonBruins | |
| Bozeman | |
| wince | |
| Punny | |
| radiocontrol | |
| darksoulsbuilds | |
| leafs | |
| UFOs | |
| Spanking | |
| energy_work | |
| vintageaudio | |
| Paranormal | |
| SRSTelevision | |
| xbox360 | |
| nhl | |
| bf4emblems | |
| BMW | |
| ScientismToday | |
| facebookwins | |
| dataisbeautiful | |
| notinteresting | |
| ImaginaryLandscapes | |
| Hotwife | |
| mmgirls | |
| dota2loungebets | |
| whowouldwin | |
| bodyweightfitness | |
| P90X | |
| Toonami | |
| northernireland | |
| Borderlands2 | |
| swrpg | |
| NotaMethAddict | |
| China | |
| kansascity | |
| farscape | |
| knives | |
| boatporn | |
| baseballcirclejerk | |
| pugs | |
| TF2fashionadvice | |
| weedstocks | |
| MURICA | |
| AntiJokes | |
| Detroit | |
| shill | |
| SouthDakota | |
| unfilter | |
| HairyPussy | |
| Hardcore | |
| realmadrid | |
| DebateAnarchism | |
| audioengineering | |
| GuessTheMovie | |
| Dachshund | |
| strife | |
| IDAP | |
| uglyduckling | |
| frugalmalefashion | |
| RUG_Leeds | |
| futurama | |
| curvy | |
| Vauxhall | |
| statenisland | |
| LeagueOfMemes | |
| Braves | |
| lotr | |
| shittyama | |
| Liftingmusic | |
| desmoines | |
| whatisthisthing | |
| CasualPokemonTrades | |
| StarcraftCirclejerk | |
| darknetplan | |
| suicideprevention | |
| eldertrees | |
| TheStrokes | |
| lostgeneration | |
| poppunkers | |
| mueflair | |
| fcs | |
| StonerEngineering | |
| modelmakers | |
| GirlGamers | |
| ELINT | |
| SouthBend | |
| grandrapids | |
| malefashion | |
| powerrangers | |
| storage | |
| athensohio | |
| gtaonline | |
| crappymusic | |
| pornfree | |
| Eminem | |
| respectthreads | |
| asmr | |
| vertical | |
| bioinformatics | |
| lol | |
| autofellatio | |
| crossdressing | |
| shoegaze | |
| flatcore | |
| RealEstate | |
| podcasts | |
| fantasyhockey | |
| IndieGaming | |
| Favors | |
| hiking | |
| oxford | |
| Chargers | |
| PloungeMafia | |
| askphilosophy | |
| Indoctrinated | |
| beardytown | |
| GW2EU | |
| london | |
| everymanshouldknow | |
| speedpaint | |
| appstate | |
| shittyadviceanimals | |
| indie_rock | |
| Tribes | |
| jakeandamir | |
| design_critiques | |
| Baking | |
| dogebetting | |
| BTFC | |
| funpiece | |
| castiron | |
| Offensive_Wallpapers | |
| Tacoma | |
| HoustonSocials | |
| rocksmith | |
| redheads | |
| wallstreetbets | |
| Pieces | |
| onetruegod | |
| howardstern | |
| DebateaCommunist | |
| smitetraining | |
| summerfilmcontest | |
| Gaming4Gamers | |
| SonicTheHedgehog | |
| CCW | |
| DnB | |
| WomenOfColour | |
| acne | |
| aoe2 | |
| Tennesseetitans | |
| bipolar | |
| minnesota | |
| SRSMeta | |
| sloths | |
| logophilia | |
| Shinecraft | |
| atheismcss | |
| umass | |
| Warhammer40k | |
| vfx | |
| NewToTF2 | |
| lewronggeneration | |
| Nexus5 | |
| css | |
| ElPaso | |
| sips | |
| shittyaskscience | |
| chiliadmystery | |
| Mustang | |
| Drama | |
| southafrica | |
| CHART_BOT | |
| ChristianCreationists | |
| shittyTESlore | |
| LetsChat | |
| MAME | |
| MvC3 | |
| PokemonoftheWeek | |
| comiccodes | |
| Anxiety | |
| bestofMLP | |
| bootroom | |
| FreckledGirls | |
| LetsNotMeet | |
| treesgonewild | |
| armadev | |
| zombies | |
| Torontobluejays | |
| playitforward | |
| nongolfers | |
| TryingForABaby | |
| askaconservative | |
| RandomKindness | |
| typography | |
| Hammocks | |
| Lexus | |
| drugmemes | |
| PropagandaPosters | |
| IronbornCiv | |
| steroids | |
| beermoney | |
| Fifa13 | |
| GlobalOffensiveTrade | |
| Makemeagif | |
| selfharmpics | |
| cyclocross | |
| CGPGrey | |
| HistoryofIdeas | |
| againstmensrights | |
| misleadingthumbnails | |
| ColorBlind | |
| TrueChristian | |
| Brooklyn | |
| oklahoma | |
| uwaterloo | |
| MelanieLaurent | |
| BUGhunt | |
| Colorado | |
| openbra | |
| NUFC | |
| 911truth | |
| editors | |
| evangelion | |
| C25K | |
| shield | |
| no_sob_story | |
| BBQ | |
| fsu | |
| thick | |
| sailing | |
| savageworlds | |
| MakeNewFriendsHere | |
| skiing | |
| beerporn | |
| OrlandoMagic | |
| aves | |
| Stargate | |
| rva | |
| MaddenCC | |
| tampa | |
| ripcity | |
| dayzlfg | |
| homeschool | |
| gamedesign | |
| milwaukee | |
| killzonemercenary | |
| linuxadmin | |
| ColoradoSprings | |
| CannabisExtracts | |
| hugeboobs | |
| Ginden | |
| IBO | |
| ragenovels | |
| greenday | |
| htpc | |
| AskHistory | |
| phillies | |
| introvert | |
| spotify | |
| NBASpurs | |
| dominicans | |
| dating_advice | |
words.txt
| first | |
| time | |
| acid | |
| usually | |
| bust | |
| whaaaat | |
| gonna | |
| trip | |
| balls | |
| slightly | |
| trippin | |
| hours | |
| later | |
| drive | |
| other | |
| hand | |
| single | |
| good | |
| vehicle | |
| sure | |
| smart | |
| cars | |
| like | |
| wonderful | |
| city | |
| folks | |
| they | |
| handle | |
| heavy | |
| haul | |
| farmer | |
| family | |
| kids | |
| impossible | |
| solution | |
| general | |
| purpose | |
| problems | |
| know | |
| manasota | |
| culture | |
| found | |
| state | |
| florida | |
| website | |
| might | |
| able | |
| guide | |
| terms | |
| resources | |
| dunno | |
| take | |
| look | |
| http | |
| flheritage | |
| facts | |
| reports | |
| contexts | |
| wwwcpgc | |
| useful | |
| remember | |
| archaeology | |
| sometimes | |
| subjects | |
| simply | |
| haven | |
| researched | |
| heavily | |
| long | |
| best | |
| references | |
| number | |
| years | |
| ways | |
| determine | |
| anything | |
| newer | |
| find | |
| most | |
| recent | |
| article | |
| possibly | |
| refers | |
| cite | |
| thing | |
| citing | |
| chances | |
| last | |
| substantive | |
| published | |
| topic | |
| site | |
| discuss | |
| sites | |
| recently | |
| excavated | |
| firms | |
| probably | |
| going | |
| generally | |
| easily | |
| accessible | |
| available | |
| also | |
| worth | |
| asking | |
| your | |
| professor | |
| position | |
| more | |
| stuff | |
| word | |
| looking | |
| sanctuary | |
| playing | |
| tennessee | |
| here | |
| interesting | |
| bikes | |
| arnt | |
| really | |
| popular | |
| vintage | |
| collectors | |
| still | |
| great | |
| mountain | |
| motorcycle | |
| intercepter | |
| beautiful | |
| need | |
| sparrow | |
| takes | |
| care | |
| push | |
| every | |
| star | |
| trek | |
| makes | |
| reference | |
| intergalactic | |
| travel | |
| wonder | |
| ever | |
| watched | |
| show | |
| right | |
| thanks | |
| edited | |
| post | |
| suuure | |
| didnt | |
| gretchen | |
| redo | |
| glad | |
| someone | |
| sense | |
| humor | |
| took | |
| tests | |
| same | |
| results | |
| parents | |
| didn | |
| them | |
| though | |
| sent | |
| awesome | |
| school | |
| principal | |
| unfortunately | |
| left | |
| complete | |
| lack | |
| direction | |
| absolutely | |
| idea | |
| want | |
| life | |
| except | |
| tried | |
| surf | |
| fucking | |
| awful | |
| damn | |
| test | |
| lied | |
| again | |
| willing | |
| money | |
| eating | |
| enough | |
| well | |
| granted | |
| work | |
| intense | |
| free | |
| slime | |
| advances | |
| towards | |
| violently | |
| thrashing | |
| arms | |
| room | |
| advantage | |
| otherwise | |
| perfect | |
| humidifier | |
| unexpected | |
| events | |
| occur | |
| ygritte | |
| says | |
| knows | |
| nothing | |
| think | |
| freaking | |
| sexy | |
| judgejudy | |
| form | |
| submit | |
| case | |
| fill | |
| shows | |
| producers | |
| writers | |
| then | |
| lucky | |
| call | |
| back | |
| people | |
| contact | |
| court | |
| system | |
| filed | |
| through | |
| notify | |
| mediation | |
| instead | |
| hearing | |
| friend | |
| made | |
| sexual | |
| orientation | |
| defamation | |
| some | |
| representative | |
| thought | |
| ratings | |
| coffee | |
| dead | |
| prefer | |
| pick | |
| hearse | |
| confident | |
| winning | |
| lose | |
| pretty | |
| disappointed | |
| underperform | |
| rest | |
| tournament | |
| stunning | |
| afraid | |
| anywhere | |
| likely | |
| leave | |
| nestling | |
| safely | |
| inside | |
| three | |
| boxes | |
| attention | |
| uses | |
| strong | |
| gets | |
| tired | |
| guys | |
| gain | |
| until | |
| cash | |
| transfer | |
| into | |
| something | |
| else | |
| wrong | |
| watching | |
| price | |
| rise | |
| power | |
| ther | |
| bitcoin | |
| than | |
| speculation | |
| keen | |
| investment | |
| opportunities | |
| within | |
| market | |
| wait | |
| merge | |
| line | |
| onto | |
| highway | |
| whole | |
| dotted | |
| point | |
| minute | |
| road | |
| stupid | |
| twats | |
| sorry | |
| yolo | |
| shirts | |
| dont | |
| matter | |
| unless | |
| wanting | |
| sell | |
| spring | |
| ergo | |
| guess | |
| cyclical | |
| came | |
| everyone | |
| shouting | |
| dump | |
| couple | |
| small | |
| correction | |
| requires | |
| intended | |
| application | |
| framework | |
| servlet | |
| container | |
| needs | |
| endpoints | |
| components | |
| springmvc | |
| derp | |
| around | |
| darkroot | |
| garden | |
| percy | |
| harvin | |
| similar | |
| anthony | |
| thomas | |
| whenever | |
| touches | |
| ball | |
| expect | |
| score | |
| short | |
| transition | |
| four | |
| weeks | |
| edit | |
| nevermind | |
| read | |
| closely | |
| slow | |
| down | |
| taping | |
| knock | |
| wrap | |
| make | |
| keep | |
| tape | |
| oiled | |
| automatic | |
| cool | |
| watch | |
| shame | |
| shoddy | |
| quality | |
| control | |
| stories | |
| floating | |
| internet | |
| stretch | |
| eyes | |
| decay | |
| quickly | |
| hearig | |
| become | |
| develop | |
| further | |
| happens | |
| blind | |
| flying | |
| arriving | |
| complaining | |
| another | |
| flight | |
| wider | |
| seat | |
| firstworldproblems | |
| hear | |
| sociology | |
| food | |
| alright | |
| help | |
| actually | |
| implying | |
| close | |
| being | |
| equal | |
| least | |
| acting | |
| username | |
| mispronounced | |
| doob | |
| lynn | |
| capital | |
| ireland | |
| worries | |
| mostly | |
| puzzled | |
| taken | |
| vista | |
| ridge | |
| trail | |
| cove | |
| interested | |
| sponsor | |
| stay | |
| tutorial | |
| asks | |
| next | |
| tell | |
| coming | |
| picturing | |
| eric | |
| boullier | |
| passes | |
| horner | |
| paddock | |
| slowly | |
| shaking | |
| head | |
| mozilla | |
| seem | |
| taking | |
| bigger | |
| interest | |
| protecting | |
| privacy | |
| comes | |
| products | |
| looks | |
| definitely | |
| favorite | |
| cities | |
| world | |
| between | |
| beaches | |
| music | |
| women | |
| preserved | |
| wilderness | |
| mountains | |
| popping | |
| everywhere | |
| amazing | |
| place | |
| safest | |
| times | |
| getting | |
| better | |
| canadian | |
| confirm | |
| fuck | |
| esea | |
| ride | |
| invest | |
| pair | |
| cycling | |
| bibs | |
| blow | |
| mind | |
| conspiracy | |
| context | |
| girlfriend | |
| finally | |
| spent | |
| night | |
| making | |
| morning | |
| begins | |
| funny | |
| comments | |
| mistake | |
| message | |
| compose | |
| karmaconspiracy | |
| subject | |
| error | |
| chick | |
| picture | |
| hair | |
| accessory | |
| correct | |
| link | |
| rkarmaconspiracy | |
| officially | |
| leaked | |
| means | |
| advice | |
| nope | |
| kill | |
| using | |
| strategy | |
| solo | |
| legendary | |
| hard | |
| difficult | |
| never | |
| reach | |
| imgur | |
| reclaimed | |
| trees | |
| missed | |
| tattoos | |
| kurt | |
| somewhere | |
| yeah | |
| warphalange | |
| spare | |
| anyone | |
| wants | |
| earth | |
| easier | |
| dynamic | |
| demo | |
| players | |
| literally | |
| standing | |
| spot | |
| hitting | |
| mobs | |
| convinced | |
| please | |
| agreed | |
| feel | |
| elementz | |
| keeping | |
| alive | |
| edward | |
| level | |
| totally | |
| agree | |
| blend | |
| together | |
| name | |
| floridian | |
| fear | |
| closest | |
| real | |
| poutine | |
| laugh | |
| loud | |
| upvote | |
| argue | |
| kvothe | |
| tvtropes | |
| pmwiki | |
| main | |
| theace | |
| marty | |
| martystu | |
| order | |
| empty | |
| plastic | |
| action | |
| hero | |
| doll | |
| readers | |
| project | |
| wishful | |
| selves | |
| believe | |
| response | |
| linked | |
| many | |
| arguments | |
| fallacious | |
| based | |
| ignoring | |
| major | |
| parts | |
| explanations | |
| books | |
| excerpts | |
| chooses | |
| criticize | |
| judged | |
| completely | |
| isolation | |
| deliberately | |
| already | |
| liberty | |
| majority | |
| counter | |
| comment | |
| replies | |
| user | |
| mism | |
| refer | |
| argument | |
| cboye | |
| wordpress | |
| mary | |
| study | |
| closing | |
| arrogant | |
| quite | |
| cruel | |
| liar | |
| thief | |
| things | |
| consciously | |
| suffers | |
| setbacks | |
| result | |
| flaws | |
| character | |
| learns | |
| grows | |
| throughout | |
| cannot | |
| described | |
| quest | |
| game | |
| give | |
| fair | |
| chance | |
| each | |
| religious | |
| spirit | |
| positive | |
| appreciate | |
| however | |
| seen | |
| friends | |
| face | |
| said | |
| diagnosed | |
| cancer | |
| transferred | |
| during | |
| grade | |
| regular | |
| schedule | |
| summers | |
| year | |
| round | |
| uber | |
| class | |
| farted | |
| total | |
| silence | |
| hate | |
| entire | |
| thread | |
| teddiursa | |
| spainarak | |
| even | |
| senators | |
| holding | |
| phones | |
| shoving | |
| faces | |
| especially | |
| wouldn | |
| midnight | |
| doesn | |
| invitation | |
| boobs | |
| dude | |
| required | |
| always | |
| chuckle | |
| photoshopped | |
| version | |
| kitten | |
| chased | |
| domo | |
| varget | |
| buddy | |
| exact | |
| rifle | |
| specs | |
| barrel | |
| finish | |
| defiance | |
| hell | |
| anyway | |
| lapua | |
| brass | |
| lands | |
| fgmm | |
| primers | |
| problem | |
| fact | |
| wise | |
| move | |
| blades | |
| likeliness | |
| finding | |
| suitable | |
| blade | |
| serious | |
| trolling | |
| lots | |
| blatantly | |
| untrue | |
| common | |
| usage | |
| come | |
| favour | |
| meaning | |
| over | |
| archaically | |
| mean | |
| both | |
| does | |
| custom | |
| usual | |
| handicap | |
| pitted | |
| stone | |
| aught | |
| none | |
| others | |
| tenuous | |
| multiply | |
| divide | |
| oven | |
| freezer | |
| implies | |
| heating | |
| cooling | |
| tyrells | |
| allied | |
| stannis | |
| since | |
| easy | |
| influence | |
| renly | |
| lannisters | |
| gotten | |
| support | |
| vale | |
| littlefinger | |
| clear | |
| military | |
| wearing | |
| friday | |
| october | |
| leaving | |
| poppy | |
| cenotaph | |
| relatively | |
| customary | |
| stop | |
| minutes | |
| youre | |
| living | |
| past | |
| haha | |
| raiding | |
| mint | |
| doing | |
| smuggling | |
| operation | |
| side | |
| missions | |
| genki | |
| tank | |
| mayhem | |
| reckon | |
| lobotomy | |
| appears | |
| removed | |
| canadapolitics | |
| either | |
| spam | |
| filter | |
| moderators | |
| christy | |
| clark | |
| vote | |
| today | |
| count | |
| names | |
| ballot | |
| clarks | |
| submitted | |
| hour | |
| methoxyethane | |
| missing | |
| search | |
| sort | |
| restrict | |
| removal | |
| politicalmoderation | |
| title | |
| created | |
| upvotes | |
| downvotes | |
| entering | |
| spoke | |
| spoken | |
| answer | |
| less | |
| pulled | |
| aside | |
| computer | |
| generated | |
| random | |
| crap | |
| canada | |
| went | |
| shopping | |
| aaaaand | |
| shirt | |
| nice | |
| woman | |
| weighs | |
| pounds | |
| size | |
| weigh | |
| ditto | |
| speed | |
| start | |
| doritos | |
| live | |
| dogetipbot | |
| doge | |
| verify | |
| lookin | |
| fight | |
| once | |
| drew | |
| bunch | |
| dicks | |
| subliminal | |
| cocks | |
| called | |
| experiment | |
| exposed | |
| fingers | |
| monitors | |
| worker | |
| recorded | |
| phallic | |
| week | |
| conversation | |
| laughed | |
| rose | |
| average | |
| army | |
| kind | |
| talk | |
| among | |
| enlisted | |
| personnel | |
| track | |
| seriously | |
| inform | |
| immediately | |
| broken | |
| winsupersite | |
| files | |
| archive | |
| content | |
| tangible | |
| meaningful | |
| dedicate | |
| himself | |
| beaver | |
| slang | |
| term | |
| female | |
| reproductive | |
| organ | |
| click | |
| subm | |
| obama | |
| bush | |
| youtube | |
| filipinos | |
| tend | |
| plump | |
| older | |
| compared | |
| asian | |
| peoples | |
| fatty | |
| feeding | |
| desserts | |
| frequent | |
| sweet | |
| treats | |
| healthier | |
| option | |
| weightlifting | |
| yummy | |
| company | |
| apartments | |
| bronx | |
| units | |
| vacated | |
| rent | |
| controlled | |
| tenant | |
| previous | |
| owner | |
| building | |
| file | |
| increases | |
| along | |
| loves | |
| puns | |
| person | |
| book | |
| christmas | |
| listen | |
| feels | |
| festive | |
| moar | |
| hdrrrrrrrrr | |
| fashion | |
| experts | |
| righteo | |
| freedom | |
| speech | |
| exist | |
| lets | |
| shift | |
| trivial | |
| horse | |
| high | |
| clouds | |
| idiot | |
| walterwhitesp | |
| golden | |
| stopped | |
| drinking | |
| soda | |
| juices | |
| often | |
| noticed | |
| without | |
| sugar | |
| alert | |
| addition | |
| water | |
| hungry | |
| changes | |
| improved | |
| everyday | |
| energy | |
| health | |
| simplify | |
| ruby | |
| sapphire | |
| emerald | |
| dimensions | |
| sqrt | |
| scalar | |
| apologised | |
| galore | |
| half | |
| percent | |
| orthodox | |
| jews | |
| portion | |
| popcorn | |
| explains | |
| jack | |
| honestly | |
| downvote | |
| mockery | |
| preluding | |
| phrase | |
| regarding | |
| karma | |
| statement | |
| sincerely | |
| hope | |
| heart | |
| tanya | |
| huff | |
| quarters | |
| series | |
| fantasy | |
| urban | |
| posts | |
| tagpro | |
| dick | |
| screenshots | |
| spammy | |
| aight | |
| inte | |
| mycket | |
| blir | |
| exalterad | |
| buss | |
| diket | |
| research | |
| behavior | |
| animal | |
| physically | |
| taoism | |
| nature | |
| having | |
| played | |
| sierra | |
| games | |
| nostalgic | |
| gaming | |
| standards | |
| history | |
| touched | |
| dynamix | |
| btoodpf | |
| feature | |
| gdata | |
| player | |
| smell | |
| rabbit | |
| busy | |
| property | |
| jesus | |
| christ | |
| couldn | |
| true | |
| seahawk | |
| becomes | |
| particularly | |
| effective | |
| bonuses | |
| precision | |
| critical | |
| damage | |
| neglecting | |
| burst | |
| lower | |
| staff | |
| consider | |
| potential | |
| output | |
| group | |
| target | |
| blasting | |
| trait | |
| pleased | |
| superior | |
| abilities | |
| glass | |
| cannon | |
| elementalist | |
| builds | |
| warrior | |
| utility | |
| across | |
| weapons | |
| arguably | |
| play | |
| effort | |
| xbmc | |
| screen | |
| toast | |
| popup | |
| edge | |
| pollocks | |
| logic | |
| generalized | |
| therefore | |
| thinking | |
| premise | |
| meaningless | |
| points | |
| whoa | |
| greyhound | |
| leash | |
| fence | |
| recall | |
| beauty | |
| witnessed | |
| udyrs | |
| wanna | |
| nunu | |
| needles | |
| failed | |
| terribly | |
| blaming | |
| team | |
| thankfully | |
| banned | |
| fernseh | |
| versteht | |
| noch | |
| aber | |
| trotzdem | |
| sieht | |
| dialektbehaftet | |
| schweizer | |
| sind | |
| oder | |
| hatte | |
| verlieren | |
| sich | |
| deutsche | |
| hineinzumischen | |
| deutscher | |
| grenzgebiet | |
| konstanz | |
| warum | |
| ganz | |
| leicht | |
| schwitzelt | |
| charges | |
| denote | |
| successfully | |
| invited | |
| unsub | |
| lame | |
| garbage | |
| posted | |
| gone | |
| relax | |
| giving | |
| puppy | |
| dogs | |
| home | |
| paid | |
| donation | |
| organization | |
| experienced | |
| hospice | |
| dying | |
| loved | |
| irresponsible | |
| ones | |
| bidding | |
| done | |
| current | |
| plane | |
| moment | |
| items | |
| hack | |
| blame | |
| speeding | |
| soul | |
| farming | |
| tedious | |
| repeditive | |
| thank | |
| honest | |
| dates | |
| self | |
| esteem | |
| raised | |
| lowered | |
| numb | |
| myself | |
| girls | |
| visited | |
| profile | |
| send | |
| browser | |
| copy | |
| paste | |
| kinect | |
| gathers | |
| data | |
| second | |
| transmitting | |
| unlimited | |
| plans | |
| limit | |
| assuming | |
| sending | |
| compressed | |
| video | |
| audio | |
| typical | |
| teleconferencing | |
| systems | |
| bare | |
| minimum | |
| understood | |
| bitrate | |
| besides | |
| microsoft | |
| storing | |
| processing | |
| huge | |
| struggle | |
| xbox | |
| uptime | |
| containing | |
| secret | |
| network | |
| codes | |
| signing | |
| tummy | |
| pussy | |
| finger | |
| bangs | |
| film | |
| adaption | |
| story | |
| summer | |
| sale | |
| gabe | |
| rich | |
| register | |
| party | |
| aware | |
| benefit | |
| registered | |
| member | |
| allowed | |
| primary | |
| elections | |
| stable | |
| viper | |
| withhold | |
| judgements | |
| jerk | |
| detail | |
| nasus | |
| picked | |
| farm | |
| kinda | |
| irrelevent | |
| acctually | |
| surviavablity | |
| penetrate | |
| sololane | |
| important | |
| gold | |
| tankyness | |
| survive | |
| jungle | |
| creep | |
| kills | |
| auto | |
| attack | |
| reset | |
| awwww | |
| thanx | |
| fucked | |
| reason | |
| wallpaper | |
| aero | |
| clue | |
| words | |
| jokes | |
| obviously | |
| business | |
| experimenting | |
| worse | |
| sound | |
| fatigue | |
| accurate | |
| assess | |
| biological | |
| mechanical | |
| markers | |
| saying | |
| assessment | |
| posting | |
| yourself | |
| expert | |
| complicated | |
| helps | |
| symptom | |
| claim | |
| reaction | |
| occurs | |
| gunna | |
| treat | |
| different | |
| seemed | |
| aggressive | |
| angry | |
| realize | |
| curse | |
| snarky | |
| clothes | |
| silly | |
| laughing | |
| chronos | |
| agni | |
| stream | |
| inventory | |
| needed | |
| thrown | |
| furnace | |
| logan | |
| fixing | |
| obvious | |
| fall | |
| moving | |
| windows | |
| deal | |
| features | |
| desktop | |
| button | |
| taskbar | |
| networking | |
| access | |
| peripherals | |
| became | |
| affordable | |
| plug | |
| plenty | |
| bugs | |
| success | |
| parkinson | |
| table | |
| nudes | |
| xhamster | |
| photos | |
| view | |
| html | |
| imgtop | |
| entirely | |
| animoog | |
| preset | |
| effect | |
| bought | |
| built | |
| ikea | |
| chair | |
| comfortable | |
| fail | |
| english | |
| speak | |
| bitch | |
| daniel | |
| dennett | |
| argues | |
| scotsman | |
| radicals | |
| certainly | |
| allah | |
| relevant | |
| peaceful | |
| issue | |
| national | |
| warning | |
| attitude | |
| economic | |
| pump | |
| love | |
| wrestling | |
| officials | |
| drop | |
| elbow | |
| fake | |
| injuries | |
| magic | |
| spray | |
| hondurans | |
| goal | |
| ahead | |
| badass | |
| twilight | |
| newsgab | |
| attachments | |
| celebrity | |
| pictures | |
| teresa | |
| palmer | |
| esquire | |
| stewart | |
| boyfriend | |
| husband | |
| tiger | |
| anniversary | |
| antique | |
| carved | |
| ivory | |
| bedside | |
| choose | |
| wisely | |
| daylight | |
| matt | |
| photo | |
| pathetic | |
| guudelp | |
| https | |
| status | |
| tonight | |
| mindcrackers | |
| stabbed | |
| hype | |
| sunday | |
| tweetposter | |
| above | |
| unaltered | |
| suggestion | |
| translate | |
| statistics | |
| stathat | |
| decks | |
| juggernaut | |
| biiitttch | |
| sport | |
| everytime | |
| little | |
| bitches | |
| poor | |
| explorer | |
| sitting | |
| corner | |
| rotting | |
| spend | |
| amount | |
| lurkers | |
| upvoted | |
| maybe | |
| daymn | |
| thats | |
| cute | |
| mahrunes | |
| razor | |
| deadra | |
| hearts | |
| shell | |
| mild | |
| submission | |
| annoyed | |
| brown | |
| suit | |
| patton | |
| oswald | |
| epic | |
| stars | |
| wars | |
| rant | |
| committed | |
| part | |
| censorship | |
| javits | |
| center | |
| east | |
| inconvenient | |
| jersey | |
| giants | |
| jets | |
| stadiums | |
| mother | |
| soloq | |
| definetly | |
| requiered | |
| skill | |
| threw | |
| elos | |
| rate | |
| extremly | |
| continue | |
| aswell | |
| tier | |
| professional | |
| fast | |
| soon | |
| challenger | |
| mention | |
| cross | |
| console | |
| whaaaaat | |
| essay | |
| fine | |
| probable | |
| cause | |
| reasonable | |
| suspicion | |
| prove | |
| american | |
| criminal | |
| justice | |
| days | |
| crying | |
| cdhul | |
| sister | |
| frog | |
| shaped | |
| purse | |
| foolish | |
| change | |
| patterns | |
| established | |
| exactly | |
| happened | |
| brainard | |
| airport | |
| hartford | |
| instrument | |
| approach | |
| runway | |
| south | |
| offset | |
| degrees | |
| actual | |
| noise | |
| abatement | |
| planes | |
| river | |
| homes | |
| wethersfield | |
| maps | |
| localizer | |
| antenna | |
| structure | |
| khfd | |
| sspn | |
| partner | |
| attractive | |
| goofing | |
| ends | |
| giggling | |
| turned | |
| incredible | |
| intro | |
| edition | |
| mere | |
| christianity | |
| lewis | |
| talks | |
| uncomfortable | |
| hybrid | |
| speaking | |
| talking | |
| desribes | |
| bell | |
| writing | |
| style | |
| genius | |
| precisely | |
| pinpoint | |
| areas | |
| discomfort | |
| confusion | |
| evangelicalism | |
| large | |
| triggers | |
| immune | |
| discussion | |
| healing | |
| pain | |
| cant | |
| feeling | |
| certain | |
| overall | |
| felt | |
| normal | |
| stuck | |
| analyzing | |
| lives | |
| north | |
| central | |
| ohio | |
| zone | |
| microclimate | |
| warmer | |
| microclimates | |
| near | |
| house | |
| envelope | |
| several | |
| yard | |
| mulch | |
| beds | |
| musa | |
| sikkimensis | |
| zebrina | |
| rojo | |
| corm | |
| transplant | |
| outside | |
| march | |
| cold | |
| april | |
| warm | |
| bring | |
| potted | |
| tropicals | |
| basjoo | |
| bananas | |
| sprouted | |
| foot | |
| crabapple | |
| empress | |
| bloom | |
| middle | |
| everything | |
| explodes | |
| leaf | |
| forgot | |
| original | |
| question | |
| plan | |
| friended | |
| loving | |
| calls | |
| human | |
| goes | |
| dismiss | |
| classification | |
| retarded | |
| discussig | |
| videogames | |
| trolls | |
| arent | |
| azubu | |
| screwed | |
| koreans | |
| earlier | |
| promosing | |
| seems | |
| luck | |
| abuzu | |
| screw | |
| apex | |
| creating | |
| rides | |
| functioning | |
| park | |
| workbenches | |
| seeing | |
| specifics | |
| existing | |
| bench | |
| marking | |
| match | |
| unclassified | |
| permission | |
| nasa | |
| casi | |
| ntrs | |
| server | |
| tomorrow | |
| puzzle | |
| solve | |
| answers | |
| released | |
| infuriated | |
| seine | |
| crossing | |
| camp | |
| suddenly | |
| tanks | |
| infantry | |
| goggles | |
| supernova | |
| wipes | |
| planet | |
| clean | |
| blowing | |
| nearby | |
| black | |
| hole | |
| given | |
| galactic | |
| timescale | |
| civilized | |
| planets | |
| indeed | |
| unlikely | |
| affected | |
| endure | |
| period | |
| instantly | |
| expectation | |
| mcavoy | |
| news | |
| anchor | |
| existed | |
| desensitized | |
| nudity | |
| coca | |
| cola | |
| almost | |
| century | |
| variations | |
| deeply | |
| plebes | |
| understanding | |
| discount | |
| default | |
| guilty | |
| sins | |
| fans | |
| knowledge | |
| typography | |
| condescending | |
| trust | |
| issues | |
| porn | |
| adjusted | |
| lyric | |
| chalkboard | |
| signs | |
| front | |
| page | |
| month | |
| stolen | |
| videos | |
| simpleton | |
| understand | |
| factual | |
| stating | |
| paul | |
| mccartney | |
| legged | |
| password | |
| filling | |
| disappears | |
| trusted | |
| welcome | |
| circling | |
| stays | |
| untill | |
| pops | |
| used | |
| immediatly | |
| seconds | |
| changed | |
| excuse | |
| hostility | |
| goodness | |
| gardens | |
| separate | |
| venture | |
| sells | |
| produce | |
| supermarkets | |
| snake | |
| fresh | |
| herbs | |
| cooking | |
| pure | |
| gimmicky | |
| flavors | |
| aroma | |
| produced | |
| rosemary | |
| thyme | |
| sage | |
| placebo | |
| pour | |
| vegetable | |
| turkey | |
| taste | |
| ridiculous | |
| heretic | |
| lunacy | |
| explanation | |
| dosages | |
| product | |
| pledge | |
| asked | |
| simple | |
| ingredient | |
| learn | |
| interact | |
| properly | |
| diligence | |
| falsely | |
| accusing | |
| owned | |
| industry | |
| columbine | |
| schools | |
| moved | |
| arizona | |
| metal | |
| detectors | |
| security | |
| colorado | |
| nothin | |
| campus | |
| guards | |
| miles | |
| away | |
| forte | |
| lacy | |
| andre | |
| megatron | |
| johnson | |
| liked | |
| floor | |
| fell | |
| lava | |
| beta | |
| frustrating | |
| personally | |
| somewhat | |
| predictable | |
| ceiling | |
| cave | |
| touch | |
| unpredictable | |
| private | |
| university | |
| meant | |
| blithering | |
| moron | |
| richo | |
| mannnnn | |
| mannnnnnnnnnnnn | |
| trading | |
| clarifying | |
| confused | |
| seattles | |
| months | |
| emissions | |
| resolved | |
| diagnostically | |
| check | |
| fuel | |
| weet | |
| pectacular | |
| pectator | |
| porting | |
| event | |
| steam | |
| lumberjack | |
| wondering | |
| surely | |
| cares | |
| hilarious | |
| scenario | |
| lead | |
| anal | |
| german | |
| scared | |
| harsh | |
| accurately | |
| text | |
| gather | |
| naive | |
| office | |
| inaction | |
| incompetence | |
| must | |
| regret | |
| specific | |
| literature | |
| teach | |
| specifically | |
| intends | |
| explain | |
| northern | |
| hemi | |
| wont | |
| teams | |
| hash | |
| income | |
| wowcomics | |
| phone | |
| wikipedia | |
| wiki | |
| bigfin | |
| squid | |
| eggs | |
| process | |
| hatching | |
| donald | |
| glover | |
| hahaha | |
| stealing | |
| reposting | |
| adding | |
| imaginary | |
| psychological | |
| flowing | |
| imagine | |
| five | |
| efforts | |
| worked | |
| student | |
| loan | |
| relate | |
| mcvenom | |
| pack | |
| mindcrack | |
| rooms | |
| prematurely | |
| starts | |
| drumming | |
| detailpage | |
| finale | |
| accept | |
| looked | |
| gothic | |
| kruty | |
| klown | |
| meth | |
| storebought | |
| sushi | |
| rather | |
| telling | |
| malignancy | |
| dangerous | |
| happy | |
| allow | |
| simmering | |
| scale | |
| distance | |
| penny | |
| anyways | |
| apologies | |
| tracking | |
| attract | |
| unwanted | |
| regulatory | |
| downvoted | |
| truth | |
| islamic | |
| society | |
| detained | |
| considered | |
| surprisingly | |
| moderate | |
| punishment | |
| homosexuality | |
| reading | |
| carefully | |
| fairly | |
| information | |
| kuwait | |
| save | |
| opinions | |
| disagree | |
| timeouts | |
| additional | |
| sole | |
| advertising | |
| commericals | |
| outs | |
| possession | |
| breaks | |
| legitimate | |
| extended | |
| commercials | |
| peace | |
| board | |
| papercrane | |
| pointed | |
| slipped | |
| comparing | |
| boards | |
| previously | |
| identical | |
| vrms | |
| connector | |
| worry | |
| waving | |
| cent | |
| course | |
| told | |
| inscrutable | |
| espn | |
| radio | |
| plugs | |
| format | |
| describe | |
| followed | |
| subway | |
| opinion | |
| shit | |
| wheat | |
| sandwich | |
| featured | |
| footlong | |
| john | |
| shithead | |
| validate | |
| penzoil | |
| engine | |
| hotline | |
| leader | |
| stereotype | |
| applies | |
| females | |
| male | |
| driver | |
| drives | |
| souped | |
| subaru | |
| likes | |
| drift | |
| driving | |
| experience | |
| stereotypes | |
| flag | |
| doesnt | |
| america | |
| choo | |
| motherf | |
| nooooooo | |
| path | |
| paper | |
| allows | |
| upgrades | |
| standard | |
| degrade | |
| stat | |
| scaling | |
| shots | |
| model | |
| works | |
| suggest | |
| meskell | |
| deals | |
| gender | |
| ancient | |
| egypt | |
| directly | |
| pertaining | |
| joyce | |
| mesoamerican | |
| voss | |
| china | |
| spanish | |
| colonialism | |
| butler | |
| studies | |
| universally | |
| regarded | |
| seminal | |
| source | |
| materials | |
| aspects | |
| egyptian | |
| explored | |
| constraints | |
| written | |
| sources | |
| overlooked | |
| gratin | |
| dauphinois | |
| crois | |
| difficile | |
| choisir | |
| merci | |
| donner | |
| faim | |
| sayin | |
| safe | |
| operating | |
| surrounded | |
| vehicles | |
| contain | |
| proportional | |
| amounts | |
| idiots | |
| drunks | |
| drivers | |
| wasn | |
| cinematic | |
| trailer | |
| poorly | |
| gameplay | |
| knot | |
| paracord | |
| core | |
| strands | |
| tighter | |
| soft | |
| highschool | |
| carrying | |
| clearly | |
| walking | |
| respond | |
| figured | |
| fired | |
| exception | |
| boss | |
| themselves | |
| lifers | |
| according | |
| housewives | |
| dyes | |
| negative | |
| impact | |
| children | |
| ability | |
| turns | |
| programming | |
| beat | |
| tiny | |
| hugely | |
| elated | |
| sharing | |
| myspace | |
| adults | |
| subscribed | |
| hydras | |
| zerg | |
| drops | |
| roaches | |
| lings | |
| simcity | |
| science | |
| spindle | |
| dice | |
| periwinkle | |
| shooting | |
| yarrow | |
| paintbrushes | |
| foreground | |
| range | |
| california | |
| alpine | |
| yellow | |
| flowers | |
| disturbed | |
| land | |
| identified | |
| dandelion | |
| conifers | |
| lodgepole | |
| whitebark | |
| spruce | |
| limber | |
| pine | |
| rules | |
| treelines | |
| sierras | |
| alaska | |
| cascade | |
| treeline | |
| klamath | |
| trinities | |
| yolla | |
| bollies | |
| douglas | |
| white | |
| rock | |
| fields | |
| serpentine | |
| serpenetine | |
| confirms | |
| steep | |
| relief | |
| peak | |
| snow | |
| plus | |
| places | |
| developed | |
| campground | |
| location | |
| flora | |
| basin | |
| mexico | |
| montana | |
| church | |
| background | |
| elders | |
| quote | |
| doctrinal | |
| appointed | |
| pastors | |
| overseers | |
| titus | |
| serve | |
| under | |
| shepherds | |
| chief | |
| shepherd | |
| returns | |
| peter | |
| deacons | |
| timothy | |
| assist | |
| ministry | |
| dominant | |
| goalie | |
| raise | |
| quick | |
| modest | |
| crowd | |
| crazy | |
| expected | |
| western | |
| conference | |
| counts | |
| eaten | |
| child | |
| assed | |
| glaring | |
| plot | |
| holes | |
| development | |
| horrible | |
| planning | |
| conclusion | |
| triangles | |
| sides | |
| radii | |
| pythagorean | |
| theorem | |
| third | |
| guarantee | |
| positives | |
| negatives | |
| permadeath | |
| permanent | |
| death | |
| comr | |
| beginning | |
| meet | |
| bitcoins | |
| alley | |
| hoodie | |
| staring | |
| ground | |
| mathematically | |
| provable | |
| anonymity | |
| scheme | |
| realm | |
| crypto | |
| heard | |
| laundering | |
| services | |
| wank | |
| daughter | |
| bull | |
| crew | |
| episodes | |
| spongebob | |
| remotely | |
| amusing | |
| straight | |
| simon | |
| levay | |
| full | |
| muehehhehehehe | |
| mine | |
| withdrawal | |
| symptoms | |
| companies | |
| religion | |
| contributed | |
| regression | |
| following | |
| roman | |
| empire | |
| basing | |
| societal | |
| beliefs | |
| ftfy | |
| klopper | |
| rampage | |
| dinosaurs | |
| photograph | |
| misleading | |
| plato | |
| socrates | |
| divine | |
| capacity | |
| forms | |
| supernatural | |
| considers | |
| forebears | |
| interpret | |
| insult | |
| dislike | |
| boehner | |
| priority | |
| offensive | |
| playcaller | |
| list | |
| quit | |
| wasting | |
| soluble | |
| alcohol | |
| propane | |
| butane | |
| hexane | |
| smoke | |
| extracting | |
| extract | |
| goods | |
| googling | |
| solubility | |
| isnt | |
| potentcy | |
| method | |
| bubble | |
| bags | |
| extraction | |
| socks | |
| polyamorous | |
| define | |
| relationships | |
| poly | |
| relationship | |
| monogamous | |
| nearly | |
| relied | |
| steadfast | |
| open | |
| parted | |
| opposite | |
| apart | |
| believing | |
| compromising | |
| ultimately | |
| decide | |
| realized | |
| premised | |
| situation | |
| hadn | |
| signed | |
| passing | |
| jealous | |
| knew | |
| genetics | |
| grand | |
| dating | |
| below | |
| upside | |
| believed | |
| share | |
| value | |
| depreciated | |
| calculated | |
| tough | |
| decision | |
| depressed | |
| polyandrous | |
| happier | |
| staunch | |
| monogamist | |
| options | |
| helm | |
| sticking | |
| entered | |
| trying | |
| problematic | |
| dissatisfied | |
| void | |
| filled | |
| justifiable | |
| changing | |
| mono | |
| significant | |
| circumstances | |
| becoming | |
| permanently | |
| maimed | |
| factors | |
| acceptable | |
| working | |
| concede | |
| enter | |
| bait | |
| switch | |
| futile | |
| attempt | |
| salvage | |
| waning | |
| happening | |
| resentment | |
| toward | |
| rule | |
| seeks | |
| desire | |
| quelled | |
| desires | |
| forward | |
| beyondtwo | |
| backdrop | |
| erik | |
| ruined | |
| zedong | |
| communist | |
| dicator | |
| charge | |
| wwii | |
| ship | |
| paying | |
| aitch | |
| eych | |
| dictionary | |
| browse | |
| pronounciation | |
| known | |
| pronounced | |
| haitch | |
| annoying | |
| starting | |
| aspirated | |
| peeve | |
| novel | |
| dracula | |
| weaker | |
| college | |
| woods | |
| joint | |
| subreddit | |
| disturbing | |
| election | |
| jump | |
| rope | |
| running | |
| treatment | |
| bradley | |
| manning | |
| reasons | |
| textless | |
| feedback | |
| oxford | |
| derived | |
| hnutu | |
| germanic | |
| origin | |
| related | |
| dutch | |
| noot | |
| nuss | |
| props | |
| geoff | |
| emerick | |
| helped | |
| pull | |
| creative | |
| sounds | |
| oatmeal | |
| breakfast | |
| butter | |
| raisins | |
| skilled | |
| okay | |
| bothering | |
| married | |
| town | |
| midwestern | |
| twenties | |
| thus | |
| ideal | |
| marriageable | |
| overwhelming | |
| atmosphere | |
| beyond | |
| occasionally | |
| meeting | |
| assume | |
| understandable | |
| goals | |
| personal | |
| failing | |
| persistent | |
| pressure | |
| supposed | |
| shitty | |
| movies | |
| focusing | |
| weddings | |
| unoriginal | |
| debeers | |
| pushing | |
| engagement | |
| rings | |
| sandals | |
| honeymoon | |
| resorts | |
| huggies | |
| newest | |
| diaper | |
| eventual | |
| shave | |
| whatever | |
| possible | |
| greater | |
| motivation | |
| capitalize | |
| social | |
| construct | |
| population | |
| participate | |
| skills | |
| degree | |
| misconstrue | |
| messages | |
| newlyweds | |
| qualified | |
| psychologist | |
| alone | |
| author | |
| goddamn | |
| huffingtonpost | |
| helen | |
| smith | |
| successful | |
| marriage | |
| constitute | |
| matters | |
| chapter | |
| scawwy | |
| execs | |
| jobs | |
| unicorn | |
| removing | |
| replace | |
| figure | |
| networks | |
| delete | |
| account | |
| cigarettes | |
| drink | |
| bars | |
| clubs | |
| pubs | |
| parties | |
| weed | |
| started | |
| afford | |
| foods | |
| tastebuds | |
| overwhelmed | |
| flavour | |
| flush | |
| palette | |
| balsamic | |
| vinegar | |
| blue | |
| cheese | |
| favourite | |
| salad | |
| gives | |
| hover | |
| disconnected | |
| pretend | |
| involved | |
| careful | |
| notgiveafuckistan | |
| cliffs | |
| isolationia | |
| boat | |
| lonely | |
| paddle | |
| supporting | |
| journal | |
| ngaf | |
| imho | |
| coat | |
| paulsmith | |
| shop | |
| coats | |
| outerwear | |
| mens | |
| navy | |
| trousers | |
| topman | |
| webapp | |
| stores | |
| productdisplay | |
| beginindex | |
| viewallflag | |
| catalogid | |
| storeid | |
| productid | |
| langid | |
| field | |
| newness | |
| categoryid | |
| parent | |
| pagesize | |
| trainers | |
| suprafootwear | |
| press | |
| shut | |
| poll | |
| xboxonevspsfour | |
| nowhere | |
| amazon | |
| claims | |
| roughly | |
| votes | |
| polls | |
| cheated | |
| liverpool | |
| relegation | |
| candidates | |
| houllier | |
| weave | |
| slower | |
| traffic | |
| cyclist | |
| mile | |
| necessary | |
| avoid | |
| nuisance | |
| legal | |
| alternative | |
| minimize | |
| annoyance | |
| bike | |
| lane | |
| safer | |
| pedestrians | |
| surroundings | |
| yield | |
| follow | |
| occasional | |
| crazed | |
| rocker | |
| ignore | |
| moves | |
| illegal | |
| releases | |
| pawn | |
| metaphor | |
| slap | |
| yewtueb | |
| trained | |
| marathon | |
| rookie | |
| marathonrookie | |
| training | |
| ready | |
| beginners | |
| build | |
| walk | |
| recommendation | |
| trade | |
| dream | |
| extensively | |
| proviso | |
| eevee | |
| tobey | |
| image | |
| west | |
| midlands | |
| fire | |
| service | |
| songs | |
| fungus | |
| amongus | |
| enjoy | |
| incubus | |
| bass | |
| string | |
| transpose | |
| ballsy | |
| myth | |
| structures | |
| surmounted | |
| government | |
| angels | |
| corrupted | |
| corruptible | |
| create | |
| bureaucratic | |
| perpetuate | |
| regardless | |
| whether | |
| creates | |
| punish | |
| crimes | |
| emptively | |
| harm | |
| entices | |
| evading | |
| justified | |
| aren | |
| punishing | |
| attempting | |
| empt | |
| depriving | |
| encourage | |
| reclaim | |
| lost | |
| hayek | |
| discusses | |
| establish | |
| aspect | |
| weakened | |
| republicans | |
| rebuild | |
| imputing | |
| properties | |
| cheerleader | |
| young | |
| mentally | |
| emotionally | |
| abused | |
| manipulated | |
| mental | |
| medication | |
| lying | |
| abusive | |
| stand | |
| funsized | |
| jugwy | |
| thoughts | |
| minimalist | |
| although | |
| legibility | |
| wanted | |
| light | |
| administration | |
| hoping | |
| revolt | |
| courts | |
| steps | |
| label | |
| attracted | |
| whoever | |
| connection | |
| neat | |
| tidy | |
| category | |
| rode | |
| flew | |
| window | |
| closed | |
| absofuckinglutely | |
| terrifying | |
| zero | |
| jumped | |
| dance | |
| pokemon | |
| continental | |
| pattern | |
| heads | |
| heading | |
| interned | |
| japanese | |
| americans | |
| coast | |
| citizens | |
| diphtheria | |
| zrane | |
| stupidity | |
| solutions | |
| wartime | |
| blaze | |
| glory | |
| invent | |
| medal | |
| honor | |
| preferrably | |
| fighter | |
| pilot | |
| phlebas | |
| enjoyed | |
| knowing | |
| individual | |
| base | |
| completionist | |
| ended | |
| exclusively | |
| kindle | |
| lately | |
| banks | |
| novels | |
| depending | |
| storylines | |
| rare | |
| eventually | |
| library | |
| bravo | |
| habitat | |
| restore | |
| door | |
| knobs | |
| mechanisms | |
| nicer | |
| refurbishing | |
| latches | |
| charm | |
| scrape | |
| soak | |
| sand | |
| paint | |
| rustoleum | |
| colored | |
| hammer | |
| hardware | |
| qkme | |
| shittier | |
| denying | |
| reality | |
| affirming | |
| virtuous | |
| encouraging | |
| decided | |
| veracity | |
| fairy | |
| tale | |
| wherein | |
| illiterate | |
| arab | |
| warlord | |
| rational | |
| nutjob | |
| deeds | |
| worthless | |
| harmless | |
| tells | |
| ladies | |
| street | |
| craziness | |
| wife | |
| laundry | |
| chop | |
| sword | |
| imam | |
| pissed | |
| confronting | |
| muslim | |
| violence | |
| rests | |
| squarely | |
| proclaimed | |
| mentioned | |
| acts | |
| inspired | |
| invisible | |
| voice | |
| highest | |
| virtue | |
| announced | |
| existence | |
| nervous | |
| updates | |
| switched | |
| hinduism | |
| transitioned | |
| insanity | |
| traded | |
| trouble | |
| comprehending | |
| realization | |
| fallen | |
| faith | |
| evidently | |
| bullshit | |
| oppresses | |
| adherents | |
| encourages | |
| murder | |
| apostates | |
| adulterers | |
| members | |
| faiths | |
| unwilling | |
| convert | |
| yours | |
| improvement | |
| harmful | |
| carry | |
| presented | |
| eloquently | |
| greta | |
| christina | |
| armor | |
| freethoughtblogs | |
| gawker | |
| alternate | |
| universe | |
| muddied | |
| including | |
| connor | |
| photoengraving | |
| wildly | |
| determined | |
| chromosome | |
| inherited | |
| wake | |
| install | |
| pointless | |
| toolbars | |
| alongside | |
| programs | |
| management | |
| card | |
| convienent | |
| machine | |
| imaginable | |
| multiple | |
| antivirus | |
| photoshop | |
| software | |
| bundled | |
| printer | |
| medically | |
| doubt | |
| asses | |
| bones | |
| neck | |
| paramedic | |
| freaked | |
| scrolling | |
| wondered | |
| worded | |
| trigger | |
| unblocked | |
| creature | |
| madara | |
| future | |
| boots | |
| belly | |
| undoubtedly | |
| finest | |
| somebody | |
| bitter | |
| christians | |
| bible | |
| damning | |
| unit | |
| longshot | |
| retail | |
| woofers | |
| frequencies | |
| floorstanders | |
| mastered | |
| record | |
| unbreakable | |
| atheist | |
| bother | |
| practicing | |
| christian | |
| catholic | |
| noseover | |
| maneuver | |
| evangelion | |
| canon | |
| ending | |
| budget | |
| humour | |
| cigarette | |
| liquid | |
| nicotine | |
| responses | |
| personalfinance | |
| frontlines | |
| investigation | |
| retirement | |
| frontline | |
| exposes | |
| scam | |
| fees | |
| bored | |
| shuddered | |
| carpet | |
| filthy | |
| sorta | |
| anime | |
| macross | |
| robotech | |
| understands | |
| chronic | |
| sleeping | |
| include | |
| responded | |
| sayng | |
| sounded | |
| depressing | |
| diving | |
| parachute | |
| malfunction | |
| forever | |
| doctors | |
| texas | |
| require | |
| particular | |
| curriculum | |
| settled | |
| public | |
| secular | |
| material | |
| online | |
| instructions | |
| revive | |
| tourist | |
| district | |
| kirkman | |
| metrowest | |
| parramore | |
| hills | |
| hands | |
| towns | |
| pink | |
| dark | |
| describes | |
| ensure | |
| streaks | |
| maths | |
| crop | |
| lens | |
| exposure | |
| extend | |
| scary | |
| movie | |
| bagnon | |
| fixed | |
| joke | |
| latin | |
| hispanic | |
| race | |
| technical | |
| garner | |
| lesser | |
| rights | |
| season | |
| projections | |
| indicate | |
| matchups | |
| weren | |
| indication | |
| kansas | |
| worthy | |
| higher | |
| false | |
| passed | |
| showing | |
| arguing | |
| against | |
| draw | |
| note | |
| gains | |
| benefits | |
| fireblast | |
| multicast | |
| unions | |
| tipbot | |
| behind | |
| cases | |
| tips | |
| pending | |
| expire | |
| provided | |
| itself | |
| typo | |
| multi | |
| dimension | |
| techonolgy | |
| tardis | |
| grapefruit | |
| sting | |
| terrible | |
| color | |
| leds | |
| spaced | |
| subjective | |
| ukrainians | |
| exile | |
| extremely | |
| ukraine | |
| politics | |
| born | |
| protein | |
| myostatin | |
| pics | |
| myhqxzt | |
| barinfiniti | |
| mobiusdicks | |
| automated | |
| originalfinder | |
| index | |
| finder | |
| brandon | |
| wasnt | |
| talked | |
| sucks | |
| bonus | |
| break | |
| itching | |
| cake | |
| retconning | |
| timeline | |
| serenity | |
| explore | |
| firefly | |
| characters | |
| flaying | |
| code | |
| banning | |
| sentencing | |
| absentia | |
| fighting | |
| slaves | |
| sons | |
| plantation | |
| owners | |
| latter | |
| definition | |
| escape | |
| scarra | |
| voyboy | |
| theoddone | |
| doublelift | |
| vision | |
| hardest | |
| washington | |
| sony | |
| launch | |
| lineup | |
| weak | |
| driveclub | |
| pushed | |
| factor | |
| inevitably | |
| catalog | |
| cable | |
| mode | |
| wether | |
| tole | |
| mess | |
| device | |
| router | |
| residential | |
| modems | |
| modem | |
| begin | |
| visit | |
| forums | |
| thorough | |
| explaining | |
| nerfs | |
| buffs | |
| riot | |
| rarely | |
| complaints | |
| community | |
| reacts | |
| concerned | |
| extreme | |
| dissatisfaction | |
| unjustifiable | |
| judging | |
| cynics | |
| circlejerking | |
| complaint | |
| complain | |
| succumb | |
| forces | |
| extrasolar | |
| dips | |
| intensity | |
| infer | |
| galaxies | |
| guessing | |
| brilliant | |
| pmsing | |
| type | |
| faster | |
| gears | |
| impressed | |
| deaverveiw | |
| supposing | |
| desired | |
| expression | |
| absolute | |
| function | |
| midpoint | |
| expressions | |
| francis | |
| approved | |
| disappointment | |
| reflected | |
| licences | |
| shadowy | |
| irish | |
| offshore | |
| represent | |
| interests | |
| presumably | |
| prices | |
| copeland | |
| waiting | |
| clown | |
| evil | |
| grin | |
| float | |
| georgie | |
| sing | |
| alot | |
| pissing | |
| suburban | |
| checked | |
| shipping | |
| taxes | |
| mcclane | |
| sylvester | |
| stallone | |
| direct | |
| happen | |
| australia | |
| stocks | |
| dough | |
| forget | |
| certificates | |
| certifications | |
| area | |
| certs | |
| individually | |
| researching | |
| editions | |
| insanely | |
| ears | |
| mugham | |
| african | |
| eastern | |
| folk | |
| azeri | |
| segah | |
| youngster | |
| mirelem | |
| mirelemov | |
| hypnotic | |
| vibe | |
| cakeday | |
| introduce | |
| tolkien | |
| hasn | |
| hobbit | |
| lord | |
| ursula | |
| leguin | |
| earthsea | |
| bookstore | |
| grown | |
| sabur | |
| sakai | |
| samurai | |
| japan | |
| died | |
| renouncing | |
| fostering | |
| tricky | |
| links | |
| local | |
| atlanta | |
| organizations | |
| rescuemeanimalproject | |
| pages | |
| rescue | |
| animalrescueassistance | |
| bully | |
| maxed | |
| cats | |
| pearls | |
| denieru | |
| violated | |
| grezzles | |
| wore | |
| gloves | |
| correctly | |
| quarterback | |
| practice | |
| warmth | |
| wear | |
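A minimal sketch of one way a word list like the one above could be consumed by the benchmark: load it one word per line, then sample a fixed number of words per FTS query. The file name words.txt matches the gist's stated input, but the helper names, the words_per_query parameter, and the seeding are illustrative assumptions, not part of the benchmark script itself.

#!/usr/bin/env python3
# Hypothetical loader/sampler for the word list above; names and
# parameters are assumptions for illustration.
import random

def load_words(path="words.txt"):
    # One word per line; skip any blank lines.
    with open(path) as f:
        return [line.strip() for line in f if line.strip()]

def build_fts_query(words, words_per_query=3, rng=None):
    # Draw exactly words_per_query distinct words and join them into a
    # single space-separated FTS query string.
    rng = rng or random.Random()
    return " ".join(rng.sample(words, words_per_query))

if __name__ == "__main__":
    words = load_words()
    # Seeded RNG so repeated benchmark runs issue the same queries.
    print(build_fts_query(words, words_per_query=3, rng=random.Random(42)))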