Skip to content

Instantly share code, notes, and snippets.

@kntjspr
Created March 7, 2026 07:38
Show Gist options
  • Select an option

  • Save kntjspr/bda8443b54a854c574b2123e4f0c9ebc to your computer and use it in GitHub Desktop.

Select an option

Save kntjspr/bda8443b54a854c574b2123e4f0c9ebc to your computer and use it in GitHub Desktop.
import time
import random
from concurrent.futures import ThreadPoolExecutor, as_completed
# Sample batch of customer orders to push through the fulfilment pipeline.
orders = [
    {"id": 1, "customer": "Alice Martin", "item": "Laptop", "amount": 1299.99},
    {"id": 2, "customer": "Bob Chen", "item": "Wireless Mouse", "amount": 39.99},
    {"id": 3, "customer": "Carol White", "item": "Mechanical Keyboard", "amount": 129.99},
    {"id": 4, "customer": "David Kim", "item": "Monitor 27", "amount": 449.99},
    {"id": 5, "customer": "Eva Gonzalez", "item": "USB-C Hub", "amount": 59.99},
    {"id": 6, "customer": "Frank Okafor", "item": "Webcam HD", "amount": 89.99},
    {"id": 7, "customer": "Grace Liu", "item": "Headphones", "amount": 199.99},
    {"id": 8, "customer": "Hiro Tanaka", "item": "SSD 1TB", "amount": 99.99},
    {"id": 9, "customer": "Isla Patel", "item": "Smart Watch", "amount": 249.99},
    {"id": 10, "customer": "Jake Andersen", "item": "Desk Lamp", "amount": 34.99},
]
def process_order(order):
    """Simulate the four-stage fulfilment pipeline for a single order.

    Args:
        order: dict with keys "id", "customer", "item", and "amount".

    Returns:
        None. Progress is reported via print(); an order with a
        non-positive amount fails verification and stops early.
    """
    # Renamed from `id` — that name shadows the builtin id().
    order_id = order["id"]
    print(f"Processing Order {order_id}...")

    # Step 1 - verify order (reject non-positive amounts).
    print(f"Order {order_id}: Verifying order")
    time.sleep(random.uniform(0.3, 0.7))
    if order["amount"] <= 0:
        print(f"Order {order_id}: Verification failed")
        return

    # Step 2 - process payment (simulated external call).
    print(f"Order {order_id}: Processing payment")
    time.sleep(random.uniform(0.5, 1.0))

    # Step 3 - packaging.
    print(f"Order {order_id}: Packaging item")
    time.sleep(random.uniform(0.4, 0.8))

    # Step 4 - shipping.
    print(f"Order {order_id}: Ready for shipping")
    time.sleep(random.uniform(0.3, 0.6))
    print(f"Order {order_id} completed.")
# Q1: I used concurrent.futures ThreadPoolExecutor.
# Q2: I picked it because it lets me run multiple orders at the same time without managing threads manually.
# Q3: This is task parallelism because each order is its own task running the same steps independently.
# Q4: This is I/O-bound because most of the time is spent waiting (sleep), not doing calculations.
# Q5: time.sleep simulates waiting on real external systems, such as payment APIs or shipping services, to respond.
# Q6: Processing orders concurrently means we don't have to wait for one order to finish before starting the next.
# Q7: For thousands of orders we could use a message queue like RabbitMQ so multiple machines can process orders in parallel.
# Fan the orders out across a small thread pool. The workload is
# I/O-bound (simulated waits), so threads overlap the sleeps even
# under the GIL.
with ThreadPoolExecutor(max_workers=4) as executor:
    futures = [executor.submit(process_order, order) for order in orders]
    # as_completed yields futures as they finish; .result() re-raises
    # any exception from a worker so failures are not silently lost.
    for future in as_completed(futures):
        future.result()
# NOTE(review): the original final print was truncated mid-string
# ('print("All orders have been completed') — closing added here.
print("All orders have been completed.")
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment