🧵 Multithreading
Threading allows you to run multiple operations concurrently in a single process.
Mastering this concept will significantly boost your Python concurrency skills!
💻 Code Example:
"""Threading walkthrough: basic threads, Lock, Event, and ThreadPoolExecutor."""
import threading
import time
import queue
from concurrent.futures import ThreadPoolExecutor, as_completed


# 1. Basic Thread - run two independent tasks at the same time
def pynfinity_task(name: str, delay: float) -> None:
    """Announce start, sleep for *delay* seconds, then announce completion."""
    print(f"[{name}] started")
    time.sleep(delay)
    print(f"[{name}] done after {delay}s")


workers = [
    threading.Thread(target=pynfinity_task, args=("Pebble Loader", 1.0)),
    threading.Thread(target=pynfinity_task, args=("Score Updater", 0.5)),
]
for w in workers:
    w.start()
for w in workers:
    w.join()

# 2. Lock - serialize access to shared state so no increment is lost
counter = 0
lock = threading.Lock()


def safe_increment():
    """Add 10,000 to the shared counter, one locked increment at a time."""
    global counter
    for _ in range(10_000):
        with lock:  # the read-modify-write must be atomic across threads
            counter += 1


incrementers = [threading.Thread(target=safe_increment) for _ in range(5)]
for th in incrementers:
    th.start()
for th in incrementers:
    th.join()
print(f"\nFinal counter (should be 50,000): {counter:,}")

# 3. Event - a one-shot signal from a producer to a waiting consumer
event = threading.Event()


def data_producer():
    """Simulate building a report, then signal that it is ready."""
    print("[Producer] Generating pynfinity report...")
    time.sleep(1)
    event.set()  # wakes anyone blocked in event.wait()
    print("[Producer] Report ready!")


def data_consumer():
    """Block until the producer signals, then process the report."""
    print("[Consumer] Waiting for report...")
    event.wait()  # safe even if started before the producer
    print("[Consumer] Processing report!")


producer_thread = threading.Thread(target=data_producer)
consumer_thread = threading.Thread(target=data_consumer)
consumer_thread.start(); producer_thread.start()
consumer_thread.join(); producer_thread.join()


# 4. ThreadPoolExecutor - the easiest API for concurrent I/O
def fetch_pynfinity_score(user_id: int) -> dict:
    """Pretend to fetch a user's score; the sleep stands in for real I/O."""
    time.sleep(0.1)  # simulated API call / DB query
    return {"user_id": user_id, "score": (user_id * 37) % 100}


print("\nFetching scores concurrently:")
with ThreadPoolExecutor(max_workers=8) as pool:
    pending = {pool.submit(fetch_pynfinity_score, uid): uid for uid in range(1, 11)}
    for done in as_completed(pending):
        row = done.result()
        print(f" User {row['user_id']:>2}: {row['score']}")
Keep exploring and happy coding! 💻