Master Python threading module with Thread, Lock, Event, Semaphore, Condition, and Queue. Understand GIL and thread synchronization.
📌 Python threading, multithreading Python, GIL, thread synchronization, Lock Python, concurrent programming
Multithreading allows your program to perform multiple tasks (threads) seemingly simultaneously within a single process. All threads share the same memory space, enabling efficient communication but requiring careful synchronization.
The threading module provides a high-level interface for working with threads. Each thread represents a separate sequence of instructions executing independently. Key methods: Thread(target=func) creates threads, start() begins execution, and join() waits for completion.
The Global Interpreter Lock (GIL) is Python's mutex preventing multiple threads from executing Python bytecode simultaneously. For CPU-bound tasks, multithreading won't yield performance gains even on multi-core systems. For I/O-bound tasks, the GIL is released during blocking operations, allowing other threads to run.
Thread synchronization prevents race conditions when multiple threads access shared resources. Python provides Lock, Event, Semaphore, BoundedSemaphore, and Condition for different synchronization patterns—each suited for specific concurrency scenarios.
queue.Queue (from the standard-library queue module) is the recommended way for inter-thread communication. It's thread-safe, handles all locking internally, and supports producer-consumer patterns elegantly with put() and get() operations.
import threading
import time


def worker(number, sleep_time):
    """Log a start message, sleep for sleep_time seconds, then log completion."""
    print(f"Thread {number}: starting, will sleep {sleep_time} sec.")
    time.sleep(sleep_time)
    print(f"Thread {number}: finishing.")


# Create threads (target callable + positional args tuple)
thread1 = threading.Thread(target=worker, args=(1, 2))
thread2 = threading.Thread(target=worker, args=(2, 1))

# Start threads: start() returns immediately, the worker runs concurrently
thread1.start()
thread2.start()
print("Main: All threads started.")

# Wait for threads to complete; join() blocks until each thread's run() returns
thread1.join()
thread2.join()
print("Main: All threads finished.")
import threading
import time


class WorkerThread(threading.Thread):
    """Thread subclass that sleeps for `delay` seconds, logging start and finish."""

    def __init__(self, name, delay):
        # threading.Thread already manages a `name` attribute — pass it to the
        # base class instead of shadow-assigning it after construction.
        super().__init__(name=name)
        self.delay = delay  # seconds to sleep inside run()

    def run(self):
        """Entry point executed in the new thread when start() is called."""
        print(f"{self.name}: starting")
        time.sleep(self.delay)
        print(f"{self.name}: finishing")


# Create and start threads
t1 = WorkerThread("Worker-A", 2)
t2 = WorkerThread("Worker-B", 1)
t1.start()
t2.start()
t1.join()
t2.join()
print("All workers done")
import threading

shared_counter = 0
lock = threading.Lock()


def increment():
    """Add 1 to the shared counter 100,000 times, guarding each update."""
    global shared_counter
    for _ in range(100000):
        with lock:  # Acquires lock, releases automatically after the block
            shared_counter += 1


threads = []
for _ in range(5):
    t = threading.Thread(target=increment)
    threads.append(t)
    t.start()
for t in threads:
    t.join()
print(f"Final counter: {shared_counter}")
# Correct result: 500000 (without lock: race condition!)
import threading
import time

event = threading.Event()


def waiter(name):
    """Block until the shared event is set, then continue."""
    print(f"{name}: waiting for event...")
    event.wait()  # Blocks until event.set() is called
    print(f"{name}: event received, continuing!")


def setter():
    """Sleep briefly, then set the event, releasing every waiting thread."""
    print("Setter: waiting 2 seconds...")
    time.sleep(2)
    print("Setter: setting event!")
    event.set()  # Unblocks all waiting threads at once


# Start multiple waiters and one setter
threads = []
for i in range(3):
    t = threading.Thread(target=waiter, args=(f"Waiter-{i}",))
    threads.append(t)
    t.start()
setter_thread = threading.Thread(target=setter)
setter_thread.start()
for t in threads + [setter_thread]:
    t.join()
import threading
import time
import random

MAX_CONNECTIONS = 3
# BoundedSemaphore additionally raises ValueError if released more times
# than it was acquired, catching release-count bugs early.
semaphore = threading.BoundedSemaphore(MAX_CONNECTIONS)


def use_resource(thread_id):
    """Hold one of the MAX_CONNECTIONS slots for a random 1-3 seconds."""
    print(f"Thread {thread_id}: trying to acquire...")
    with semaphore:  # blocks while all slots are taken; releases on exit
        print(f"Thread {thread_id}: acquired!")
        time.sleep(random.randint(1, 3))
        print(f"Thread {thread_id}: releasing")
    print(f"Thread {thread_id}: done")


# Start 10 threads but only 3 can run the guarded section at once
threads = []
for i in range(10):
    t = threading.Thread(target=use_resource, args=(i,))
    threads.append(t)
    t.start()
for t in threads:
    t.join()
import threading
import queue
import time
import random

q = queue.Queue()
NUM_PRODUCERS = 2
NUM_CONSUMERS = 2


def producer():
    """Produce ten items with a small random delay between each."""
    for i in range(10):
        item = f"Item-{i}"
        time.sleep(random.uniform(0.1, 0.3))
        q.put(item)
        print(f"Producer: added {item} (queue size: {q.qsize()})")


def consumer(name):
    """Process items until a None sentinel is received."""
    while True:
        item = q.get()
        if item is None:
            print(f"{name}: received stop signal")
            q.task_done()
            break
        print(f"{name}: processing {item}")
        time.sleep(random.uniform(0.2, 0.5))
        q.task_done()


# Start threads
producers = [threading.Thread(target=producer) for _ in range(NUM_PRODUCERS)]
consumers = [threading.Thread(target=consumer, args=(f"Consumer-{i}",)) for i in range(NUM_CONSUMERS)]
for t in producers + consumers:
    t.start()

# Wait for the producers first, then enqueue exactly one sentinel per
# consumer. (Having each producer enqueue its own sentinels would leave
# producers * sentinels - consumers stale None entries in the queue.)
for t in producers:
    t.join()
for _ in range(NUM_CONSUMERS):
    q.put(None)
for t in consumers:
    t.join()
import threading
import time

condition = threading.Condition()
items = []
producer_done = False  # set under the condition lock once production ends


def producer():
    """Append five items to the shared list, notifying the consumer each time."""
    global producer_done
    for i in range(5):
        time.sleep(0.1)
        with condition:
            item = f"Item-{i}"  # fix: bind item before printing (was a NameError)
            items.append(item)
            print(f"Producer: added {item}, total: {len(items)}")
            condition.notify()  # Wake one consumer
    with condition:
        producer_done = True
        condition.notify_all()  # let waiting consumers observe completion


def consumer(name):
    """Drain items until the producer has finished and the list is empty."""
    while True:
        with condition:
            # A while-loop (not a plain `if`) re-checks the predicate after
            # every wakeup, guarding against spurious wakeups.
            while not items and not producer_done:
                print(f"{name}: waiting...")
                condition.wait()  # Releases lock while waiting
            if items:
                item = items.pop(0)
                print(f"{name}: consumed {item}")
            else:
                break  # producer finished and nothing is left


prod_thread = threading.Thread(target=producer)
cons_thread = threading.Thread(target=consumer, args=("Consumer",))
prod_thread.start()
cons_thread.start()
prod_thread.join()
cons_thread.join()