The code tracks and displays real-time progress of parallel tasks using a Python `multiprocessing` manager queue and a dedicated listener process. Each completed task puts a signal on the queue, so progress reporting stays accurate during concurrent execution.
from multiprocessing import Manager, Pool, Process


def worker(task):
    """Example CPU-bound work: return the square of *task*."""
    return task ** 2


def listener(queue, total_tasks):
    """Print progress as completion signals arrive on *queue*.

    Consumes items until the ``None`` sentinel is received; each non-sentinel
    item counts as one completed task out of *total_tasks*.
    """
    completed = 0
    for _ in iter(queue.get, None):  # blocks until sentinel None arrives
        completed += 1
        print(f"Progress: {completed / total_tasks * 100:.2f}%")


def wrapped_worker(args):
    """Run ``worker`` on a task, then signal completion on the queue.

    Defined at module level (and given the queue explicitly) so it is
    picklable under the 'spawn' start method used on Windows/macOS; the
    original closure over a main-block global would fail there.
    """
    task, queue = args
    result = worker(task)
    queue.put(1)  # signal one task finished
    return result


if __name__ == "__main__":
    tasks = list(range(1, 101))  # example task list
    with Manager() as manager:
        queue = manager.Queue()
        # Run the listener in its own Process rather than borrowing a pool
        # worker: on a single-worker pool the listener would otherwise occupy
        # the only slot and deadlock the task processing.
        progress = Process(target=listener, args=(queue, len(tasks)))
        progress.start()
        with Pool() as pool:
            # Manager queue proxies are picklable, so they can travel as
            # ordinary task arguments to pool workers.
            results = pool.map_async(
                wrapped_worker, [(task, queue) for task in tasks]
            )
            results.wait()
        # Send the stop sentinel *after* the pool has exited: Pool.__exit__
        # calls terminate(), and signalling inside the with-block risked the
        # listener being killed before draining remaining progress updates.
        queue.put(None)
        progress.join()