From ac4cb2ab7a2711860eb969a2aa6d487b1dbb959b Mon Sep 17 00:00:00 2001
From: Lynne
Date: Wed, 27 May 2020 21:51:58 +1000
Subject: [PATCH] okay, NOW multithreading works properly, and can i just say:
 holy fuck it's SO MUCH FASTER

---
 .gitignore       |  3 ++-
 app/functions.py | 27 ++++++++++++---------------
 2 files changed, 14 insertions(+), 16 deletions(-)

diff --git a/.gitignore b/.gitignore
index d0abd4f..dd22fa8 100644
--- a/.gitignore
+++ b/.gitignore
@@ -3,4 +3,5 @@ config.json
 planning.txt
 *.pyc
 /debug
-lynnesbian.json
\ No newline at end of file
+lynnesbian.json
+test.py

diff --git a/app/functions.py b/app/functions.py
index 42da7a7..8fce953 100644
--- a/app/functions.py
+++ b/app/functions.py
@@ -191,22 +191,19 @@ def make_post(args):
 	db.commit()
 	c.close()
 
-def do_in_pool(function, data, timeout=30, silent=False):
-	with ProcessPool(max_workers=cfg['service_threads']) as p:
-		index = 0
-		future = p.map(function, data)
-		iterator = future.result()
+def task_done(future):
+	try:
+		result = future.result() # blocks until results are ready
+	except TimeoutError as error:
+		if not future.silent: print("Timed out on {}.".format(future.function_data))
 
-		while True:
-			try:
-				result = next(iterator)
-			except StopIteration:
-				# all threads are done
-				break
-			except TimeoutError as error:
-				if not silent: print("Timed out on {}.".format(data[index]))
-			finally:
-				index += 1
+def do_in_pool(function, data, timeout=30, silent=False):
+	with ProcessPool(max_workers=5, max_tasks=10) as pool:
+		for i in data:
+			future = pool.schedule(function, args=[i], timeout=timeout)
+			future.silent = silent
+			future.function_data = i
+			future.add_done_callback(task_done)
 
 def get_key():
 	db = MySQLdb.connect(
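
The rewritten do_in_pool above schedules each input as its own task on what looks like pebble's ProcessPool and leaves timeout reporting to a done-callback instead of iterating over map() results. Below is a minimal standalone sketch of that schedule-plus-callback pattern, assuming pebble is installed; slow_square, the worker count, the inputs, and the 3-second timeout are invented for illustration and are not part of the patch.

from concurrent.futures import TimeoutError
from pebble import ProcessPool
import time

def slow_square(n):
	# Hypothetical stand-in for the bot's real per-item work; the largest
	# input sleeps past the 3-second timeout so the callback has something
	# to report.
	time.sleep(n)
	return n * n

def task_done(future):
	try:
		future.result()  # raises TimeoutError if the task exceeded its timeout
	except TimeoutError:
		print("Timed out on {}.".format(future.function_data))

if __name__ == "__main__":
	with ProcessPool(max_workers=2) as pool:
		for n in [1, 2, 5]:
			future = pool.schedule(slow_square, args=[n], timeout=3)
			future.function_data = n  # stash the input so the callback can name it
			future.add_done_callback(task_done)

Stashing the input on the future (function_data) is what lets the callback say which item timed out, since add_done_callback only passes the future itself; this mirrors what the patched do_in_pool does with future.function_data and future.silent.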