Execute multiple threads concurrently
You could use the ThreadPool class in multiprocessing.pool — the
thread-based (and largely undocumented) counterpart of multiprocessing.Pool —
to do something along these lines:
from multiprocessing.pool import ThreadPool
import random
import threading
import time
# Maximum number of worker threads kept in the pool.
MAX_THREADS = 2
# Serialises print() calls so output lines from concurrent workers
# never interleave mid-line.
print_lock = threading.Lock()
def export_data(fileName):
    """Simulate writing to *fileName* with a per-second progress countdown.

    Picks a random duration of 1-10 seconds, then prints one status line
    per remaining second, counting down to 1.  Printing is serialised
    through the module-level ``print_lock`` so lines from concurrently
    running workers do not interleave.
    """
    duration = random.randint(1, 10)  # simulated write time, in seconds
    for remaining in range(duration, 0, -1):
        # Hold the lock only for the print itself, not the sleep.
        with print_lock:
            print('[{:2d}] Writing to {}...'.format(remaining, fileName))
        time.sleep(1)
def export_to_files(filenames):
    """Export every name in *filenames* concurrently and block until done.

    Runs ``export_data`` over *filenames* on a pool of ``MAX_THREADS``
    worker threads.  Returns after all exports have finished.
    """
    # Use the pool as a context manager so it is always cleaned up, and a
    # blocking map() instead of map_async(): the original discarded the
    # AsyncResult, which silently swallowed any exception raised in a
    # worker.  map() waits for completion and re-raises worker errors.
    with ThreadPool(processes=MAX_THREADS) as pool:
        pool.map(export_data, filenames)
def main():
    """Entry point: export three files concurrently."""
    export_to_files(['out_file1', 'out_file2', 'out_file3'])


if __name__ == "__main__":
    main()
Example output:
[ 9] Writing to out_file1...
[ 6] Writing to out_file2...
[ 5] Writing to out_file2...
[ 8] Writing to out_file1...
[ 4] Writing to out_file2...
[ 7] Writing to out_file1...
[ 3] Writing to out_file2...
[ 6] Writing to out_file1...
[ 2] Writing to out_file2...
[ 5] Writing to out_file1...
[ 1] Writing to out_file2...
[ 4] Writing to out_file1...
[ 8] Writing to out_file3...
[ 3] Writing to out_file1...
[ 7] Writing to out_file3...
[ 2] Writing to out_file1...
[ 6] Writing to out_file3...
[ 1] Writing to out_file1...
[ 5] Writing to out_file3...
[ 4] Writing to out_file3...
[ 3] Writing to out_file3...
[ 2] Writing to out_file3...
[ 1] Writing to out_file3...