You can use `multiprocessing.Manager` to help with this: it lets you create a list that can be shared between processes.
import multiprocessing
import os
from functools import partial
def readFile(shared_variable, filename):
    """Record *filename*'s parent directory (once) and process the file.

    Parameters
    ----------
    shared_variable : list proxy from ``multiprocessing.Manager().list()``
        Cross-process list of directory names already added to the database.
    filename : str
        Full path of the file to process.
    """
    path_parts = os.path.split(filename)
    dirname = os.path.basename(path_parts[0])
    if dirname not in shared_variable:
        # Remember the directory so no other worker inserts it again.
        # Without this append the membership test above could never
        # become true and the directory would be inserted repeatedly.
        shared_variable.append(dirname)
        # TODO: insert `dirname` into the database here.
    # TODO: other per-file processing goes here.
def main():
    """Walk the tree under ``PATH`` and hand every file to ``readFile``.

    A Manager-backed list is shared with all pool workers so each process
    can see which directories have already been recorded.
    """
    manager = multiprocessing.Manager()
    shared_variable = manager.list()
    # NOTE(review): the original created a Queue and passed an undefined
    # `init` initializer to Pool(), which raises NameError; neither was
    # used, so the pool is created with defaults (os.cpu_count() workers).
    pool = multiprocessing.Pool()
    # Bind the shared list as the first argument of every readFile call.
    func = partial(readFile, shared_variable)
    try:
        for dirpath, dirnames, filenames in os.walk(PATH):
            full_path_fnames = [os.path.join(dirpath, fn) for fn in filenames]
            pool.map(func, full_path_fnames)
    finally:
        # Always release the worker processes, even if the walk fails.
        pool.close()
        pool.join()
The `partial` is just used to make it easier to pass `shared_variable` to each call of `readFile`, along with each member of `full_path_fnames`, via `pool.map`.