I have a Python project whose folder has the following structure:

main_directory
  - lib
    - lib.py
  - run
    - script.py
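
For context, the dependencies need to be shipped because the main file imports from the other modules. A hypothetical run/script.py might look like this (the import style and helper name are illustrative only, and lib is assumed to be a package, i.e. to contain an __init__.py):

# run/script.py (hypothetical example)
from pyspark.sql import SparkSession
from lib import lib          # resolved from deps.zip on the cluster at runtime

spark = SparkSession.builder.appName('example-job').getOrCreate()
lib.do_something(spark)      # hypothetical helper defined in lib/lib.py
spark.stop()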
To zip the dependencies:
cd base-path-to-python-modules
zip -qr deps.zip ./* -x script.py
Copy deps.zip to HDFS or GCS (e.g. with hadoop fs -put or gsutil cp) and use that URI when submitting the job, as shown below.
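
If you would rather stage the archive from Python instead of the CLI tools, a minimal sketch using the google-cloud-storage client could look like this (the bucket name and object path below are placeholders):

from google.cloud import storage

bucket_name = ''                   # placeholder: your staging bucket
destination = 'path/to/deps.zip'   # placeholder: object name inside the bucket

storage_client = storage.Client()
bucket = storage_client.bucket(bucket_name)
# Uploads the local deps.zip; its URI is then gs://<bucket_name>/<destination>
bucket.blob(destination).upload_from_filename('deps.zip')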
Submit the Python project (PySpark) using Dataproc's Python client library:
from google.cloud import dataproc_v1
from google.cloud.dataproc_v1.gapic.transports import (
    job_controller_grpc_transport)

region = ''        # e.g. 'us-central1'
cluster_name = ''  # name of the target Dataproc cluster
project_id = ''    # GCP project that owns the cluster

job_transport = (
    job_controller_grpc_transport.JobControllerGrpcTransport(
        address='{}-dataproc.googleapis.com:443'.format(region)))
dataproc_job_client = dataproc_v1.JobControllerClient(job_transport)

# URI (hdfs:// or gs://) of the main python file, e.g. script.py
job_file = ''
# command-line arguments passed to the main job file
args = ['arg1', 'arg2']

# required only if the main python job file imports from other modules;
# each entry can be a .py, .zip, or .egg
additional_python_files = ['hdfs://path/to/deps.zip', 'gs://path/to/moredeps.zip']

job_details = {
    'placement': {
        'cluster_name': cluster_name
    },
    'pyspark_job': {
        'main_python_file_uri': job_file,
        'args': args,
        'python_file_uris': additional_python_files
    }
}
res = dataproc_job_client.submit_job(project_id=project_id,
                                     region=region,
                                     job=job_details)
job_id = res.reference.job_id
print(f'Submitted dataproc job id: {job_id}')
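
To block until the job finishes, you can poll its state with the same client. This is only a sketch; the polling interval and the set of terminal states handled here are one reasonable choice, not the only one:

import time

terminal_states = {'DONE', 'ERROR', 'CANCELLED'}
while True:
    job = dataproc_job_client.get_job(project_id, region, job_id)
    # Nested protobuf enum gives the human-readable state name
    state = job.status.State.Name(job.status.state)
    if state in terminal_states:
        print(f'Job {job_id} finished in state {state}')
        break
    time.sleep(5)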