I'm trying to connect Spark with Amazon Redshift, but I'm getting an error.
My code is as follows:
from pyspark.sql import SQLContext
Here is a step-by-step process for connecting to Redshift.

First, download the Redshift JDBC driver:
wget "https://s3.amazonaws.com/redshift-downloads/drivers/RedshiftJDBC4-1.2.1.1001.jar"
Then, in your script (test.py), initialize the Spark session and set the AWS credentials used for the S3 tempdir:

from pyspark.sql import SparkSession, HiveContext

# Initialize the Spark session
spark = SparkSession.builder \
    .master("yarn") \
    .appName("Connect to redshift") \
    .enableHiveSupport() \
    .getOrCreate()
sc = spark.sparkContext
sqlContext = HiveContext(sc)

# Credentials Spark uses to read/write the temporary S3 directory;
# fill these in (clusters using the s3n:// or s3a:// scheme need the
# matching fs.s3n.* / fs.s3a.* keys instead)
sc._jsc.hadoopConfiguration().set("fs.s3.awsAccessKeyId", "")
sc._jsc.hadoopConfiguration().set("fs.s3.awsSecretAccessKey", "")
# Read the Redshift table into a DataFrame; spark-redshift unloads
# the data to the S3 tempdir and loads it from there. The
# jdbc:redshift subprotocol makes the connector pick up the Redshift
# JDBC driver passed via --jars.
taxonomyDf = sqlContext.read \
    .format("com.databricks.spark.redshift") \
    .option("url", "jdbc:redshift://url.xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx") \
    .option("dbtable", "table_name") \
    .option("tempdir", "s3://mybucket/") \
    .load()
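If you only need part of the table, the connector also accepts a query option in place of dbtable. This is a minimal sketch; the query text and the subsetDf name are placeholders, not from the original question:

# Hypothetical: push a query down to Redshift instead of loading the whole table
subsetDf = sqlContext.read \
    .format("com.databricks.spark.redshift") \
    .option("url", "jdbc:redshift://url.xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx") \
    .option("query", "SELECT * FROM table_name LIMIT 10") \
    .option("tempdir", "s3://mybucket/") \
    .load()
subsetDf.show()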
Finally, submit the job with the spark-redshift package and the JDBC driver jar on the classpath:

spark-submit --packages com.databricks:spark-redshift_2.10:0.5.0 --jars RedshiftJDBC4-1.2.1.1001.jar test.py
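The same connector can also write a DataFrame back to Redshift. A sketch under the same assumptions; table_name_copy is a placeholder:

# Hypothetical write-back: unloads the DataFrame to the S3 tempdir,
# then issues a COPY into the target table; fails if the table exists
taxonomyDf.write \
    .format("com.databricks.spark.redshift") \
    .option("url", "jdbc:redshift://url.xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx") \
    .option("dbtable", "table_name_copy") \
    .option("tempdir", "s3://mybucket/") \
    .mode("error") \
    .save()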