I am trying to query data from Snowflake using PySpark in AWS Glue with the code below:
import sys
from awsglue.transforms import *
from awsglue.utils import getResolvedOptions
from pyspark.context import SparkContext
from awsglue.context import GlueContext
from awsglue.job import Job
from py4j.java_gateway import java_import
SNOWFLAKE_SOURCE_NAME = "net.snowflake.spark.snowflake"
## @params: [JOB_NAME, URL, ACCOUNT, WAREHOUSE, DB, SCHEMA, USERNAME, PASSWORD]
args = getResolvedOptions(sys.argv, ['JOB_NAME', 'URL', 'ACCOUNT', 'WAREHOUSE', 'DB', 'SCHEMA', 'USERNAME', 'PASSWORD'])
sc = SparkContext()
glueContext = GlueContext(sc)
spark = glueContext.spark_session
job = Job(glueContext)
job.init(args['JOB_NAME'], args)
java_import(spark._jvm, "net.snowflake.spark.snowflake")
## uj = sc._jvm.net.snowflake.spark.snowflake
spark._jvm.net.snowflake.spark.snowflake.SnowflakeConnectorUtils.enablePushdownSession(spark._jvm.org.apache.spark.sql.SparkSession.builder().getOrCreate())
table="crx--54gfg--hyg65hghg76768_t6ghh75y"
options = {
"sfURL" : args['URL'],
"sfAccount" : args['ACCOUNT'],
"sfUser" : args['USERNAME'],
"sfPassword" : args['PASSWORD'],
"sfDatabase" : args['DB'],
"sfSchema" : args['SCHEMA'],
"sfWarehouse" : args['WAREHOUSE'],
}
query=f"select * from {table}"
df = spark.read \
    .format(SNOWFLAKE_SOURCE_NAME) \
    .options(**options) \
    .option("query", query) \
    .load()
df.show()
but I am getting the error below:
net.snowflake.client.jdbc.SnowflakeSQLException: SQL compilation error: syntax error line 1 at position 111 unexpected '<EOF>'
I think it is mostly because of the DB table name, which contains some special characters. How do I resolve this error?
CodePudding user response:
Using:
query=f'''select * from "{table}"'''
or:
table='''"crx--54gfg--hyg65hghg76768_t6ghh75y"'''
...
query=f"select * from {table}"
The table name contains - and therefore has to be enclosed in double quotes (") to be parsed as a valid Snowflake identifier.
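For reference, a minimal sketch of how the quoted table name slots into the Glue job above, reusing the same options dict and connector format string from the question:

# Quote the identifier so Snowflake accepts the "-" characters in the table name.
# Note: quoted identifiers are case-sensitive in Snowflake, so the casing here
# must match the table name exactly as it was created.
table = 'crx--54gfg--hyg65hghg76768_t6ghh75y'
query = f'select * from "{table}"'

df = spark.read \
    .format(SNOWFLAKE_SOURCE_NAME) \
    .options(**options) \
    .option("query", query) \
    .load()

df.show()

Alternatively, you can skip the hand-written SQL and pass the quoted name through the connector's dbtable option instead of query.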