%scala
import org.apache.spark.sql.SparkSession
import org.apache.spark.sql.functions.{col, to_date}
Seq(("20110813"),("20090724")).toDF("Date").select(
col("Date"),
to_date(col("Date"),"yyyy-mm-dd").as("to_date")
).show()
-------- -------
| Date|to_date|
-------- -------
|20110813| null|
|20090724| null|
-------- -------
-------- ----------
| Date| to_date|
-------- ----------
|20110813|2011-01-13|
|20090724|2009-01-24|
-------- ----------
Seq(("20110813"),("20090724")).toDF("Date").select(
col("Date"),
to_date(col("Date"),"yyyymmdd").as("to_date")
).show()
I am trying to convert a string to timestamp, but I am getting always null/default values returned to the date value
CodePudding user response:
You haven't given value for the new column to convert. you should use withColumn to add the new date column and tell him to use Date column values.
val df = Seq(("20110813"),("20090724")).toDF("Date")
val newDf = df.withColumn("to_date", to_date(col("Date"), "yyyy-MM-dd"))
newDf.show()