This code snippet shows how to convert a string column to a date in PySpark.
In PySpark (Python), we can combine the unix_timestamp and from_unixtime functions to parse a date string.
Code snippet
from pyspark.sql import SparkSession
from pyspark.sql.functions import unix_timestamp, from_unixtime
appName = "PySpark Date Parse Example"
master = "local"
# Create the Spark session.
spark = SparkSession.builder \
    .appName(appName) \
    .master(master) \
    .getOrCreate()
df = spark.createDataFrame([('2019-06-01',)], ['DATE_STR_COL'])
# Parse the string column, keep the result, then display it.
df = df.select(from_unixtime(unix_timestamp(df.DATE_STR_COL, 'yyyy-MM-dd')).alias('DATE_COL'))
df.show()
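Note that from_unixtime returns a string in the default 'yyyy-MM-dd HH:mm:ss' format rather than a true DateType column. If an actual date type is needed, the to_date function in pyspark.sql.functions is a simpler alternative. The sketch below assumes the same column name and date format as above.

from pyspark.sql import SparkSession
from pyspark.sql.functions import to_date

spark = SparkSession.builder \
    .appName("PySpark Date Parse Example") \
    .master("local") \
    .getOrCreate()

df = spark.createDataFrame([('2019-06-01',)], ['DATE_STR_COL'])

# to_date parses the string directly into a DateType column.
df = df.select(to_date(df.DATE_STR_COL, 'yyyy-MM-dd').alias('DATE_COL'))
df.printSchema()
df.show()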