Run the Spark code below to load the sales CSV into a Delta table.
# Load the raw sales CSV into a managed Delta table, adding derived
# date and customer-name columns along the way.
table_name = "sales"

# Explicit imports instead of `import *` so it is clear which Spark
# functions this script actually depends on.
from pyspark.sql.functions import col, element_at, month, split, year

# Read the sales data. inferSchema makes the numeric columns
# (Quantity, UnitPrice, TaxAmount) load as numbers rather than strings,
# so the Delta table supports aggregation without casting.
df = (
    spark.read.format("csv")
    .option("header", "true")
    .option("inferSchema", "true")
    .load("Files/SalesFolder/Sales.csv")
)

# Derive OrderYear and OrderMonth columns from OrderDate.
df = (
    df.withColumn("OrderYear", year(col("OrderDate")))
    .withColumn("OrderMonth", month(col("OrderDate")))
)

# Derive FirstName and LastName from CustomerName.
# element_at(parts, -1) takes the LAST token, so multi-word names
# (e.g. "Anna van Dyke") get the true surname; getItem(1) would have
# returned the second token instead.
name_parts = split(col("CustomerName"), " ")
df = (
    df.withColumn("FirstName", name_parts.getItem(0))
    .withColumn("LastName", element_at(name_parts, -1))
)

# Keep only the columns the sales table needs, in reporting order.
df = df.select(
    "SalesOrderNumber", "SalesOrderLineNumber", "OrderDate",
    "OrderYear", "OrderMonth", "FirstName", "LastName",
    "EmailAddress", "Item", "Quantity", "UnitPrice", "TaxAmount",
)

# Append to the Delta table (created automatically on the first run).
df.write.format("delta").mode("append").saveAsTable(table_name)