# Databricks notebook source
from pyspark.sql import SparkSession
spark = SparkSession.builder.appName("Spark DataFrames").getOrCreate()
# COMMAND ----------
from pyspark import SparkConf, SparkContext

conf = SparkConf().setAppName("RDD")
sc = SparkContext.getOrCreate(conf=conf)

# Read the CSV as plain text, drop the header row, and split each line into fields
rdd = sc.textFile('/FileStore/tables/StudentData.csv')
headers = rdd.first()
rdd = rdd.filter(lambda x: x != headers).map(lambda x: x.split(','))
# COMMAND ----------
# Use the header row for column names; toDF on an RDD of lists infers every column as a string
columns = headers.split(',')
dfRdd = rdd.toDF(columns)
# dfRdd.show()
dfRdd.printSchema()
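# COMMAND ----------
# Alternative sketch (not part of the original notebook): build the DataFrame from Row
# objects so each value is paired with its column name; rowRdd and dfFromRows are
# illustrative names introduced here.
from pyspark.sql import Row
rowRdd = rdd.map(lambda x: Row(**dict(zip(columns, x))))
dfFromRows = spark.createDataFrame(rowRdd)
dfFromRows.printSchema()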
# COMMAND ----------
from pyspark.sql.types import StructType, StructField, StringType, IntegerType

# Explicit schema with proper types for the student columns
schema = StructType([
    StructField("age", IntegerType(), True),
    StructField("gender", StringType(), True),
    StructField("name", StringType(), True),
    StructField("course", StringType(), True),
    StructField("roll", StringType(), True),
    StructField("marks", IntegerType(), True),
    StructField("email", StringType(), True)
])
# COMMAND ----------
# Cast the numeric fields to int so the rows match the IntegerType columns
# (assumes the CSV column order is age, gender, name, course, roll, marks, email)
rddTyped = rdd.map(lambda x: [int(x[0]), x[1], x[2], x[3], x[4], int(x[5]), x[6]])
dfRdd = spark.createDataFrame(rddTyped, schema=schema)
dfRdd.show()
dfRdd.printSchema()
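# COMMAND ----------
# Quick sanity check (a minimal sketch, not from the original notebook): confirm that age
# and marks are genuine integer columns by listing the dtypes and running a numeric summary.
print(dfRdd.dtypes)
dfRdd.select("age", "marks").describe().show()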