from pyspark.sql import SparkSession
from pyspark.sql.types import *
ss = SparkSession.builder.getOrCreate()

# Read the student CSV from HDFS with an explicit schema string
# (avoids a schema-inference pass over the file; no header row expected).
df_csv = ss.read.csv(
    'hdfs://node1:8020/user/hive/warehouse/data/stu.csv',
    schema='name string,age int,gender string,phone string,email string,city string,address string',
)
df_csv.show()
def func(email):
    """Split an e-mail address into [username, provider name].

    e.g. 'tom@gmail.com' -> ['tom', 'gmail'].

    Returning a list is how a single UDF yields multiple values: it is
    registered with returnType=ArrayType(StringType()), so callers index
    the result ([0] = username, [1] = provider).

    :param email: address of the form 'user@provider.tld'
                  (assumed well-formed; raises IndexError otherwise)
    :return: list[str] of length 2
    """
    # Split once and reuse the parts instead of splitting twice.
    username, domain = email.split('@', 1)
    email_name = domain.split('.')[0]
    return [username, email_name]
# Register the UDF once for both APIs: usable by name 'email_func' in SQL,
# and via the returned callable in the DataFrame DSL. The Python list return
# maps to ArrayType(StringType()).
email_func = ss.udf.register('email_func', func, returnType=ArrayType(StringType()))

df_csv.createTempView('stu')

# SQL style: index the array result ([1] = provider name).
# Fixed typos: 'neme' -> 'name', 'general' -> 'gender', and the missing
# comma before email_func(...).
df_csv_show = ss.sql('select name,age,gender,email_func(email)[1] from stu')
df_csv_show.show()

# DSL style: index the array column FIRST, then alias the resulting column.
# (The original aliased first and indexed the aliased column, losing the alias.)
df_csv_sql = df_csv.select(
    'name',
    'age',
    'gender',
    email_func('email')[0].alias('email_data'),
)
df_csv_sql.show()