Testing Spark reading local and HDFS files
from pyspark.sql import SparkSession
spark = SparkSession.builder \
    .appName("Example PySpark Script") \
    .getOrCreate()
# Read the local CSV file
df = spark.read.csv("/Users/xiaokkk/Desktop/local_projects/spark/intents.csv", header=True, inferSchema=True)
# Show the first few rows
df.show(5)
# Read the CSV file from HDFS
df = spark.read.csv("hdfs://127.0.0.1:9001/data/intents.csv", header=True, inferSchema=True)
df.show(5)
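If the cluster configuration sets HDFS as the default filesystem, a bare local path may be resolved against HDFS instead of the local disk. A minimal sketch of making the filesystem explicit with a scheme prefix (reusing the same paths as above, and stopping the session once the test is done):
# Spell out the filesystem scheme so local and HDFS reads are unambiguous
df_local = spark.read.csv("file:///Users/xiaokkk/Desktop/local_projects/spark/intents.csv",
                          header=True, inferSchema=True)
df_hdfs = spark.read.csv("hdfs://127.0.0.1:9001/data/intents.csv",
                         header=True, inferSchema=True)
# Release the SparkSession when the test is finished
spark.stop()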