-
-
Notifications
You must be signed in to change notification settings - Fork 0
/
Copy pathtest.py
66 lines (53 loc) · 1.96 KB
/
test.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# @Time : 2018/7/11 21:13
# @Author :
# @Site :
# @File :
# @Software:
'''
receive data coming from kafka (producer.py) and insert data into delta table
'''
# debug spark-submit with PyCharm (Chinese): https://blog.csdn.net/zj1244/article/details/78893837
# (English): https://stackoverflow.com/questions/35560767/pyspark-streaming-with-kafka-in-pycharm
from pyspark.sql import SparkSession
from pyspark.sql.functions import col, expr
from delta.tables import DeltaTable
import shutil
import json
import os
# Kafka bootstrap server the streaming reader connects to (see readStream).
kafka_broker = "localhost:9092"
# Prefix for bucket names; not referenced in the visible code — presumably used
# further down the file or by callers. TODO confirm.
bucket_prefix = "my-company-bucket-prefix-"
# Kafka topic subscribed to by readStream.
topicName = "test"
def session():
    """Build (or fetch) a local SparkSession configured for Delta Lake.

    Registers the Delta SQL extension and catalog so that SQL commands and
    UPDATE/DELETE/MERGE operations work on Delta tables in this session.

    Returns:
        The active :class:`pyspark.sql.SparkSession`.
    """
    builder = SparkSession.builder.appName("quickstart").master("local[*]")
    # Delta Lake requires both the SQL extension and the Delta catalog to be
    # installed on the session before any Delta-specific SQL is issued.
    delta_conf = {
        "spark.sql.extensions": "io.delta.sql.DeltaSparkSessionExtension",
        "spark.sql.catalog.spark_catalog": "org.apache.spark.sql.delta.catalog.DeltaCatalog",
    }
    for key, value in delta_conf.items():
        builder = builder.config(key, value)
    return builder.getOrCreate()
def writeStream(df, tableInfo):
    """Start an append-mode Delta streaming write for *df*.

    Fix: the original hardcoded "/mnt/datalake/gps" for both the checkpoint
    and the table path even though *tableInfo* carries per-table locations
    (``checkpointlocation`` / ``tableLocation``) — every table's stream would
    have written to the same location. It also discarded the StreamingQuery
    handle, so callers could never await or stop the stream.

    Args:
        df: streaming DataFrame to persist (e.g. from readStream).
        tableInfo: RAWEVENTSTABLE-like object providing ``partitionBy``,
            ``checkpointlocation`` and ``tableLocation``.

    Returns:
        The started :class:`pyspark.sql.streaming.StreamingQuery`.
    """
    return df.writeStream \
        .format("delta") \
        .partitionBy(tableInfo.partitionBy) \
        .outputMode("append") \
        .option("mergeSchema", "true") \
        .option("checkpointLocation", tableInfo.checkpointlocation) \
        .start(tableInfo.tableLocation)
def readStream(spark, tableInfo):
    """Create a streaming DataFrame reading from the module's Kafka topic.

    Subscribes to the module-level ``topicName`` on ``kafka_broker``,
    starting from the latest offsets (historical messages are skipped).

    Args:
        spark: active SparkSession.
        tableInfo: accepted for interface symmetry with writeStream; not
            currently consulted by this function.

    Returns:
        An unstarted streaming DataFrame with the raw Kafka schema.
    """
    kafka_options = {
        "kafka.bootstrap.servers": kafka_broker,
        "subscribe": topicName,
        "startingOffsets": "latest",
    }
    reader = spark.readStream.format("kafka")
    for name, value in kafka_options.items():
        reader = reader.option(name, value)
    return reader.load()
class RAWEVENTSTABLE:
def __init__(self,eventName, location):
self.eventName = eventName
self.checkpointlocation = location + eventName + "/checkpoint_location/"
self.tableLocation = location + eventName + "/events/"
self.partitionBy = "date"