IoCs Bronze.py
# Databricks notebook source
import os
from typing import List, Dict, Any, Optional
from delta.tables import DeltaTable
from pyspark.sql import DataFrame, SparkSession
import pyspark.sql.functions as F
# COMMAND ----------
# Configuration variables (secret scope and Event Hubs names are redacted)
base_dir = "/mnt/cyberdata"
secret_scope = "..."
evhub_secret_key = "..."
evhub_ns_name = "..."
evhub_topic_name = "iocs"
# COMMAND ----------
# MAGIC %run "./IoCs Common"
# COMMAND ----------
# Estimate the total number of worker cores: count the executors (minus one for the
# driver), then run one partition per executor and sum the CPU count each worker reports
num_executors = sc._jsc.sc().getExecutorMemoryStatus().size() - 1
num_cores = sum(
    sc.parallelize([None] * num_executors, num_executors)
    .mapPartitions(lambda _: [os.cpu_count()])
    .collect()
)
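# A simpler alternative (an assumption, not in the original notebook) is to rely on
# Spark's own view of the cluster, which reflects total cores on most configurations:
#   num_cores = sc.defaultParallelism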
# COMMAND ----------
spark.sql(f"set spark.sql.shuffle.partitions = {num_cores}")
# COMMAND ----------
# Build the Kafka options for the Event Hubs Kafka-compatible endpoint (port 9093).
# The SASL username is the literal string "$ConnectionString"; the password is the
# namespace connection string pulled from the secret scope.
readConnectionString = dbutils.secrets.get(secret_scope, evhub_secret_key)
eh_sasl = (
    'kafkashaded.org.apache.kafka.common.security.plain.PlainLoginModule required '
    f'username="$ConnectionString" password="{readConnectionString}";'
)
kafka_options = {
    "kafka.bootstrap.servers": f"{evhub_ns_name}.servicebus.windows.net:9093",
    "kafka.sasl.mechanism": "PLAIN",
    "kafka.security.protocol": "SASL_SSL",
    "kafka.request.timeout.ms": "60000",
    "kafka.session.timeout.ms": "30000",
    "startingOffsets": "earliest",
    "minPartitions": num_cores,
    "kafka.sasl.jaas.config": eh_sasl,
    "subscribe": evhub_topic_name,
}
# Read the IoC stream from Event Hubs and decode the Kafka value bytes to a string
df = (
    spark.readStream
    .format("kafka")
    .options(**kafka_options)
    .load()
    .withColumn("value", F.col("value").cast("string"))
)
# COMMAND ----------
partial_schema = "`@timestamp` timestamp, fileset struct<name:string>, service struct<type:string>, message string"
df2 = df.select("*", F.from_json("value", partial_schema).alias("jsn")) \
.withColumnRenamed("timestamp", "kafka_ts") \
.selectExpr("*", "jsn.`@timestamp` as timestamp", "jsn.fileset.name as dataset",
"jsn.service.type as service", "sha2(jsn.message, 256) as msg_hash") \
.drop("jsn", "timestampType") \
.withColumn("date", F.col("timestamp").cast("date"))
#display(df2)
# COMMAND ----------
def perform_foreach_batch(df: DataFrame, epoch):
    # drop_duplicates_with_merge comes from the "IoCs Common" notebook run above
    # (a hypothetical sketch of it follows in the next cell). It merges each
    # micro-batch into the bronze Delta table keyed on msg_hash, restricting the
    # merge scan to the last 10 days of date partitions.
    return drop_duplicates_with_merge(
        df,
        primary_key_columns=["msg_hash"],
        path=f"{base_dir}/bronze/threatintel/",
        partitionby=["date"],
        opts={"mergeSchema": "true"},
        additional_merge_cond="update.date >= current_date()-10",
    )
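# COMMAND ----------
# MAGIC %md
# MAGIC `drop_duplicates_with_merge` itself lives in `./IoCs Common` and is not shown here.
# MAGIC As a rough sketch of the shape such a helper can take (the signature matches the
# MAGIC call above, including the `update` alias used in `additional_merge_cond`, but the
# MAGIC body is an assumption, not the actual implementation):
# MAGIC
# MAGIC ```python
# MAGIC def drop_duplicates_with_merge(df, primary_key_columns, path,
# MAGIC                                partitionby=None, opts=None,
# MAGIC                                additional_merge_cond=None):
# MAGIC     # De-duplicate within the micro-batch itself first
# MAGIC     df = df.dropDuplicates(primary_key_columns)
# MAGIC     if not DeltaTable.isDeltaTable(spark, path):
# MAGIC         # First batch ever: just create the Delta table
# MAGIC         writer = df.write.format("delta").options(**(opts or {}))
# MAGIC         if partitionby:
# MAGIC             writer = writer.partitionBy(*partitionby)
# MAGIC         return writer.save(path)
# MAGIC     # Insert only rows whose key is not already present in the table
# MAGIC     cond = " AND ".join(f"target.{c} = update.{c}" for c in primary_key_columns)
# MAGIC     if additional_merge_cond:
# MAGIC         cond += f" AND ({additional_merge_cond})"
# MAGIC     (DeltaTable.forPath(spark, path).alias("target")
# MAGIC         .merge(df.alias("update"), cond)
# MAGIC         .whenNotMatchedInsertAll()
# MAGIC         .execute())
# MAGIC ```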
# COMMAND ----------
checkpoint = f"{base_dir}/checkpoints/threatintel-bronze/"
# COMMAND ----------
# Ingest everything currently available, then stop (incremental-batch semantics)
(
    df2.writeStream
    .option("checkpointLocation", checkpoint)
    .trigger(availableNow=True)
    .foreachBatch(perform_foreach_batch)
    .start()
)
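# COMMAND ----------
# Illustrative follow-up (an assumption, not part of the original job): block until
# the availableNow run finishes, then look at what landed in the bronze table
for q in spark.streams.active:
    q.awaitTermination()
bronze = spark.read.format("delta").load(f"{base_dir}/bronze/threatintel/")
display(bronze.groupBy("dataset", "date").count().orderBy("date"))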