digraph G {
0 [labelType="html" label="<br><b>WriteToDataSourceV2</b><br><br>"];
subgraph cluster1 {
isCluster="true";
label="WholeStageCodegen (7)\n \nduration: 57 ms";
2 [labelType="html" label="<b>Sort</b><br><br>sort time: 0 ms<br>peak memory: 64.0 KiB<br>spill size: 0.0 B"];
}
3 [labelType="html" label="<br><b>Exchange</b><br><br>"];
subgraph cluster4 {
isCluster="true";
label="WholeStageCodegen (6)\n \nduration: total (min, med, max (stageId: taskId))\n1.0 s (0 ms, 1 ms, 20 ms (stage 1.0: task 3))";
5 [labelType="html" label="<b>HashAggregate</b><br><br>time in aggregation build total (min, med, max (stageId: taskId))<br>0 ms (0 ms, 0 ms, 0 ms (stage 1.0: task 2))<br>peak memory total (min, med, max (stageId: taskId))<br>100.0 MiB (256.0 KiB, 256.0 KiB, 256.0 KiB (stage 1.0: task 2))"];
}
6 [labelType="html" label="<b>StateStoreSave</b><br><br>memory used by state total (min, med, max (stageId: taskId))<br>81.3 KiB (208.0 B, 208.0 B, 208.0 B (stage 1.0: task 2))<br>estimated size of state only on current version total (min, med, max (stageId: taskId))<br>25.0 KiB (64.0 B, 64.0 B, 64.0 B (stage 1.0: task 2))<br>time to commit changes total (min, med, max (stageId: taskId))<br>16.4 s (26 ms, 29 ms, 114 ms (stage 1.0: task 3))<br>time to remove total (min, med, max (stageId: taskId))<br>0 ms (0 ms, 0 ms, 0 ms (stage 1.0: task 2))<br>time to update total (min, med, max (stageId: taskId))<br>413 ms (0 ms, 1 ms, 18 ms (stage 1.0: task 27))"];
subgraph cluster7 {
isCluster="true";
label="WholeStageCodegen (5)\n \nduration: total (min, med, max (stageId: taskId))\n428 ms (0 ms, 1 ms, 18 ms (stage 1.0: task 27))";
8 [labelType="html" label="<b>HashAggregate</b><br><br>time in aggregation build total (min, med, max (stageId: taskId))<br>0 ms (0 ms, 0 ms, 0 ms (stage 1.0: task 2))<br>peak memory total (min, med, max (stageId: taskId))<br>100.0 MiB (256.0 KiB, 256.0 KiB, 256.0 KiB (stage 1.0: task 2))"];
}
9 [labelType="html" label="<br><b>StateStoreRestore</b><br><br>"];
subgraph cluster10 {
isCluster="true";
label="WholeStageCodegen (4)\n \nduration: total (min, med, max (stageId: taskId))\n1.7 s (1 ms, 3 ms, 53 ms (stage 1.0: task 3))";
11 [labelType="html" label="<b>HashAggregate</b><br><br>time in aggregation build total (min, med, max (stageId: taskId))<br>1 ms (0 ms, 0 ms, 1 ms (stage 1.0: task 3))<br>peak memory total (min, med, max (stageId: taskId))<br>100.0 MiB (256.0 KiB, 256.0 KiB, 256.0 KiB (stage 1.0: task 2))"];
}
12 [labelType="html" label="<br><b>Exchange</b><br><br>"];
subgraph cluster13 {
isCluster="true";
label="WholeStageCodegen (3)\n \nduration: total (min, med, max (stageId: taskId))\n228 ms (84 ms, 144 ms, 144 ms (stage 0.0: task 0))";
14 [labelType="html" label="<b>HashAggregate</b><br><br>time in aggregation build total (min, med, max (stageId: taskId))<br>11 ms (4 ms, 7 ms, 7 ms (stage 0.0: task 0))<br>peak memory total (min, med, max (stageId: taskId))<br>512.0 KiB (256.0 KiB, 256.0 KiB, 256.0 KiB (stage 0.0: task 1))"];
15 [labelType="html" label="<br><b>Project</b><br><br>"];
16 [labelType="html" label="<br><b>Filter</b><br><br>"];
17 [labelType="html" label="<br><b>Expand</b><br><br>"];
18 [labelType="html" label="<br><b>Project</b><br><br>"];
}
19 [labelType="html" label="<br><b>EventTimeWatermark</b><br><br>"];
subgraph cluster20 {
isCluster="true";
label="WholeStageCodegen (2)\n \nduration: total (min, med, max (stageId: taskId))\n430 ms (148 ms, 282 ms, 282 ms (stage 0.0: task 0))";
21 [labelType="html" label="<br><b>Project</b><br><br>"];
}
22 [labelType="html" label="<br><b>Generate</b><br><br>"];
subgraph cluster23 {
isCluster="true";
label="WholeStageCodegen (1)\n \nduration: total (min, med, max (stageId: taskId))\n633 ms (220 ms, 413 ms, 413 ms (stage 0.0: task 0))";
24 [labelType="html" label="<br><b>Project</b><br><br>"];
}
25 [labelType="html" label="<br><b>MicroBatchScan</b><br><br>"];
2->0;
3->2;
5->3;
6->5;
8->6;
9->8;
11->9;
12->11;
14->12;
15->14;
16->15;
17->16;
18->17;
19->18;
21->19;
22->21;
24->22;
25->24;
}
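The DOT source above is the operator DAG that the Spark UI renders on the SQL tab for this micro-batch: each numbered node is a physical operator, each cluster is a whole-stage-codegen stage, and edges point from child operator to parent. The labelType="html" attribute and the <br>/<b> tags inside the labels are conventions of the UI's dagre-d3 renderer, so a plain Graphviz render shows them as literal text, but it is still enough to inspect the topology offline. A minimal Python sketch, assuming the graph is saved as plan.dot and the graphviz package is installed (both file names are illustrative, not part of the original output):

# Render the exported Spark UI plan graph with standard Graphviz.
from graphviz import Source

with open("plan.dot") as f:
    dot_text = f.read()

# Unknown attributes such as labelType="html" are ignored by Graphviz,
# so node labels appear as plain text with literal <br>/<b> markup.
Source(dot_text).render("spark_plan", format="svg", cleanup=True)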
WriteToDataSourceV2 org.apache.spark.sql.execution.streaming.sources.MicroBatchWrite@10942346
Sort [total_bytes#68 DESC NULLS LAST], true, 0
WholeStageCodegen (7)
Exchange rangepartitioning(total_bytes#68 DESC NULLS LAST, 200), true, [id=#103]
HashAggregate(keys=[window#69-T3000ms, hostname#12], functions=[sum(cast(bytes#47 as double))])
WholeStageCodegen (6)
StateStoreSave [window#69-T3000ms, hostname#12], state info [ checkpoint = file:/tmp/spark-checkpoints/state, runId = e0c08a0a-5cf3-468b-888d-995173b23fbd, opId = 0, ver = 0, numPartitions = 200], Complete, 0, 2
HashAggregate(keys=[window#69-T3000ms, hostname#12], functions=[merge_sum(cast(bytes#47 as double))])
WholeStageCodegen (5)
StateStoreRestore [window#69-T3000ms, hostname#12], state info [ checkpoint = file:/tmp/spark-checkpoints/state, runId = e0c08a0a-5cf3-468b-888d-995173b23fbd, opId = 0, ver = 0, numPartitions = 200], 2
HashAggregate(keys=[window#69-T3000ms, hostname#12], functions=[merge_sum(cast(bytes#47 as double))])
WholeStageCodegen (4)
Exchange hashpartitioning(window#69-T3000ms, hostname#12, 200), true, [id=#91]
HashAggregate(keys=[window#69-T3000ms, hostname#12], functions=[partial_sum(cast(bytes#47 as double))])
Project [window#69-T3000ms, hostname#12, bytes#47]
Filter (((isnotnull(timestamp#1-T3000ms) AND isnotnull(window#69-T3000ms)) AND (timestamp#1-T3000ms >= window#69-T3000ms.start)) AND (timestamp#1-T3000ms < window#69-T3000ms.end))
Expand [ArrayBuffer(named_struct(start, precisetimestampconversion(((((CASE WHEN (cast(CEIL((cast((precisetimestampconversion(timestamp#1-T3000ms, TimestampType, LongType) - 0) as double) / 3.0E7)) as double) = (cast((precisetimestampconversion(timestamp#1-T3000ms, TimestampType, LongType) - 0) as double) / 3.0E7)) THEN (CEIL((cast((precisetimestampconversion(timestamp#1-T3000ms, TimestampType, LongType) - 0) as double) / 3.0E7)) + 1) ELSE CEIL((cast((precisetimestampconversion(timestamp#1-T3000ms, TimestampType, LongType) - 0) as double) / 3.0E7)) END + 0) - 2) * 30000000) + 0), LongType, TimestampType), end, precisetimestampconversion(((((CASE WHEN (cast(CEIL((cast((precisetimestampconversion(timestamp#1-T3000ms, TimestampType, LongType) - 0) as double) / 3.0E7)) as double) = (cast((precisetimestampconversion(timestamp#1-T3000ms, TimestampType, LongType) - 0) as double) / 3.0E7)) THEN (CEIL((cast((precisetimestampconversion(timestamp#1-T3000ms, TimestampType, LongType) - 0) as double) / 3.0E7)) + 1) ELSE CEIL((cast((precisetimestampconversion(timestamp#1-T3000ms, TimestampType, LongType) - 0) as double) / 3.0E7)) END + 0) - 2) * 30000000) + 60000000), LongType, TimestampType)), timestamp#1-T3000ms, hostname#12, bytes#47), ArrayBuffer(named_struct(start, precisetimestampconversion(((((CASE WHEN (cast(CEIL((cast((precisetimestampconversion(timestamp#1-T3000ms, TimestampType, LongType) - 0) as double) / 3.0E7)) as double) = (cast((precisetimestampconversion(timestamp#1-T3000ms, TimestampType, LongType) - 0) as double) / 3.0E7)) THEN (CEIL((cast((precisetimestampconversion(timestamp#1-T3000ms, TimestampType, LongType) - 0) as double) / 3.0E7)) + 1) ELSE CEIL((cast((precisetimestampconversion(timestamp#1-T3000ms, TimestampType, LongType) - 0) as double) / 3.0E7)) END + 1) - 2) * 30000000) + 0), LongType, TimestampType), end, precisetimestampconversion(((((CASE WHEN (cast(CEIL((cast((precisetimestampconversion(timestamp#1-T3000ms, TimestampType, LongType) - 0) as double) / 3.0E7)) as double) = (cast((precisetimestampconversion(timestamp#1-T3000ms, TimestampType, LongType) - 0) as double) / 3.0E7)) THEN (CEIL((cast((precisetimestampconversion(timestamp#1-T3000ms, TimestampType, LongType) - 0) as double) / 3.0E7)) + 1) ELSE CEIL((cast((precisetimestampconversion(timestamp#1-T3000ms, TimestampType, LongType) - 0) as double) / 3.0E7)) END + 1) - 2) * 30000000) + 60000000), LongType, TimestampType)), timestamp#1-T3000ms, hostname#12, bytes#47)], [window#69-T3000ms, timestamp#1-T3000ms, hostname#12, bytes#47]
Project [timestamp#1-T3000ms, split(logs#5, ,, -1)[1] AS hostname#12, split(logs#5, ,, -1)[6] AS bytes#47]
WholeStageCodegen (3)
EventTimeWatermark timestamp#1: timestamp, 3 seconds
Project [logs#5, timestamp#1]
WholeStageCodegen (2)
Generate explode(split(value#0, , -1)), [timestamp#1], false, [logs#5]
Project [value#0, timestamp#1]
WholeStageCodegen (1)
MicroBatchScan[value#0, timestamp#1] class org.apache.spark.sql.execution.streaming.sources.TextSocketTable$$anon$1
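Read bottom-up, the node descriptions above outline a Structured Streaming query: the TextSocketV2 source at stream-emulator.data-science-tools.svc.cluster.local:5551 yields value and timestamp, each value is exploded into individual comma-separated log lines, hostname and bytes are extracted, a 3-second watermark is applied, bytes are summed per hostname over a 1-minute window sliding every 30 seconds (hence the two-way Expand), and the result is sorted by total_bytes with the aggregation checkpointed in Complete output mode. A PySpark sketch of a query with this shape follows; the host, port, watermark, window, column names, and checkpoint location come from the plan, while the newline split delimiter, the console sink, and other session settings are assumptions:

from pyspark.sql import SparkSession
from pyspark.sql import functions as F

spark = SparkSession.builder.appName("bytes-per-host-per-window").getOrCreate()

# Socket source emitting value plus an ingestion timestamp,
# as in MicroBatchScan[value#0, timestamp#1].
raw = (spark.readStream
       .format("socket")
       .option("host", "stream-emulator.data-science-tools.svc.cluster.local")
       .option("port", 5551)
       .option("includeTimestamp", True)
       .load())

# Generate explode(split(value, ...)): one log line per row; the split delimiter
# is not visible in the printed plan, so a newline is assumed here.
logs = (raw
        .select(F.explode(F.split("value", "\n")).alias("logs"), "timestamp")
        .withWatermark("timestamp", "3 seconds"))  # EventTimeWatermark timestamp, 3 seconds

# Project chain: split each comma-separated log line into named fields.
fields = F.split("logs", ",")
parsed = (logs
          .withColumn("idx", fields.getItem(0))
          .withColumn("hostname", fields.getItem(1))
          .withColumn("time", fields.getItem(2))
          .withColumn("method", fields.getItem(3))
          .withColumn("url", fields.getItem(4))
          .withColumn("responsecode", fields.getItem(5))
          .withColumn("bytes", fields.getItem(6)))

# Sliding window of 1 minute every 30 seconds (the two-way Expand), summing bytes
# per host; the sort is legal because the query runs in Complete output mode.
agg = (parsed
       .groupBy(F.window("timestamp", "1 minute", "30 seconds"), "hostname")
       .agg(F.sum(F.col("bytes").cast("double")).alias("total_bytes"))
       .orderBy(F.col("total_bytes").desc()))

# StateStoreSave ... Complete with checkpoint file:/tmp/spark-checkpoints/state;
# the console sink itself is an assumption (the plan only shows a MicroBatchWrite).
query = (agg.writeStream
         .outputMode("complete")
         .format("console")
         .option("checkpointLocation", "/tmp/spark-checkpoints")
         .start())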
== Parsed Logical Plan ==
WriteToDataSourceV2 org.apache.spark.sql.execution.streaming.sources.MicroBatchWrite@10942346
+- Sort [total_bytes#68 DESC NULLS LAST], true
+- Aggregate [window#69-T3000ms, hostname#12], [window#69-T3000ms AS window#57-T3000ms, hostname#12, sum(cast(bytes#47 as double)) AS total_bytes#68]
+- Filter ((timestamp#1-T3000ms >= window#69-T3000ms.start) AND (timestamp#1-T3000ms < window#69-T3000ms.end))
+- Expand [ArrayBuffer(named_struct(start, precisetimestampconversion(((((CASE WHEN (cast(CEIL((cast((precisetimestampconversion(timestamp#1-T3000ms, TimestampType, LongType) - 0) as double) / cast(30000000 as double))) as double) = (cast((precisetimestampconversion(timestamp#1-T3000ms, TimestampType, LongType) - 0) as double) / cast(30000000 as double))) THEN (CEIL((cast((precisetimestampconversion(timestamp#1-T3000ms, TimestampType, LongType) - 0) as double) / cast(30000000 as double))) + cast(1 as bigint)) ELSE CEIL((cast((precisetimestampconversion(timestamp#1-T3000ms, TimestampType, LongType) - 0) as double) / cast(30000000 as double))) END + cast(0 as bigint)) - cast(2 as bigint)) * 30000000) + 0), LongType, TimestampType), end, precisetimestampconversion((((((CASE WHEN (cast(CEIL((cast((precisetimestampconversion(timestamp#1-T3000ms, TimestampType, LongType) - 0) as double) / cast(30000000 as double))) as double) = (cast((precisetimestampconversion(timestamp#1-T3000ms, TimestampType, LongType) - 0) as double) / cast(30000000 as double))) THEN (CEIL((cast((precisetimestampconversion(timestamp#1-T3000ms, TimestampType, LongType) - 0) as double) / cast(30000000 as double))) + cast(1 as bigint)) ELSE CEIL((cast((precisetimestampconversion(timestamp#1-T3000ms, TimestampType, LongType) - 0) as double) / cast(30000000 as double))) END + cast(0 as bigint)) - cast(2 as bigint)) * 30000000) + 0) + 60000000), LongType, TimestampType)), logs#5, timestamp#1-T3000ms, idx#8, hostname#12, time#17, method#23, url#30, responsecode#38, bytes#47), ArrayBuffer(named_struct(start, precisetimestampconversion(((((CASE WHEN (cast(CEIL((cast((precisetimestampconversion(timestamp#1-T3000ms, TimestampType, LongType) - 0) as double) / cast(30000000 as double))) as double) = (cast((precisetimestampconversion(timestamp#1-T3000ms, TimestampType, LongType) - 0) as double) / cast(30000000 as double))) THEN (CEIL((cast((precisetimestampconversion(timestamp#1-T3000ms, TimestampType, LongType) - 0) as double) / cast(30000000 as double))) + cast(1 as bigint)) ELSE CEIL((cast((precisetimestampconversion(timestamp#1-T3000ms, TimestampType, LongType) - 0) as double) / cast(30000000 as double))) END + cast(1 as bigint)) - cast(2 as bigint)) * 30000000) + 0), LongType, TimestampType), end, precisetimestampconversion((((((CASE WHEN (cast(CEIL((cast((precisetimestampconversion(timestamp#1-T3000ms, TimestampType, LongType) - 0) as double) / cast(30000000 as double))) as double) = (cast((precisetimestampconversion(timestamp#1-T3000ms, TimestampType, LongType) - 0) as double) / cast(30000000 as double))) THEN (CEIL((cast((precisetimestampconversion(timestamp#1-T3000ms, TimestampType, LongType) - 0) as double) / cast(30000000 as double))) + cast(1 as bigint)) ELSE CEIL((cast((precisetimestampconversion(timestamp#1-T3000ms, TimestampType, LongType) - 0) as double) / cast(30000000 as double))) END + cast(1 as bigint)) - cast(2 as bigint)) * 30000000) + 0) + 60000000), LongType, TimestampType)), logs#5, timestamp#1-T3000ms, idx#8, hostname#12, time#17, method#23, url#30, responsecode#38, bytes#47)], [window#69-T3000ms, logs#5, timestamp#1-T3000ms, idx#8, hostname#12, time#17, method#23, url#30, responsecode#38, bytes#47]
+- Project [logs#5, timestamp#1-T3000ms, idx#8, hostname#12, time#17, method#23, url#30, responsecode#38, split(logs#5, ,, -1)[6] AS bytes#47]
+- Project [logs#5, timestamp#1-T3000ms, idx#8, hostname#12, time#17, method#23, url#30, split(logs#5, ,, -1)[5] AS responsecode#38]
+- Project [logs#5, timestamp#1-T3000ms, idx#8, hostname#12, time#17, method#23, split(logs#5, ,, -1)[4] AS url#30]
+- Project [logs#5, timestamp#1-T3000ms, idx#8, hostname#12, time#17, split(logs#5, ,, -1)[3] AS method#23]
+- Project [logs#5, timestamp#1-T3000ms, idx#8, hostname#12, split(logs#5, ,, -1)[2] AS time#17]
+- Project [logs#5, timestamp#1-T3000ms, idx#8, split(logs#5, ,, -1)[1] AS hostname#12]
+- Project [logs#5, timestamp#1-T3000ms, split(logs#5, ,, -1)[0] AS idx#8]
+- EventTimeWatermark timestamp#1: timestamp, 3 seconds
+- Project [logs#5, timestamp#1]
+- Generate explode(split(value#0, , -1)), false, [logs#5]
+- StreamingDataSourceV2Relation [value#0, timestamp#1], org.apache.spark.sql.execution.streaming.sources.TextSocketTable$$anon$1@64e6357e, TextSocketV2[host: stream-emulator.data-science-tools.svc.cluster.local, port: 5551], -1, -1
== Analyzed Logical Plan ==
WriteToDataSourceV2 org.apache.spark.sql.execution.streaming.sources.MicroBatchWrite@10942346
+- Sort [total_bytes#68 DESC NULLS LAST], true
+- Aggregate [window#69-T3000ms, hostname#12], [window#69-T3000ms AS window#57-T3000ms, hostname#12, sum(cast(bytes#47 as double)) AS total_bytes#68]
+- Filter ((timestamp#1-T3000ms >= window#69-T3000ms.start) AND (timestamp#1-T3000ms < window#69-T3000ms.end))
+- Expand [ArrayBuffer(named_struct(start, precisetimestampconversion(((((CASE WHEN (cast(CEIL((cast((precisetimestampconversion(timestamp#1-T3000ms, TimestampType, LongType) - 0) as double) / cast(30000000 as double))) as double) = (cast((precisetimestampconversion(timestamp#1-T3000ms, TimestampType, LongType) - 0) as double) / cast(30000000 as double))) THEN (CEIL((cast((precisetimestampconversion(timestamp#1-T3000ms, TimestampType, LongType) - 0) as double) / cast(30000000 as double))) + cast(1 as bigint)) ELSE CEIL((cast((precisetimestampconversion(timestamp#1-T3000ms, TimestampType, LongType) - 0) as double) / cast(30000000 as double))) END + cast(0 as bigint)) - cast(2 as bigint)) * 30000000) + 0), LongType, TimestampType), end, precisetimestampconversion((((((CASE WHEN (cast(CEIL((cast((precisetimestampconversion(timestamp#1-T3000ms, TimestampType, LongType) - 0) as double) / cast(30000000 as double))) as double) = (cast((precisetimestampconversion(timestamp#1-T3000ms, TimestampType, LongType) - 0) as double) / cast(30000000 as double))) THEN (CEIL((cast((precisetimestampconversion(timestamp#1-T3000ms, TimestampType, LongType) - 0) as double) / cast(30000000 as double))) + cast(1 as bigint)) ELSE CEIL((cast((precisetimestampconversion(timestamp#1-T3000ms, TimestampType, LongType) - 0) as double) / cast(30000000 as double))) END + cast(0 as bigint)) - cast(2 as bigint)) * 30000000) + 0) + 60000000), LongType, TimestampType)), logs#5, timestamp#1-T3000ms, idx#8, hostname#12, time#17, method#23, url#30, responsecode#38, bytes#47), ArrayBuffer(named_struct(start, precisetimestampconversion(((((CASE WHEN (cast(CEIL((cast((precisetimestampconversion(timestamp#1-T3000ms, TimestampType, LongType) - 0) as double) / cast(30000000 as double))) as double) = (cast((precisetimestampconversion(timestamp#1-T3000ms, TimestampType, LongType) - 0) as double) / cast(30000000 as double))) THEN (CEIL((cast((precisetimestampconversion(timestamp#1-T3000ms, TimestampType, LongType) - 0) as double) / cast(30000000 as double))) + cast(1 as bigint)) ELSE CEIL((cast((precisetimestampconversion(timestamp#1-T3000ms, TimestampType, LongType) - 0) as double) / cast(30000000 as double))) END + cast(1 as bigint)) - cast(2 as bigint)) * 30000000) + 0), LongType, TimestampType), end, precisetimestampconversion((((((CASE WHEN (cast(CEIL((cast((precisetimestampconversion(timestamp#1-T3000ms, TimestampType, LongType) - 0) as double) / cast(30000000 as double))) as double) = (cast((precisetimestampconversion(timestamp#1-T3000ms, TimestampType, LongType) - 0) as double) / cast(30000000 as double))) THEN (CEIL((cast((precisetimestampconversion(timestamp#1-T3000ms, TimestampType, LongType) - 0) as double) / cast(30000000 as double))) + cast(1 as bigint)) ELSE CEIL((cast((precisetimestampconversion(timestamp#1-T3000ms, TimestampType, LongType) - 0) as double) / cast(30000000 as double))) END + cast(1 as bigint)) - cast(2 as bigint)) * 30000000) + 0) + 60000000), LongType, TimestampType)), logs#5, timestamp#1-T3000ms, idx#8, hostname#12, time#17, method#23, url#30, responsecode#38, bytes#47)], [window#69-T3000ms, logs#5, timestamp#1-T3000ms, idx#8, hostname#12, time#17, method#23, url#30, responsecode#38, bytes#47]
+- Project [logs#5, timestamp#1-T3000ms, idx#8, hostname#12, time#17, method#23, url#30, responsecode#38, split(logs#5, ,, -1)[6] AS bytes#47]
+- Project [logs#5, timestamp#1-T3000ms, idx#8, hostname#12, time#17, method#23, url#30, split(logs#5, ,, -1)[5] AS responsecode#38]
+- Project [logs#5, timestamp#1-T3000ms, idx#8, hostname#12, time#17, method#23, split(logs#5, ,, -1)[4] AS url#30]
+- Project [logs#5, timestamp#1-T3000ms, idx#8, hostname#12, time#17, split(logs#5, ,, -1)[3] AS method#23]
+- Project [logs#5, timestamp#1-T3000ms, idx#8, hostname#12, split(logs#5, ,, -1)[2] AS time#17]
+- Project [logs#5, timestamp#1-T3000ms, idx#8, split(logs#5, ,, -1)[1] AS hostname#12]
+- Project [logs#5, timestamp#1-T3000ms, split(logs#5, ,, -1)[0] AS idx#8]
+- EventTimeWatermark timestamp#1: timestamp, 3 seconds
+- Project [logs#5, timestamp#1]
+- Generate explode(split(value#0, , -1)), false, [logs#5]
+- StreamingDataSourceV2Relation [value#0, timestamp#1], org.apache.spark.sql.execution.streaming.sources.TextSocketTable$$anon$1@64e6357e, TextSocketV2[host: stream-emulator.data-science-tools.svc.cluster.local, port: 5551], -1, -1
== Optimized Logical Plan ==
WriteToDataSourceV2 org.apache.spark.sql.execution.streaming.sources.MicroBatchWrite@10942346
+- Sort [total_bytes#68 DESC NULLS LAST], true
+- Aggregate [window#69-T3000ms, hostname#12], [window#69-T3000ms AS window#57-T3000ms, hostname#12, sum(cast(bytes#47 as double)) AS total_bytes#68]
+- Project [window#69-T3000ms, hostname#12, bytes#47]
+- Filter (((isnotnull(timestamp#1-T3000ms) AND isnotnull(window#69-T3000ms)) AND (timestamp#1-T3000ms >= window#69-T3000ms.start)) AND (timestamp#1-T3000ms < window#69-T3000ms.end))
+- Expand [ArrayBuffer(named_struct(start, precisetimestampconversion(((((CASE WHEN (cast(CEIL((cast((precisetimestampconversion(timestamp#1-T3000ms, TimestampType, LongType) - 0) as double) / 3.0E7)) as double) = (cast((precisetimestampconversion(timestamp#1-T3000ms, TimestampType, LongType) - 0) as double) / 3.0E7)) THEN (CEIL((cast((precisetimestampconversion(timestamp#1-T3000ms, TimestampType, LongType) - 0) as double) / 3.0E7)) + 1) ELSE CEIL((cast((precisetimestampconversion(timestamp#1-T3000ms, TimestampType, LongType) - 0) as double) / 3.0E7)) END + 0) - 2) * 30000000) + 0), LongType, TimestampType), end, precisetimestampconversion(((((CASE WHEN (cast(CEIL((cast((precisetimestampconversion(timestamp#1-T3000ms, TimestampType, LongType) - 0) as double) / 3.0E7)) as double) = (cast((precisetimestampconversion(timestamp#1-T3000ms, TimestampType, LongType) - 0) as double) / 3.0E7)) THEN (CEIL((cast((precisetimestampconversion(timestamp#1-T3000ms, TimestampType, LongType) - 0) as double) / 3.0E7)) + 1) ELSE CEIL((cast((precisetimestampconversion(timestamp#1-T3000ms, TimestampType, LongType) - 0) as double) / 3.0E7)) END + 0) - 2) * 30000000) + 60000000), LongType, TimestampType)), timestamp#1-T3000ms, hostname#12, bytes#47), ArrayBuffer(named_struct(start, precisetimestampconversion(((((CASE WHEN (cast(CEIL((cast((precisetimestampconversion(timestamp#1-T3000ms, TimestampType, LongType) - 0) as double) / 3.0E7)) as double) = (cast((precisetimestampconversion(timestamp#1-T3000ms, TimestampType, LongType) - 0) as double) / 3.0E7)) THEN (CEIL((cast((precisetimestampconversion(timestamp#1-T3000ms, TimestampType, LongType) - 0) as double) / 3.0E7)) + 1) ELSE CEIL((cast((precisetimestampconversion(timestamp#1-T3000ms, TimestampType, LongType) - 0) as double) / 3.0E7)) END + 1) - 2) * 30000000) + 0), LongType, TimestampType), end, precisetimestampconversion(((((CASE WHEN (cast(CEIL((cast((precisetimestampconversion(timestamp#1-T3000ms, TimestampType, LongType) - 0) as double) / 3.0E7)) as double) = (cast((precisetimestampconversion(timestamp#1-T3000ms, TimestampType, LongType) - 0) as double) / 3.0E7)) THEN (CEIL((cast((precisetimestampconversion(timestamp#1-T3000ms, TimestampType, LongType) - 0) as double) / 3.0E7)) + 1) ELSE CEIL((cast((precisetimestampconversion(timestamp#1-T3000ms, TimestampType, LongType) - 0) as double) / 3.0E7)) END + 1) - 2) * 30000000) + 60000000), LongType, TimestampType)), timestamp#1-T3000ms, hostname#12, bytes#47)], [window#69-T3000ms, timestamp#1-T3000ms, hostname#12, bytes#47]
+- Project [timestamp#1-T3000ms, split(logs#5, ,, -1)[1] AS hostname#12, split(logs#5, ,, -1)[6] AS bytes#47]
+- EventTimeWatermark timestamp#1: timestamp, 3 seconds
+- Project [logs#5, timestamp#1]
+- Generate explode(split(value#0, , -1)), [0], false, [logs#5]
+- StreamingDataSourceV2Relation [value#0, timestamp#1], org.apache.spark.sql.execution.streaming.sources.TextSocketTable$$anon$1@64e6357e, TextSocketV2[host: stream-emulator.data-science-tools.svc.cluster.local, port: 5551], -1, -1
== Physical Plan ==
WriteToDataSourceV2 org.apache.spark.sql.execution.streaming.sources.MicroBatchWrite@10942346
+- *(7) Sort [total_bytes#68 DESC NULLS LAST], true, 0
+- Exchange rangepartitioning(total_bytes#68 DESC NULLS LAST, 200), true, [id=#103]
+- *(6) HashAggregate(keys=[window#69-T3000ms, hostname#12], functions=[sum(cast(bytes#47 as double))], output=[window#57-T3000ms, hostname#12, total_bytes#68])
+- StateStoreSave [window#69-T3000ms, hostname#12], state info [ checkpoint = file:/tmp/spark-checkpoints/state, runId = e0c08a0a-5cf3-468b-888d-995173b23fbd, opId = 0, ver = 0, numPartitions = 200], Complete, 0, 2
+- *(5) HashAggregate(keys=[window#69-T3000ms, hostname#12], functions=[merge_sum(cast(bytes#47 as double))], output=[window#69-T3000ms, hostname#12, sum#77])
+- StateStoreRestore [window#69-T3000ms, hostname#12], state info [ checkpoint = file:/tmp/spark-checkpoints/state, runId = e0c08a0a-5cf3-468b-888d-995173b23fbd, opId = 0, ver = 0, numPartitions = 200], 2
+- *(4) HashAggregate(keys=[window#69-T3000ms, hostname#12], functions=[merge_sum(cast(bytes#47 as double))], output=[window#69-T3000ms, hostname#12, sum#77])
+- Exchange hashpartitioning(window#69-T3000ms, hostname#12, 200), true, [id=#91]
+- *(3) HashAggregate(keys=[window#69-T3000ms, hostname#12], functions=[partial_sum(cast(bytes#47 as double))], output=[window#69-T3000ms, hostname#12, sum#77])
+- *(3) Project [window#69-T3000ms, hostname#12, bytes#47]
+- *(3) Filter (((isnotnull(timestamp#1-T3000ms) AND isnotnull(window#69-T3000ms)) AND (timestamp#1-T3000ms >= window#69-T3000ms.start)) AND (timestamp#1-T3000ms < window#69-T3000ms.end))
+- *(3) Expand [ArrayBuffer(named_struct(start, precisetimestampconversion(((((CASE WHEN (cast(CEIL((cast((precisetimestampconversion(timestamp#1-T3000ms, TimestampType, LongType) - 0) as double) / 3.0E7)) as double) = (cast((precisetimestampconversion(timestamp#1-T3000ms, TimestampType, LongType) - 0) as double) / 3.0E7)) THEN (CEIL((cast((precisetimestampconversion(timestamp#1-T3000ms, TimestampType, LongType) - 0) as double) / 3.0E7)) + 1) ELSE CEIL((cast((precisetimestampconversion(timestamp#1-T3000ms, TimestampType, LongType) - 0) as double) / 3.0E7)) END + 0) - 2) * 30000000) + 0), LongType, TimestampType), end, precisetimestampconversion(((((CASE WHEN (cast(CEIL((cast((precisetimestampconversion(timestamp#1-T3000ms, TimestampType, LongType) - 0) as double) / 3.0E7)) as double) = (cast((precisetimestampconversion(timestamp#1-T3000ms, TimestampType, LongType) - 0) as double) / 3.0E7)) THEN (CEIL((cast((precisetimestampconversion(timestamp#1-T3000ms, TimestampType, LongType) - 0) as double) / 3.0E7)) + 1) ELSE CEIL((cast((precisetimestampconversion(timestamp#1-T3000ms, TimestampType, LongType) - 0) as double) / 3.0E7)) END + 0) - 2) * 30000000) + 60000000), LongType, TimestampType)), timestamp#1-T3000ms, hostname#12, bytes#47), ArrayBuffer(named_struct(start, precisetimestampconversion(((((CASE WHEN (cast(CEIL((cast((precisetimestampconversion(timestamp#1-T3000ms, TimestampType, LongType) - 0) as double) / 3.0E7)) as double) = (cast((precisetimestampconversion(timestamp#1-T3000ms, TimestampType, LongType) - 0) as double) / 3.0E7)) THEN (CEIL((cast((precisetimestampconversion(timestamp#1-T3000ms, TimestampType, LongType) - 0) as double) / 3.0E7)) + 1) ELSE CEIL((cast((precisetimestampconversion(timestamp#1-T3000ms, TimestampType, LongType) - 0) as double) / 3.0E7)) END + 1) - 2) * 30000000) + 0), LongType, TimestampType), end, precisetimestampconversion(((((CASE WHEN (cast(CEIL((cast((precisetimestampconversion(timestamp#1-T3000ms, TimestampType, LongType) - 0) as double) / 3.0E7)) as double) = (cast((precisetimestampconversion(timestamp#1-T3000ms, TimestampType, LongType) - 0) as double) / 3.0E7)) THEN (CEIL((cast((precisetimestampconversion(timestamp#1-T3000ms, TimestampType, LongType) - 0) as double) / 3.0E7)) + 1) ELSE CEIL((cast((precisetimestampconversion(timestamp#1-T3000ms, TimestampType, LongType) - 0) as double) / 3.0E7)) END + 1) - 2) * 30000000) + 60000000), LongType, TimestampType)), timestamp#1-T3000ms, hostname#12, bytes#47)], [window#69-T3000ms, timestamp#1-T3000ms, hostname#12, bytes#47]
+- *(3) Project [timestamp#1-T3000ms, split(logs#5, ,, -1)[1] AS hostname#12, split(logs#5, ,, -1)[6] AS bytes#47]
+- EventTimeWatermark timestamp#1: timestamp, 3 seconds
+- *(2) Project [logs#5, timestamp#1]
+- Generate explode(split(value#0, , -1)), [timestamp#1], false, [logs#5]
+- *(1) Project [value#0, timestamp#1]
+- MicroBatchScan[value#0, timestamp#1] class org.apache.spark.sql.execution.streaming.sources.TextSocketTable$$anon$1
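The four plan sections above (parsed, analyzed, optimized, physical) are what Spark reports for the most recent micro-batch of the query; the same text, together with the DAG at the top of this section, appears on the SQL tab of the Spark UI for the corresponding execution. A one-line sketch, assuming the query handle from the reconstruction above:

# Prints only the physical plan by default; extended=True prints the full
# parsed/analyzed/optimized/physical dump for the last executed micro-batch.
query.explain(extended=True)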