digraph G {
0 [labelType="html" label="<br><b>WriteToDataSourceV2</b><br><br>"];
subgraph cluster1 {
isCluster="true";
label="WholeStageCodegen (7)\n \nduration: total (min, med, max (stageId: taskId))\n94 ms (0 ms, 1 ms, 93 ms (stage 33.0: task 1249))";
2 [labelType="html" label="<b>Sort</b><br><br>sort time total (min, med, max (stageId: taskId))<br>1 ms (0 ms, 0 ms, 1 ms (stage 33.0: task 1249))<br>peak memory total (min, med, max (stageId: taskId))<br>128.2 MiB (64.0 KiB, 64.1 MiB, 64.1 MiB (stage 33.0: task 1248))<br>spill size total (min, med, max (stageId: taskId))<br>0.0 B (0.0 B, 0.0 B, 0.0 B (stage 33.0: task 1248))"];
}
3 [labelType="html" label="<b>Exchange</b><br><br>shuffle records written: 6<br>shuffle write time total (min, med, max (stageId: taskId))<br>2 ms (0 ms, 0 ms, 0 ms (stage 32.0: task 1055))<br>records read: 6<br>local bytes read total (min, med, max (stageId: taskId))<br>399.0 B (0.0 B, 185.0 B, 214.0 B (stage 33.0: task 1248))<br>fetch wait time total (min, med, max (stageId: taskId))<br>0 ms (0 ms, 0 ms, 0 ms (stage 33.0: task 1248))<br>remote bytes read total (min, med, max (stageId: taskId))<br>161.0 B (0.0 B, 0.0 B, 161.0 B (stage 33.0: task 1249))<br>local blocks read: 4<br>remote blocks read: 2<br>data size total (min, med, max (stageId: taskId))<br>384.0 B (0.0 B, 0.0 B, 72.0 B (stage 32.0: task 1106))<br>shuffle bytes written total (min, med, max (stageId: taskId))<br>560.0 B (0.0 B, 0.0 B, 107.0 B (stage 32.0: task 1106))"];
subgraph cluster4 {
isCluster="true";
label="WholeStageCodegen (6)\n \nduration: total (min, med, max (stageId: taskId))\n752 ms (1 ms, 1 ms, 27 ms (stage 30.0: task 855))";
5 [labelType="html" label="<b>HashAggregate</b><br><br>time in aggregation build total (min, med, max (stageId: taskId))<br>36 ms (0 ms, 0 ms, 24 ms (stage 30.0: task 855))<br>peak memory total (min, med, max (stageId: taskId))<br>868.0 MiB (256.0 KiB, 256.0 KiB, 64.3 MiB (stage 32.0: task 1055))<br>number of output rows: 12<br>avg hash probe bucket list iters (min, med, max (stageId: taskId)):<br>(1, 1, 1 (stage 32.0: task 1055))"];
}
6 [labelType="html" label="<b>StateStoreSave</b><br><br>number of total state rows: 12<br>memory used by state total (min, med, max (stageId: taskId))<br>121.3 KiB (232.0 B, 376.0 B, 600.0 B (stage 30.0: task 905))<br>count of cache hit on states cache in provider: 1,200<br>number of output rows: 12<br>estimated size of state only on current version total (min, med, max (stageId: taskId))<br>36.9 KiB (88.0 B, 88.0 B, 312.0 B (stage 32.0: task 1106))<br>time to commit changes total (min, med, max (stageId: taskId))<br>19.7 s (27 ms, 38 ms, 148 ms (stage 32.0: task 1219))<br>time to remove total (min, med, max (stageId: taskId))<br>0 ms (0 ms, 0 ms, 0 ms (stage 32.0: task 1048))<br>number of updated state rows: 12<br>time to update total (min, med, max (stageId: taskId))<br>452 ms (0 ms, 0 ms, 66 ms (stage 30.0: task 855))"];
subgraph cluster7 {
isCluster="true";
label="WholeStageCodegen (5)\n \nduration: total (min, med, max (stageId: taskId))\n458 ms (0 ms, 0 ms, 66 ms (stage 30.0: task 855))";
8 [labelType="html" label="<b>HashAggregate</b><br><br>time in aggregation build total (min, med, max (stageId: taskId))<br>105 ms (0 ms, 0 ms, 38 ms (stage 30.0: task 855))<br>peak memory total (min, med, max (stageId: taskId))<br>868.0 MiB (256.0 KiB, 256.0 KiB, 64.3 MiB (stage 32.0: task 1055))<br>number of output rows: 12<br>avg hash probe bucket list iters (min, med, max (stageId: taskId)):<br>(1, 1, 1 (stage 32.0: task 1055))"];
}
9 [labelType="html" label="<b>StateStoreRestore</b><br><br>number of output rows: 12"];
subgraph cluster10 {
isCluster="true";
label="WholeStageCodegen (4)\n \nduration: total (min, med, max (stageId: taskId))\n1.3 s (1 ms, 2 ms, 77 ms (stage 30.0: task 954))";
11 [labelType="html" label="<b>HashAggregate</b><br><br>time in aggregation build total (min, med, max (stageId: taskId))<br>128 ms (0 ms, 0 ms, 39 ms (stage 30.0: task 954))<br>peak memory total (min, med, max (stageId: taskId))<br>868.0 MiB (256.0 KiB, 256.0 KiB, 64.3 MiB (stage 32.0: task 1055))<br>number of output rows: 12<br>avg hash probe bucket list iters (min, med, max (stageId: taskId)):<br>(1, 1, 1 (stage 32.0: task 1055))"];
}
12 [labelType="html" label="<b>Exchange</b><br><br>shuffle records written: 8<br>shuffle write time total (min, med, max (stageId: taskId))<br>12 ms (5 ms, 6 ms, 6 ms (stage 29.0: task 847))<br>records read: 16<br>local bytes read total (min, med, max (stageId: taskId))<br>750.0 B (0.0 B, 0.0 B, 107.0 B (stage 32.0: task 1106))<br>fetch wait time total (min, med, max (stageId: taskId))<br>40 ms (0 ms, 0 ms, 20 ms (stage 30.0: task 855))<br>remote bytes read total (min, med, max (stageId: taskId))<br>804.0 B (0.0 B, 0.0 B, 109.0 B (stage 32.0: task 1160))<br>local blocks read: 8<br>remote blocks read: 8<br>data size total (min, med, max (stageId: taskId))<br>528.0 B (144.0 B, 384.0 B, 384.0 B (stage 29.0: task 846))<br>shuffle bytes written total (min, med, max (stageId: taskId))<br>777.0 B (217.0 B, 560.0 B, 560.0 B (stage 29.0: task 846))"];
subgraph cluster13 {
isCluster="true";
label="WholeStageCodegen (3)\n \nduration: total (min, med, max (stageId: taskId))\n59 ms (12 ms, 47 ms, 47 ms (stage 29.0: task 847))";
14 [labelType="html" label="<b>HashAggregate</b><br><br>time in aggregation build total (min, med, max (stageId: taskId))<br>42 ms (3 ms, 39 ms, 39 ms (stage 29.0: task 847))<br>peak memory total (min, med, max (stageId: taskId))<br>128.5 MiB (64.3 MiB, 64.3 MiB, 64.3 MiB (stage 29.0: task 846))<br>number of output rows: 8<br>avg hash probe bucket list iters (min, med, max (stageId: taskId)):<br>(1, 1, 1 (stage 29.0: task 846))"];
15 [labelType="html" label="<br><b>Project</b><br><br>"];
16 [labelType="html" label="<b>Filter</b><br><br>number of output rows: 38"];
17 [labelType="html" label="<b>Expand</b><br><br>number of output rows: 38"];
18 [labelType="html" label="<br><b>Project</b><br><br>"];
}
19 [labelType="html" label="<br><b>EventTimeWatermark</b><br><br>"];
subgraph cluster20 {
isCluster="true";
label="WholeStageCodegen (2)\n \nduration: total (min, med, max (stageId: taskId))\n50 ms (7 ms, 43 ms, 43 ms (stage 29.0: task 847))";
21 [labelType="html" label="<br><b>Project</b><br><br>"];
}
22 [labelType="html" label="<b>Generate</b><br><br>number of output rows: 19"];
subgraph cluster23 {
isCluster="true";
label="WholeStageCodegen (1)\n \nduration: total (min, med, max (stageId: taskId))\n52 ms (8 ms, 44 ms, 44 ms (stage 29.0: task 847))";
24 [labelType="html" label="<br><b>Project</b><br><br>"];
}
25 [labelType="html" label="<b>MicroBatchScan</b><br><br>number of output rows: 17"];
2->0;
3->2;
5->3;
6->5;
8->6;
9->8;
11->9;
12->11;
14->12;
15->14;
16->15;
17->16;
18->17;
19->18;
21->19;
22->21;
24->22;
25->24;
}
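Each metric line in the node labels above follows the pattern "total (min, med, max (stageId: taskId))": one value is reported per task, and the label shows their sum together with the minimum, median and maximum, naming the stage and task that produced the maximum. A minimal reading aid, with a hypothetical summarize helper (this is not Spark's own code):

```python
# Hedged reading aid: per-task metric values summarized the way the plan
# labels present them. Per-task durations of 0, 1 and 93 ms, for instance,
# yield the "94 ms (0 ms, 1 ms, 93 ms ...)" line on WholeStageCodegen (7).
from statistics import median

def summarize(per_task_values):
    """Return (total, min, median, max) for a list of per-task metric values."""
    return (sum(per_task_values), min(per_task_values),
            median(per_task_values), max(per_task_values))

print(summarize([0, 1, 93]))  # (94, 0, 1, 93)
```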
== Parsed Logical Plan ==
WriteToDataSourceV2 org.apache.spark.sql.execution.streaming.sources.MicroBatchWrite@65e535fe
+- Sort [total_bytes#858 DESC NULLS LAST], true
+- Aggregate [window#859-T3000ms, hostname#140], [window#859-T3000ms AS window#847-T3000ms, hostname#140, sum(cast(bytes#175 as double)) AS total_bytes#858]
+- Filter ((timestamp#1-T3000ms >= window#859-T3000ms.start) AND (timestamp#1-T3000ms < window#859-T3000ms.end))
+- Expand [ArrayBuffer(named_struct(start, precisetimestampconversion(((((CASE WHEN (cast(CEIL((cast((precisetimestampconversion(timestamp#1-T3000ms, TimestampType, LongType) - 0) as double) / cast(30000000 as double))) as double) = (cast((precisetimestampconversion(timestamp#1-T3000ms, TimestampType, LongType) - 0) as double) / cast(30000000 as double))) THEN (CEIL((cast((precisetimestampconversion(timestamp#1-T3000ms, TimestampType, LongType) - 0) as double) / cast(30000000 as double))) + cast(1 as bigint)) ELSE CEIL((cast((precisetimestampconversion(timestamp#1-T3000ms, TimestampType, LongType) - 0) as double) / cast(30000000 as double))) END + cast(0 as bigint)) - cast(2 as bigint)) * 30000000) + 0), LongType, TimestampType), end, precisetimestampconversion((((((CASE WHEN (cast(CEIL((cast((precisetimestampconversion(timestamp#1-T3000ms, TimestampType, LongType) - 0) as double) / cast(30000000 as double))) as double) = (cast((precisetimestampconversion(timestamp#1-T3000ms, TimestampType, LongType) - 0) as double) / cast(30000000 as double))) THEN (CEIL((cast((precisetimestampconversion(timestamp#1-T3000ms, TimestampType, LongType) - 0) as double) / cast(30000000 as double))) + cast(1 as bigint)) ELSE CEIL((cast((precisetimestampconversion(timestamp#1-T3000ms, TimestampType, LongType) - 0) as double) / cast(30000000 as double))) END + cast(0 as bigint)) - cast(2 as bigint)) * 30000000) + 0) + 60000000), LongType, TimestampType)), logs#5, timestamp#1-T3000ms, idx#136, hostname#140, time#145, method#151, url#158, responsecode#166, bytes#175), ArrayBuffer(named_struct(start, precisetimestampconversion(((((CASE WHEN (cast(CEIL((cast((precisetimestampconversion(timestamp#1-T3000ms, TimestampType, LongType) - 0) as double) / cast(30000000 as double))) as double) = (cast((precisetimestampconversion(timestamp#1-T3000ms, TimestampType, LongType) - 0) as double) / cast(30000000 as double))) THEN (CEIL((cast((precisetimestampconversion(timestamp#1-T3000ms, TimestampType, LongType) - 0) as double) / cast(30000000 as double))) + cast(1 as bigint)) ELSE CEIL((cast((precisetimestampconversion(timestamp#1-T3000ms, TimestampType, LongType) - 0) as double) / cast(30000000 as double))) END + cast(1 as bigint)) - cast(2 as bigint)) * 30000000) + 0), LongType, TimestampType), end, precisetimestampconversion((((((CASE WHEN (cast(CEIL((cast((precisetimestampconversion(timestamp#1-T3000ms, TimestampType, LongType) - 0) as double) / cast(30000000 as double))) as double) = (cast((precisetimestampconversion(timestamp#1-T3000ms, TimestampType, LongType) - 0) as double) / cast(30000000 as double))) THEN (CEIL((cast((precisetimestampconversion(timestamp#1-T3000ms, TimestampType, LongType) - 0) as double) / cast(30000000 as double))) + cast(1 as bigint)) ELSE CEIL((cast((precisetimestampconversion(timestamp#1-T3000ms, TimestampType, LongType) - 0) as double) / cast(30000000 as double))) END + cast(1 as bigint)) - cast(2 as bigint)) * 30000000) + 0) + 60000000), LongType, TimestampType)), logs#5, timestamp#1-T3000ms, idx#136, hostname#140, time#145, method#151, url#158, responsecode#166, bytes#175)], [window#859-T3000ms, logs#5, timestamp#1-T3000ms, idx#136, hostname#140, time#145, method#151, url#158, responsecode#166, bytes#175]
+- Project [logs#5, timestamp#1-T3000ms, idx#136, hostname#140, time#145, method#151, url#158, responsecode#166, split(logs#5, ,, -1)[6] AS bytes#175]
+- Project [logs#5, timestamp#1-T3000ms, idx#136, hostname#140, time#145, method#151, url#158, split(logs#5, ,, -1)[5] AS responsecode#166]
+- Project [logs#5, timestamp#1-T3000ms, idx#136, hostname#140, time#145, method#151, split(logs#5, ,, -1)[4] AS url#158]
+- Project [logs#5, timestamp#1-T3000ms, idx#136, hostname#140, time#145, split(logs#5, ,, -1)[3] AS method#151]
+- Project [logs#5, timestamp#1-T3000ms, idx#136, hostname#140, split(logs#5, ,, -1)[2] AS time#145]
+- Project [logs#5, timestamp#1-T3000ms, idx#136, split(logs#5, ,, -1)[1] AS hostname#140]
+- Project [logs#5, timestamp#1-T3000ms, split(logs#5, ,, -1)[0] AS idx#136]
+- EventTimeWatermark timestamp#1: timestamp, 3 seconds
+- Project [logs#5, timestamp#1]
+- Generate explode(split(value#0, , -1)), false, [logs#5]
+- StreamingDataSourceV2Relation [value#0, timestamp#1], org.apache.spark.sql.execution.streaming.sources.TextSocketTable$$anon$1@6b3ca49a, TextSocketV2[host: stream-emulator.data-science-tools.svc.cluster.local, port: 5551], -1, 16
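The stack of single-column Project nodes near the bottom of the parsed plan is the typical footprint of repeated withColumn-style field extraction; the optimizer later collapses them into one Project and prunes the unused columns (compare the optimized plan further down). A hedged batch-mode illustration, not the original job, with a made-up one-row DataFrame and the hypothetical name logs_df:

```python
# Chained withColumn calls add one Project per call to the parsed plan;
# CollapseProject merges them into a single Project during optimization.
from pyspark.sql import SparkSession, functions as F

spark = SparkSession.builder.getOrCreate()

logs_df = spark.createDataFrame(
    [("0,host-1,10:00:00,GET,/index.html,200,1024",)], ["logs"])

fields = F.split("logs", ",")
for i, name in enumerate(["idx", "hostname", "time", "method",
                          "url", "responsecode", "bytes"]):
    logs_df = logs_df.withColumn(name, fields.getItem(i))

logs_df.explain(True)  # parsed plan: a Project stack; optimized plan: one Project
```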
== Analyzed Logical Plan ==
WriteToDataSourceV2 org.apache.spark.sql.execution.streaming.sources.MicroBatchWrite@65e535fe
+- Sort [total_bytes#858 DESC NULLS LAST], true
+- Aggregate [window#859-T3000ms, hostname#140], [window#859-T3000ms AS window#847-T3000ms, hostname#140, sum(cast(bytes#175 as double)) AS total_bytes#858]
+- Filter ((timestamp#1-T3000ms >= window#859-T3000ms.start) AND (timestamp#1-T3000ms < window#859-T3000ms.end))
+- Expand [ArrayBuffer(named_struct(start, precisetimestampconversion(((((CASE WHEN (cast(CEIL((cast((precisetimestampconversion(timestamp#1-T3000ms, TimestampType, LongType) - 0) as double) / cast(30000000 as double))) as double) = (cast((precisetimestampconversion(timestamp#1-T3000ms, TimestampType, LongType) - 0) as double) / cast(30000000 as double))) THEN (CEIL((cast((precisetimestampconversion(timestamp#1-T3000ms, TimestampType, LongType) - 0) as double) / cast(30000000 as double))) + cast(1 as bigint)) ELSE CEIL((cast((precisetimestampconversion(timestamp#1-T3000ms, TimestampType, LongType) - 0) as double) / cast(30000000 as double))) END + cast(0 as bigint)) - cast(2 as bigint)) * 30000000) + 0), LongType, TimestampType), end, precisetimestampconversion((((((CASE WHEN (cast(CEIL((cast((precisetimestampconversion(timestamp#1-T3000ms, TimestampType, LongType) - 0) as double) / cast(30000000 as double))) as double) = (cast((precisetimestampconversion(timestamp#1-T3000ms, TimestampType, LongType) - 0) as double) / cast(30000000 as double))) THEN (CEIL((cast((precisetimestampconversion(timestamp#1-T3000ms, TimestampType, LongType) - 0) as double) / cast(30000000 as double))) + cast(1 as bigint)) ELSE CEIL((cast((precisetimestampconversion(timestamp#1-T3000ms, TimestampType, LongType) - 0) as double) / cast(30000000 as double))) END + cast(0 as bigint)) - cast(2 as bigint)) * 30000000) + 0) + 60000000), LongType, TimestampType)), logs#5, timestamp#1-T3000ms, idx#136, hostname#140, time#145, method#151, url#158, responsecode#166, bytes#175), ArrayBuffer(named_struct(start, precisetimestampconversion(((((CASE WHEN (cast(CEIL((cast((precisetimestampconversion(timestamp#1-T3000ms, TimestampType, LongType) - 0) as double) / cast(30000000 as double))) as double) = (cast((precisetimestampconversion(timestamp#1-T3000ms, TimestampType, LongType) - 0) as double) / cast(30000000 as double))) THEN (CEIL((cast((precisetimestampconversion(timestamp#1-T3000ms, TimestampType, LongType) - 0) as double) / cast(30000000 as double))) + cast(1 as bigint)) ELSE CEIL((cast((precisetimestampconversion(timestamp#1-T3000ms, TimestampType, LongType) - 0) as double) / cast(30000000 as double))) END + cast(1 as bigint)) - cast(2 as bigint)) * 30000000) + 0), LongType, TimestampType), end, precisetimestampconversion((((((CASE WHEN (cast(CEIL((cast((precisetimestampconversion(timestamp#1-T3000ms, TimestampType, LongType) - 0) as double) / cast(30000000 as double))) as double) = (cast((precisetimestampconversion(timestamp#1-T3000ms, TimestampType, LongType) - 0) as double) / cast(30000000 as double))) THEN (CEIL((cast((precisetimestampconversion(timestamp#1-T3000ms, TimestampType, LongType) - 0) as double) / cast(30000000 as double))) + cast(1 as bigint)) ELSE CEIL((cast((precisetimestampconversion(timestamp#1-T3000ms, TimestampType, LongType) - 0) as double) / cast(30000000 as double))) END + cast(1 as bigint)) - cast(2 as bigint)) * 30000000) + 0) + 60000000), LongType, TimestampType)), logs#5, timestamp#1-T3000ms, idx#136, hostname#140, time#145, method#151, url#158, responsecode#166, bytes#175)], [window#859-T3000ms, logs#5, timestamp#1-T3000ms, idx#136, hostname#140, time#145, method#151, url#158, responsecode#166, bytes#175]
+- Project [logs#5, timestamp#1-T3000ms, idx#136, hostname#140, time#145, method#151, url#158, responsecode#166, split(logs#5, ,, -1)[6] AS bytes#175]
+- Project [logs#5, timestamp#1-T3000ms, idx#136, hostname#140, time#145, method#151, url#158, split(logs#5, ,, -1)[5] AS responsecode#166]
+- Project [logs#5, timestamp#1-T3000ms, idx#136, hostname#140, time#145, method#151, split(logs#5, ,, -1)[4] AS url#158]
+- Project [logs#5, timestamp#1-T3000ms, idx#136, hostname#140, time#145, split(logs#5, ,, -1)[3] AS method#151]
+- Project [logs#5, timestamp#1-T3000ms, idx#136, hostname#140, split(logs#5, ,, -1)[2] AS time#145]
+- Project [logs#5, timestamp#1-T3000ms, idx#136, split(logs#5, ,, -1)[1] AS hostname#140]
+- Project [logs#5, timestamp#1-T3000ms, split(logs#5, ,, -1)[0] AS idx#136]
+- EventTimeWatermark timestamp#1: timestamp, 3 seconds
+- Project [logs#5, timestamp#1]
+- Generate explode(split(value#0, , -1)), false, [logs#5]
+- StreamingDataSourceV2Relation [value#0, timestamp#1], org.apache.spark.sql.execution.streaming.sources.TextSocketTable$$anon$1@6b3ca49a, TextSocketV2[host: stream-emulator.data-science-tools.svc.cluster.local, port: 5551], -1, 16
== Optimized Logical Plan ==
WriteToDataSourceV2 org.apache.spark.sql.execution.streaming.sources.MicroBatchWrite@65e535fe
+- Sort [total_bytes#858 DESC NULLS LAST], true
+- Aggregate [window#859-T3000ms, hostname#140], [window#859-T3000ms AS window#847-T3000ms, hostname#140, sum(cast(bytes#175 as double)) AS total_bytes#858]
+- Project [window#859-T3000ms, hostname#140, bytes#175]
+- Filter (((isnotnull(timestamp#1-T3000ms) AND isnotnull(window#859-T3000ms)) AND (timestamp#1-T3000ms >= window#859-T3000ms.start)) AND (timestamp#1-T3000ms < window#859-T3000ms.end))
+- Expand [ArrayBuffer(named_struct(start, precisetimestampconversion(((((CASE WHEN (cast(CEIL((cast((precisetimestampconversion(timestamp#1-T3000ms, TimestampType, LongType) - 0) as double) / 3.0E7)) as double) = (cast((precisetimestampconversion(timestamp#1-T3000ms, TimestampType, LongType) - 0) as double) / 3.0E7)) THEN (CEIL((cast((precisetimestampconversion(timestamp#1-T3000ms, TimestampType, LongType) - 0) as double) / 3.0E7)) + 1) ELSE CEIL((cast((precisetimestampconversion(timestamp#1-T3000ms, TimestampType, LongType) - 0) as double) / 3.0E7)) END + 0) - 2) * 30000000) + 0), LongType, TimestampType), end, precisetimestampconversion(((((CASE WHEN (cast(CEIL((cast((precisetimestampconversion(timestamp#1-T3000ms, TimestampType, LongType) - 0) as double) / 3.0E7)) as double) = (cast((precisetimestampconversion(timestamp#1-T3000ms, TimestampType, LongType) - 0) as double) / 3.0E7)) THEN (CEIL((cast((precisetimestampconversion(timestamp#1-T3000ms, TimestampType, LongType) - 0) as double) / 3.0E7)) + 1) ELSE CEIL((cast((precisetimestampconversion(timestamp#1-T3000ms, TimestampType, LongType) - 0) as double) / 3.0E7)) END + 0) - 2) * 30000000) + 60000000), LongType, TimestampType)), timestamp#1-T3000ms, hostname#140, bytes#175), ArrayBuffer(named_struct(start, precisetimestampconversion(((((CASE WHEN (cast(CEIL((cast((precisetimestampconversion(timestamp#1-T3000ms, TimestampType, LongType) - 0) as double) / 3.0E7)) as double) = (cast((precisetimestampconversion(timestamp#1-T3000ms, TimestampType, LongType) - 0) as double) / 3.0E7)) THEN (CEIL((cast((precisetimestampconversion(timestamp#1-T3000ms, TimestampType, LongType) - 0) as double) / 3.0E7)) + 1) ELSE CEIL((cast((precisetimestampconversion(timestamp#1-T3000ms, TimestampType, LongType) - 0) as double) / 3.0E7)) END + 1) - 2) * 30000000) + 0), LongType, TimestampType), end, precisetimestampconversion(((((CASE WHEN (cast(CEIL((cast((precisetimestampconversion(timestamp#1-T3000ms, TimestampType, LongType) - 0) as double) / 3.0E7)) as double) = (cast((precisetimestampconversion(timestamp#1-T3000ms, TimestampType, LongType) - 0) as double) / 3.0E7)) THEN (CEIL((cast((precisetimestampconversion(timestamp#1-T3000ms, TimestampType, LongType) - 0) as double) / 3.0E7)) + 1) ELSE CEIL((cast((precisetimestampconversion(timestamp#1-T3000ms, TimestampType, LongType) - 0) as double) / 3.0E7)) END + 1) - 2) * 30000000) + 60000000), LongType, TimestampType)), timestamp#1-T3000ms, hostname#140, bytes#175)], [window#859-T3000ms, timestamp#1-T3000ms, hostname#140, bytes#175]
+- Project [timestamp#1-T3000ms, split(logs#5, ,, -1)[1] AS hostname#140, split(logs#5, ,, -1)[6] AS bytes#175]
+- EventTimeWatermark timestamp#1: timestamp, 3 seconds
+- Project [logs#5, timestamp#1]
+- Generate explode(split(value#0, , -1)), [0], false, [logs#5]
+- StreamingDataSourceV2Relation [value#0, timestamp#1], org.apache.spark.sql.execution.streaming.sources.TextSocketTable$$anon$1@6b3ca49a, TextSocketV2[host: stream-emulator.data-science-tools.svc.cluster.local, port: 5551], -1, 16
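In the Expand/Filter pair above (and again in the physical plan below), window(timestamp, "60 seconds", "30 seconds") is unrolled: every row is duplicated into ceil(60 s / 30 s) = 2 candidate windows, and the Filter keeps only the windows that actually contain the event time. A hedged Python sketch of that arithmetic, with the constants (30000000 µs slide, 60000000 µs window length, the "- 2" overlap count) read off the plan expression and a hypothetical windows_for helper:

```python
# Sketch of the window assignment performed by Expand + Filter for
# window(timestamp, "60 seconds", "30 seconds"); not Spark's internal code.
import math

SLIDE_US = 30_000_000                       # slide duration (the 3.0E7 divisor)
WINDOW_US = 60_000_000                      # window length (end = start + 60000000)
OVERLAPS = math.ceil(WINDOW_US / SLIDE_US)  # candidate windows per row = 2

def windows_for(ts_us):
    """Return the [start, end) windows, in microseconds, that contain ts_us."""
    last_slide = math.ceil(ts_us / SLIDE_US)
    if last_slide * SLIDE_US == ts_us:      # the CASE WHEN: exact slide boundary
        last_slide += 1
    candidates = [((last_slide + i - OVERLAPS) * SLIDE_US,
                   (last_slide + i - OVERLAPS) * SLIDE_US + WINDOW_US)
                  for i in range(OVERLAPS)]
    return [(s, e) for (s, e) in candidates if s <= ts_us < e]

# An event at t = 75 s lands in both the [30 s, 90 s) and [60 s, 120 s) windows.
print(windows_for(75_000_000))  # [(30000000, 90000000), (60000000, 120000000)]
```

This is consistent with the metrics in the graph above: the Expand doubled the 19 rows coming out of Generate to 38, and the Filter kept all 38, since every event falls inside both of its candidate windows.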
== Physical Plan ==
WriteToDataSourceV2 org.apache.spark.sql.execution.streaming.sources.MicroBatchWrite@65e535fe
+- *(7) Sort [total_bytes#858 DESC NULLS LAST], true, 0
+- Exchange rangepartitioning(total_bytes#858 DESC NULLS LAST, 200), true, [id=#1101]
+- *(6) HashAggregate(keys=[window#859-T3000ms, hostname#140], functions=[sum(cast(bytes#175 as double))], output=[window#847-T3000ms, hostname#140, total_bytes#858])
+- StateStoreSave [window#859-T3000ms, hostname#140], state info [ checkpoint = file:/tmp/temporary-fcb72abb-9143-4d38-a65d-25baf17e34c7/state, runId = 49431645-20e3-4737-9dcc-8f334da22f1d, opId = 0, ver = 1, numPartitions = 200], Complete, 0, 2
+- *(5) HashAggregate(keys=[window#859-T3000ms, hostname#140], functions=[merge_sum(cast(bytes#175 as double))], output=[window#859-T3000ms, hostname#140, sum#869])
+- StateStoreRestore [window#859-T3000ms, hostname#140], state info [ checkpoint = file:/tmp/temporary-fcb72abb-9143-4d38-a65d-25baf17e34c7/state, runId = 49431645-20e3-4737-9dcc-8f334da22f1d, opId = 0, ver = 1, numPartitions = 200], 2
+- *(4) HashAggregate(keys=[window#859-T3000ms, hostname#140], functions=[merge_sum(cast(bytes#175 as double))], output=[window#859-T3000ms, hostname#140, sum#869])
+- Exchange hashpartitioning(window#859-T3000ms, hostname#140, 200), true, [id=#1089]
+- *(3) HashAggregate(keys=[window#859-T3000ms, hostname#140], functions=[partial_sum(cast(bytes#175 as double))], output=[window#859-T3000ms, hostname#140, sum#869])
+- *(3) Project [window#859-T3000ms, hostname#140, bytes#175]
+- *(3) Filter (((isnotnull(timestamp#1-T3000ms) AND isnotnull(window#859-T3000ms)) AND (timestamp#1-T3000ms >= window#859-T3000ms.start)) AND (timestamp#1-T3000ms < window#859-T3000ms.end))
+- *(3) Expand [ArrayBuffer(named_struct(start, precisetimestampconversion(((((CASE WHEN (cast(CEIL((cast((precisetimestampconversion(timestamp#1-T3000ms, TimestampType, LongType) - 0) as double) / 3.0E7)) as double) = (cast((precisetimestampconversion(timestamp#1-T3000ms, TimestampType, LongType) - 0) as double) / 3.0E7)) THEN (CEIL((cast((precisetimestampconversion(timestamp#1-T3000ms, TimestampType, LongType) - 0) as double) / 3.0E7)) + 1) ELSE CEIL((cast((precisetimestampconversion(timestamp#1-T3000ms, TimestampType, LongType) - 0) as double) / 3.0E7)) END + 0) - 2) * 30000000) + 0), LongType, TimestampType), end, precisetimestampconversion(((((CASE WHEN (cast(CEIL((cast((precisetimestampconversion(timestamp#1-T3000ms, TimestampType, LongType) - 0) as double) / 3.0E7)) as double) = (cast((precisetimestampconversion(timestamp#1-T3000ms, TimestampType, LongType) - 0) as double) / 3.0E7)) THEN (CEIL((cast((precisetimestampconversion(timestamp#1-T3000ms, TimestampType, LongType) - 0) as double) / 3.0E7)) + 1) ELSE CEIL((cast((precisetimestampconversion(timestamp#1-T3000ms, TimestampType, LongType) - 0) as double) / 3.0E7)) END + 0) - 2) * 30000000) + 60000000), LongType, TimestampType)), timestamp#1-T3000ms, hostname#140, bytes#175), ArrayBuffer(named_struct(start, precisetimestampconversion(((((CASE WHEN (cast(CEIL((cast((precisetimestampconversion(timestamp#1-T3000ms, TimestampType, LongType) - 0) as double) / 3.0E7)) as double) = (cast((precisetimestampconversion(timestamp#1-T3000ms, TimestampType, LongType) - 0) as double) / 3.0E7)) THEN (CEIL((cast((precisetimestampconversion(timestamp#1-T3000ms, TimestampType, LongType) - 0) as double) / 3.0E7)) + 1) ELSE CEIL((cast((precisetimestampconversion(timestamp#1-T3000ms, TimestampType, LongType) - 0) as double) / 3.0E7)) END + 1) - 2) * 30000000) + 0), LongType, TimestampType), end, precisetimestampconversion(((((CASE WHEN (cast(CEIL((cast((precisetimestampconversion(timestamp#1-T3000ms, TimestampType, LongType) - 0) as double) / 3.0E7)) as double) = (cast((precisetimestampconversion(timestamp#1-T3000ms, TimestampType, LongType) - 0) as double) / 3.0E7)) THEN (CEIL((cast((precisetimestampconversion(timestamp#1-T3000ms, TimestampType, LongType) - 0) as double) / 3.0E7)) + 1) ELSE CEIL((cast((precisetimestampconversion(timestamp#1-T3000ms, TimestampType, LongType) - 0) as double) / 3.0E7)) END + 1) - 2) * 30000000) + 60000000), LongType, TimestampType)), timestamp#1-T3000ms, hostname#140, bytes#175)], [window#859-T3000ms, timestamp#1-T3000ms, hostname#140, bytes#175]
+- *(3) Project [timestamp#1-T3000ms, split(logs#5, ,, -1)[1] AS hostname#140, split(logs#5, ,, -1)[6] AS bytes#175]
+- EventTimeWatermark timestamp#1: timestamp, 3 seconds
+- *(2) Project [logs#5, timestamp#1]
+- Generate explode(split(value#0, , -1)), [timestamp#1], false, [logs#5]
+- *(1) Project [value#0, timestamp#1]
+- MicroBatchScan[value#0, timestamp#1] class org.apache.spark.sql.execution.streaming.sources.TextSocketTable$$anon$1
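Taken together, the plan is consistent with a Structured Streaming job that reads comma-separated log records from a socket, extracts the hostname and byte-count fields, and ranks hostnames by total bytes over 60-second windows sliding every 30 seconds. Below is a hedged PySpark sketch of such a query: the socket host and port, the 3-second watermark, the window and slide durations, and the complete output mode are read off the plan, while the console sink, the space delimiter in the first split, and all variable names are assumptions. The parsed plan also extracts idx, time, method, url and responsecode, which the optimizer prunes away, so the sketch keeps only the columns that survive.

```python
# Hedged reconstruction of a query that would produce a plan of this shape.
from pyspark.sql import SparkSession, functions as F

spark = SparkSession.builder.appName("streaming-log-bytes").getOrCreate()

raw = (spark.readStream
       .format("socket")
       .option("host", "stream-emulator.data-science-tools.svc.cluster.local")
       .option("port", 5551)
       .option("includeTimestamp", True)  # yields the value/timestamp columns in MicroBatchScan
       .load())

# Generate explode(split(value, ...)): one log record per element of each line.
# The delimiter does not render in the plan text; a space is assumed here.
logs = raw.select(F.explode(F.split("value", " ")).alias("logs"), "timestamp")

fields = F.split("logs", ",")
parsed = (logs
          .withWatermark("timestamp", "3 seconds")  # EventTimeWatermark timestamp, 3 seconds
          .select("timestamp",
                  fields.getItem(1).alias("hostname"),
                  fields.getItem(6).alias("bytes")))

# 60-second windows sliding every 30 seconds, summing bytes per hostname.
ranked = (parsed
          .groupBy(F.window("timestamp", "60 seconds", "30 seconds"), "hostname")
          .agg(F.sum(F.col("bytes").cast("double")).alias("total_bytes"))
          .orderBy(F.desc("total_bytes")))

query = (ranked.writeStream
         .outputMode("complete")  # matches StateStoreSave ..., Complete
         .format("console")       # assumed sink; the plan only shows a MicroBatchWrite
         .start())
query.awaitTermination()
```

Once at least one micro-batch has run, query.explain(True) should print the same four plan sections shown above for the most recent batch; the per-node metrics, however, are only visible in the SQL tab of the Spark UI, which is what the DOT graph at the top appears to be taken from.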