== Parsed Logical Plan ==
WriteToDataSourceV2 org.apache.spark.sql.execution.streaming.sources.MicroBatchWrite@552f4a05
+- Project [logs#19, processing_time#14-T3000ms, idx#22, hostname#26, time#31, method#37, url#44, responsecode#52, split(logs#19, ,, -1)[6] AS bytes#61]
   +- Project [logs#19, processing_time#14-T3000ms, idx#22, hostname#26, time#31, method#37, url#44, split(logs#19, ,, -1)[5] AS responsecode#52]
      +- Project [logs#19, processing_time#14-T3000ms, idx#22, hostname#26, time#31, method#37, split(logs#19, ,, -1)[4] AS url#44]
         +- Project [logs#19, processing_time#14-T3000ms, idx#22, hostname#26, time#31, split(logs#19, ,, -1)[3] AS method#37]
            +- Project [logs#19, processing_time#14-T3000ms, idx#22, hostname#26, split(logs#19, ,, -1)[2] AS time#31]
               +- Project [logs#19, processing_time#14-T3000ms, idx#22, split(logs#19, ,, -1)[1] AS hostname#26]
                  +- Project [logs#19, processing_time#14-T3000ms, split(logs#19, ,, -1)[0] AS idx#22]
                     +- EventTimeWatermark processing_time#14: timestamp, 3 seconds
                        +- Project [logs#19, processing_time#14]
                           +- Generate explode(split(raw_logs#4, , -1)), false, [logs#19]
                              +- Project [raw_logs#4, timestamp#1, cast(split(raw_logs#4, , -1)[0] as timestamp) AS processing_time#14]
                                 +- Project [raw_logs#4, timestamp#1, current_batch_timestamp(1733613704835, TimestampType, Some(Dummy TimeZoneId)) AS processing_time#7]
                                    +- Project [value#0 AS raw_logs#4, timestamp#1]
                                       +- StreamingDataSourceV2Relation [value#0, timestamp#1], org.apache.spark.sql.execution.streaming.sources.TextSocketTable$$anon$1@565361a2, TextSocketV2[host: stream-emulator.data-science-tools.svc.cluster.local, port: 5551], 40, 41

== Analyzed Logical Plan ==
WriteToDataSourceV2 org.apache.spark.sql.execution.streaming.sources.MicroBatchWrite@552f4a05
+- Project [logs#19, processing_time#14-T3000ms, idx#22, hostname#26, time#31, method#37, url#44, responsecode#52, split(logs#19, ,, -1)[6] AS bytes#61]
   +- Project [logs#19, processing_time#14-T3000ms, idx#22, hostname#26, time#31, method#37, url#44, split(logs#19, ,, -1)[5] AS responsecode#52]
      +- Project [logs#19, processing_time#14-T3000ms, idx#22, hostname#26, time#31, method#37, split(logs#19, ,, -1)[4] AS url#44]
         +- Project [logs#19, processing_time#14-T3000ms, idx#22, hostname#26, time#31, split(logs#19, ,, -1)[3] AS method#37]
            +- Project [logs#19, processing_time#14-T3000ms, idx#22, hostname#26, split(logs#19, ,, -1)[2] AS time#31]
               +- Project [logs#19, processing_time#14-T3000ms, idx#22, split(logs#19, ,, -1)[1] AS hostname#26]
                  +- Project [logs#19, processing_time#14-T3000ms, split(logs#19, ,, -1)[0] AS idx#22]
                     +- EventTimeWatermark processing_time#14: timestamp, 3 seconds
                        +- Project [logs#19, processing_time#14]
                           +- Generate explode(split(raw_logs#4, , -1)), false, [logs#19]
                              +- Project [raw_logs#4, timestamp#1, cast(split(raw_logs#4, , -1)[0] as timestamp) AS processing_time#14]
                                 +- Project [raw_logs#4, timestamp#1, current_batch_timestamp(1733613704835, TimestampType, Some(Dummy TimeZoneId)) AS processing_time#7]
                                    +- Project [value#0 AS raw_logs#4, timestamp#1]
                                       +- StreamingDataSourceV2Relation [value#0, timestamp#1], org.apache.spark.sql.execution.streaming.sources.TextSocketTable$$anon$1@565361a2, TextSocketV2[host: stream-emulator.data-science-tools.svc.cluster.local, port: 5551], 40, 41

== Optimized Logical Plan ==
WriteToDataSourceV2 org.apache.spark.sql.execution.streaming.sources.MicroBatchWrite@552f4a05
+- Project [logs#19, processing_time#14-T3000ms, split(logs#19, ,, -1)[0] AS idx#22, split(logs#19, ,, -1)[1] AS hostname#26, split(logs#19, ,, -1)[2] AS time#31, split(logs#19, ,, -1)[3] AS method#37, split(logs#19, ,, -1)[4] AS url#44, split(logs#19, ,, -1)[5] AS responsecode#52, split(logs#19, ,, -1)[6] AS bytes#61]
   +- EventTimeWatermark processing_time#14: timestamp, 3 seconds
      +- Project [logs#19, processing_time#14]
         +- Generate explode(split(raw_logs#4, , -1)), [0], false, [logs#19]
            +- Project [value#0 AS raw_logs#4, cast(split(value#0, , -1)[0] as timestamp) AS processing_time#14]
               +- StreamingDataSourceV2Relation [value#0, timestamp#1], org.apache.spark.sql.execution.streaming.sources.TextSocketTable$$anon$1@565361a2, TextSocketV2[host: stream-emulator.data-science-tools.svc.cluster.local, port: 5551], 40, 41

== Physical Plan ==
WriteToDataSourceV2 org.apache.spark.sql.execution.streaming.sources.MicroBatchWrite@552f4a05
+- *(3) Project [logs#19, processing_time#14-T3000ms, split(logs#19, ,, -1)[0] AS idx#22, split(logs#19, ,, -1)[1] AS hostname#26, split(logs#19, ,, -1)[2] AS time#31, split(logs#19, ,, -1)[3] AS method#37, split(logs#19, ,, -1)[4] AS url#44, split(logs#19, ,, -1)[5] AS responsecode#52, split(logs#19, ,, -1)[6] AS bytes#61]
   +- EventTimeWatermark processing_time#14: timestamp, 3 seconds
      +- *(2) Project [logs#19, processing_time#14]
         +- Generate explode(split(raw_logs#4, , -1)), [processing_time#14], false, [logs#19]
            +- *(1) Project [value#0 AS raw_logs#4, cast(split(value#0, , -1)[0] as timestamp) AS processing_time#14]
               +- *(1) Project [value#0, timestamp#1]
                  +- MicroBatchScan[value#0, timestamp#1] class org.apache.spark.sql.execution.streaming.sources.TextSocketTable$$anon$1
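Reading the four stages top to bottom: the parsed and analyzed plans keep one Project node per column-adding step (each `withColumn`-style call stacks a new Project), while the optimizer collapses all seven field extractions into a single Project, prunes the dead `current_batch_timestamp(...)` column (`processing_time#7`, which is immediately overwritten by the value parsed from the payload), and merges the two bottom Projects into the source scan. For reference, a minimal PySpark sketch that would produce a plan of this shape. The host, port, watermark, and output column names are taken from the plan above; everything else (session setup, the record delimiter, which the plan text does not preserve and is assumed here to be a newline, and the console sink) is an assumption:

```python
from pyspark.sql import SparkSession
from pyspark.sql.functions import current_timestamp, explode, split

spark = SparkSession.builder.appName("log-parser").getOrCreate()

# Socket source with includeTimestamp=true yields the [value, timestamp]
# columns seen in StreamingDataSourceV2Relation.
raw = (spark.readStream.format("socket")
       .option("host", "stream-emulator.data-science-tools.svc.cluster.local")
       .option("port", 5551)
       .option("includeTimestamp", True)
       .load()
       .withColumnRenamed("value", "raw_logs"))

# current_timestamp() is rendered as current_batch_timestamp(...) in the
# parsed plan; since the next line overwrites it with the event time parsed
# from the payload, the optimizer prunes it away entirely.
# The "\n" record delimiter is an assumption: the delimiter does not
# survive in the plan text (split(raw_logs#4, , -1)).
logs = (raw
        .withColumn("processing_time", current_timestamp())
        .withColumn("processing_time",
                    split("raw_logs", "\n")[0].cast("timestamp"))
        .select(explode(split("raw_logs", "\n")).alias("logs"),
                "processing_time")
        .withWatermark("processing_time", "3 seconds"))

# Each withColumn adds one Project node, producing the seven nested Projects
# in the parsed plan; the optimizer collapses them into the single Project
# at the top of the optimized plan.
for i, name in enumerate(["idx", "hostname", "time", "method",
                          "url", "responsecode", "bytes"]):
    logs = logs.withColumn(name, split("logs", ",")[i])

# Hypothetical sink; any streaming writer would show up as the
# WriteToDataSourceV2 / MicroBatchWrite node at the root of the plan.
query = (logs.writeStream.format("console")
         .outputMode("append")
         .start())
```

Note how the whole-stage-codegen markers `*(1)`, `*(2)`, `*(3)` in the physical plan delimit three fused pipelines, split where codegen-unsupported operators (Generate and EventTimeWatermark) sit between them.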