@@ -53,13 +53,28 @@ protected Frameworks.ConfigBuilder config(CalciteAssert.SchemaSpec... schemaSpec
     ImmutableList<Object[]> rows =
         ImmutableList.of(
             new Object[] {
-              java.sql.Timestamp.valueOf("2024-07-01 00:00:00"), "web-01", "us-east", 45.2, 120
+              java.sql.Timestamp.valueOf("2024-07-01 00:00:00"),
+              java.sql.Timestamp.valueOf("2024-01-15 10:00:00"),
+              "web-01",
+              "us-east",
+              45.2,
+              120
             },
             new Object[] {
-              java.sql.Timestamp.valueOf("2024-07-01 00:01:00"), "web-02", "us-west", 38.7, 150
+              java.sql.Timestamp.valueOf("2024-07-01 00:01:00"),
+              java.sql.Timestamp.valueOf("2024-02-20 11:00:00"),
+              "web-02",
+              "us-west",
+              38.7,
+              150
             },
             new Object[] {
-              java.sql.Timestamp.valueOf("2024-07-01 00:02:00"), "web-01", "us-east", 55.3, 200
+              java.sql.Timestamp.valueOf("2024-07-01 00:02:00"),
+              java.sql.Timestamp.valueOf("2024-03-25 12:00:00"),
+              "web-01",
+              "us-east",
+              55.3,
+              200
             });
     schema.add("events", new EventsTable(rows));
     return Frameworks.newConfigBuilder()
@@ -347,6 +362,68 @@ public void testTimechartUsingZeroSpanShouldThrow() {
     verifyErrorMessageContains(t, "Zero or negative time interval not supported: 0h");
   }
 
+  // ==================== Timechart with Reverse tests ====================
+  // These tests verify that reverse works correctly with timechart.
+  // Timechart always adds a sort at the end of its plan, so reverse will
+  // find the collation via metadata query (tier 1) and flip the sort direction.
+
+  @Test
+  public void testTimechartWithReverse() {
+    // Timechart adds ORDER BY @timestamp ASC at the end
+    // Reverse should flip it to DESC
+    String ppl = "source=events | timechart count() | reverse";
+    RelNode root = getRelNode(ppl);
+    // The plan should have two sorts: original ASC from timechart, then DESC from reverse
+    String expectedLogical =
+        "LogicalSort(sort0=[$0], dir0=[DESC])\n"
+            + "  LogicalSort(sort0=[$0], dir0=[ASC])\n"
+            + "    LogicalProject(@timestamp=[$0], count()=[$1])\n"
+            + "      LogicalAggregate(group=[{0}], count()=[COUNT()])\n"
+            + "        LogicalProject(@timestamp0=[SPAN($0, 1, 'm')])\n"
+            + "          LogicalFilter(condition=[IS NOT NULL($0)])\n"
+            + "            LogicalTableScan(table=[[scott, events]])\n";
+    verifyLogical(root, expectedLogical);
+
+    String expectedSparkSql =
+        "SELECT *\n"
+            + "FROM (SELECT SPAN(`@timestamp`, 1, 'm') `@timestamp`, COUNT(*) `count()`\n"
+            + "FROM `scott`.`events`\n"
+            + "WHERE `@timestamp` IS NOT NULL\n"
+            + "GROUP BY SPAN(`@timestamp`, 1, 'm')\n"
+            + "ORDER BY 1 NULLS LAST) `t3`\n"
+            + "ORDER BY `@timestamp` DESC NULLS FIRST";
+    verifyPPLToSparkSQL(root, expectedSparkSql);
+  }
+
+  @Test
+  public void testTimechartWithCustomTimefieldAndReverse() {
+    // Timechart with custom timefield should also work with reverse
+    // The sort is on created_at (the custom field), not @timestamp
+    String ppl = "source=events | timechart timefield=created_at span=1month count() | reverse";
+    RelNode root = getRelNode(ppl);
+
+    // Verify the logical plan shows two sorts: ASC from timechart, DESC from reverse
+    String expectedLogical =
+        "LogicalSort(sort0=[$0], dir0=[DESC])\n"
+            + "  LogicalSort(sort0=[$0], dir0=[ASC])\n"
+            + "    LogicalProject(created_at=[$0], count()=[$1])\n"
+            + "      LogicalAggregate(group=[{0}], count()=[COUNT()])\n"
+            + "        LogicalProject(created_at0=[SPAN($1, 1, 'M')])\n"
+            + "          LogicalFilter(condition=[IS NOT NULL($1)])\n"
+            + "            LogicalTableScan(table=[[scott, events]])\n";
+    verifyLogical(root, expectedLogical);
+
+    String expectedSparkSql =
+        "SELECT *\n"
+            + "FROM (SELECT SPAN(`created_at`, 1, 'M') `created_at`, COUNT(*) `count()`\n"
+            + "FROM `scott`.`events`\n"
+            + "WHERE `created_at` IS NOT NULL\n"
+            + "GROUP BY SPAN(`created_at`, 1, 'M')\n"
+            + "ORDER BY 1 NULLS LAST) `t3`\n"
+            + "ORDER BY `created_at` DESC NULLS FIRST";
+    verifyPPLToSparkSQL(root, expectedSparkSql);
+  }
+
   private UnresolvedPlan parsePPL(String query) {
     PPLSyntaxParser parser = new PPLSyntaxParser();
     AstBuilder astBuilder = new AstBuilder(query);
@@ -363,6 +440,8 @@ public static class EventsTable implements ScannableTable {
           .builder()
           .add("@timestamp", SqlTypeName.TIMESTAMP)
           .nullable(true)
+          .add("created_at", SqlTypeName.TIMESTAMP)
+          .nullable(true)
           .add("host", SqlTypeName.VARCHAR)
           .nullable(true)
           .add("region", SqlTypeName.VARCHAR)