diff --git a/server/src/main/java/org/apache/druid/server/coordinator/compact/NewestSegmentFirstIterator.java b/server/src/main/java/org/apache/druid/server/coordinator/compact/NewestSegmentFirstIterator.java index 2c888ad71704..05bae7d23869 100644 --- a/server/src/main/java/org/apache/druid/server/coordinator/compact/NewestSegmentFirstIterator.java +++ b/server/src/main/java/org/apache/druid/server/coordinator/compact/NewestSegmentFirstIterator.java @@ -110,9 +110,10 @@ public class NewestSegmentFirstIterator implements CompactionSegmentIterator // For example, if the original is interval of 2020-01-28/2020-02-03 with WEEK granularity // and the configuredSegmentGranularity is MONTH, the segment will be split to two segments // of 2020-01/2020-02 and 2020-02/2020-03. - if (Intervals.ETERNITY.equals(segment.getInterval())) { + if (Intervals.ETERNITY.getStart().equals(segment.getInterval().getStart()) + || Intervals.ETERNITY.getEnd().equals(segment.getInterval().getEnd())) { // This is to prevent the coordinator from crashing as raised in https://github.com/apache/druid/issues/13208 - log.warn("Cannot compact datasource[%s] with ALL granularity", dataSource); + log.warn("Cannot compact datasource[%s] containing segments with partial-ETERNITY intervals", dataSource); return; } for (Interval interval : configuredSegmentGranularity.getIterable(segment.getInterval())) { @@ -429,8 +430,9 @@ private List findInitialSearchInterval( final List searchIntervals = new ArrayList<>(); for (Interval lookupInterval : filteredInterval) { - if (Intervals.ETERNITY.equals(lookupInterval)) { - log.warn("Cannot compact datasource[%s] since interval is ETERNITY.", dataSourceName); + if (Intervals.ETERNITY.getStart().equals(lookupInterval.getStart()) + || Intervals.ETERNITY.getEnd().equals(lookupInterval.getEnd())) { + log.warn("Cannot compact datasource[%s] since interval[%s] starts or ends at an ETERNITY boundary.", dataSourceName, lookupInterval); return Collections.emptyList(); } final List segments 
= timeline diff --git a/server/src/test/java/org/apache/druid/server/coordinator/compact/NewestSegmentFirstPolicyTest.java b/server/src/test/java/org/apache/druid/server/coordinator/compact/NewestSegmentFirstPolicyTest.java index 04f8f8993de6..553445d9f739 100644 --- a/server/src/test/java/org/apache/druid/server/coordinator/compact/NewestSegmentFirstPolicyTest.java +++ b/server/src/test/java/org/apache/druid/server/coordinator/compact/NewestSegmentFirstPolicyTest.java @@ -1636,6 +1636,70 @@ public void testSkipAllGranularityToDefault() Assert.assertFalse(iterator.hasNext()); } + @Test + public void testSkipFirstHalfEternityToDefault() + { + CompactionSegmentIterator iterator = policy.reset( + ImmutableMap.of(DATA_SOURCE, + createCompactionConfig(10000, + new Period("P0D"), + null + ) + ), + ImmutableMap.of( + DATA_SOURCE, + SegmentTimeline.forSegments(ImmutableSet.of( + new DataSegment( + DATA_SOURCE, + new Interval(DateTimes.MIN, DateTimes.of("2024-01-01")), + "0", + new HashMap<>(), + new ArrayList<>(), + new ArrayList<>(), + new NumberedShardSpec(0, 0), + 0, + 100) + ) + ) + ), + Collections.emptyMap() + ); + + Assert.assertFalse(iterator.hasNext()); + } + + @Test + public void testSkipSecondHalfEternityToDefault() + { + CompactionSegmentIterator iterator = policy.reset( + ImmutableMap.of(DATA_SOURCE, + createCompactionConfig(10000, + new Period("P0D"), + null + ) + ), + ImmutableMap.of( + DATA_SOURCE, + SegmentTimeline.forSegments(ImmutableSet.of( + new DataSegment( + DATA_SOURCE, + new Interval(DateTimes.of("2024-01-01"), DateTimes.MAX), + "0", + new HashMap<>(), + new ArrayList<>(), + new ArrayList<>(), + new NumberedShardSpec(0, 0), + 0, + 100) + ) + ) + ), + Collections.emptyMap() + ); + + Assert.assertFalse(iterator.hasNext()); + } + @Test public void testSkipAllToAllGranularity() {