diff --git a/core/src/main/java/org/polypheny/db/algebra/logical/relational/LogicalRelScan.java b/core/src/main/java/org/polypheny/db/algebra/logical/relational/LogicalRelScan.java index 82a40a9a2b..01b8d65cd1 100644 --- a/core/src/main/java/org/polypheny/db/algebra/logical/relational/LogicalRelScan.java +++ b/core/src/main/java/org/polypheny/db/algebra/logical/relational/LogicalRelScan.java @@ -36,16 +36,21 @@ import com.google.common.collect.ImmutableList; import java.util.List; +import org.jetbrains.annotations.Nullable; import org.polypheny.db.algebra.AlgCollationTraitDef; import org.polypheny.db.algebra.AlgInput; import org.polypheny.db.algebra.AlgNode; import org.polypheny.db.algebra.core.relational.RelScan; import org.polypheny.db.catalog.entity.CatalogEntity; +import org.polypheny.db.catalog.entity.logical.LogicalMaterializedView; +import org.polypheny.db.catalog.entity.logical.LogicalTable; +import org.polypheny.db.catalog.logistic.EntityType; import org.polypheny.db.plan.AlgOptCluster; import org.polypheny.db.plan.AlgOptEntity; import org.polypheny.db.plan.AlgTraitSet; import org.polypheny.db.plan.Convention; import org.polypheny.db.schema.trait.ModelTrait; +import org.polypheny.db.view.ViewManager; /** @@ -120,5 +125,17 @@ public static LogicalRelScan create( AlgOptCluster cluster, final CatalogEntity return new LogicalRelScan( cluster, traitSet, entity ); } + + @Override + public AlgNode unfoldView( @Nullable AlgNode parent, int index, AlgOptCluster cluster ) { + if ( false ) { + LogicalTable catalogTable = entity.unwrap( LogicalTable.class ); + if ( catalogTable.entityType == EntityType.MATERIALIZED_VIEW && ((LogicalMaterializedView) catalogTable).isOrdered() ) { + return ViewManager.orderMaterialized( this ); + } + } + return super.unfoldView( parent, index, cluster ); + } + } diff --git a/core/src/main/java/org/polypheny/db/algebra/logical/relational/LogicalRelViewScan.java 
b/core/src/main/java/org/polypheny/db/algebra/logical/relational/LogicalRelViewScan.java index 7a1de0d274..02c9be1f6d 100644 --- a/core/src/main/java/org/polypheny/db/algebra/logical/relational/LogicalRelViewScan.java +++ b/core/src/main/java/org/polypheny/db/algebra/logical/relational/LogicalRelViewScan.java @@ -78,7 +78,7 @@ public boolean containsView() { @Override public AlgNode unfoldView( @Nullable AlgNode parent, int index, AlgOptCluster cluster ) { - AlgNode unfolded = unfoldView( cluster ); + AlgNode unfolded = unfoldView( cluster ).unfoldView( this, 0, cluster ); if ( parent != null ) { parent.replaceInput( index, unfolded ); } diff --git a/core/src/main/java/org/polypheny/db/algebra/rules/DateRangeRules.java b/core/src/main/java/org/polypheny/db/algebra/rules/DateRangeRules.java index 88058a552b..203e6e2b42 100644 --- a/core/src/main/java/org/polypheny/db/algebra/rules/DateRangeRules.java +++ b/core/src/main/java/org/polypheny/db/algebra/rules/DateRangeRules.java @@ -604,13 +604,13 @@ private Calendar timestampValue( RexLiteral timeLiteral ) { switch ( timeLiteral.getPolyType() ) { case TIMESTAMP_WITH_LOCAL_TIME_ZONE: final TimeZone tz = TimeZone.getTimeZone( this.timeZone ); - return Util.calendar( Functions.timestampWithLocalTimeZoneToTimestamp( timeLiteral.value.asTimeStamp().sinceEpoch, tz ) ); + return Util.calendar( Functions.timestampWithLocalTimeZoneToTimestamp( timeLiteral.value.asTimeStamp().milliSinceEpoch, tz ) ); case TIMESTAMP: - return Util.calendar( timeLiteral.value.asTimeStamp().sinceEpoch ); + return Util.calendar( timeLiteral.value.asTimeStamp().milliSinceEpoch ); case DATE: // Cast date to timestamp with local time zone //final DateString d = timeLiteral.getValueAs( DateString.class ); - return Util.calendar( timeLiteral.value.asDate().sinceEpoch ); + return Util.calendar( timeLiteral.value.asDate().milliSinceEpoch ); default: throw Util.unexpected( timeLiteral.getPolyType() ); } diff --git 
a/core/src/main/java/org/polypheny/db/functions/Functions.java b/core/src/main/java/org/polypheny/db/functions/Functions.java index aebcc85d55..72391a660b 100644 --- a/core/src/main/java/org/polypheny/db/functions/Functions.java +++ b/core/src/main/java/org/polypheny/db/functions/Functions.java @@ -137,7 +137,7 @@ public class Functions { private static final DecimalFormat DOUBLE_FORMAT = NumberUtil.decimalFormat( "0.0E0" ); - private static final TimeZone LOCAL_TZ = TimeZone.getDefault(); + public static final TimeZone LOCAL_TZ = TimeZone.getDefault(); private static final Function1, Enumerable> LIST_AS_ENUMERABLE = Linq4j::asEnumerable; @@ -991,17 +991,17 @@ public static PolyBoolean lt( PolyNumber b0, PolyNumber b1 ) { public static PolyBoolean lt( PolyTemporal b0, PolyTemporal b1 ) { - return lt( PolyLong.of( b0.getSinceEpoch() ), PolyLong.of( b1.getSinceEpoch() ) ); + return lt( PolyLong.of( b0.getMilliSinceEpoch() ), PolyLong.of( b1.getMilliSinceEpoch() ) ); } public static PolyBoolean lt( PolyTemporal b0, PolyNumber b1 ) { - return lt( PolyLong.of( b0.getSinceEpoch() ), b1 ); + return lt( PolyLong.of( b0.getMilliSinceEpoch() ), b1 ); } public static PolyBoolean lt( PolyNumber b0, PolyTemporal b1 ) { - return lt( b0, PolyLong.of( b1.getSinceEpoch() ) ); + return lt( b0, PolyLong.of( b1.getMilliSinceEpoch() ) ); } // <= @@ -1166,7 +1166,7 @@ public static PolyBoolean ge( PolyNumber b0, PolyNumber b1 ) { public static PolyBoolean ge( PolyTemporal b0, PolyTemporal b1 ) { - return ge( PolyLong.of( b0.getSinceEpoch() ), PolyLong.of( b1.getSinceEpoch() ) ); + return ge( PolyLong.of( b0.getMilliSinceEpoch() ), PolyLong.of( b1.getMilliSinceEpoch() ) ); } diff --git a/core/src/main/java/org/polypheny/db/functions/TemporalFunctions.java b/core/src/main/java/org/polypheny/db/functions/TemporalFunctions.java index a834df75a9..ddf1720270 100644 --- a/core/src/main/java/org/polypheny/db/functions/TemporalFunctions.java +++ 
b/core/src/main/java/org/polypheny/db/functions/TemporalFunctions.java @@ -32,7 +32,7 @@ public class TemporalFunctions { @SuppressWarnings("unused") public static PolyString unixDateToString( PolyDate date ) { - return PolyString.of( DateTimeUtils.unixDateToString( date.sinceEpoch.intValue() ) ); + return PolyString.of( DateTimeUtils.unixDateToString( date.milliSinceEpoch.intValue() ) ); } @@ -44,7 +44,7 @@ public static PolyString unixTimeToString( PolyTime time ) { @SuppressWarnings("unused") public static PolyString unixTimestampToString( PolyTimeStamp timeStamp ) { - return PolyString.of( DateTimeUtils.unixTimestampToString( timeStamp.sinceEpoch ) ); + return PolyString.of( DateTimeUtils.unixTimestampToString( timeStamp.milliSinceEpoch ) ); } @@ -62,31 +62,31 @@ public static PolyString intervalDayTimeToString( PolyInterval interval, TimeUni @SuppressWarnings("unused") public static PolyLong unixDateExtract( TimeUnitRange unitRange, PolyTemporal date ) { - return PolyLong.of( DateTimeUtils.unixDateExtract( unitRange, date.getSinceEpoch() ) ); + return PolyLong.of( DateTimeUtils.unixDateExtract( unitRange, date.getMilliSinceEpoch() ) ); } @SuppressWarnings("unused") public static PolyLong unixDateFloor( TimeUnitRange unitRange, PolyDate date ) { - return PolyLong.of( DateTimeUtils.unixDateFloor( unitRange, date.sinceEpoch ) ); + return PolyLong.of( DateTimeUtils.unixDateFloor( unitRange, date.milliSinceEpoch ) ); } @SuppressWarnings("unused") public static PolyLong unixDateCeil( TimeUnitRange unitRange, PolyDate date ) { - return PolyLong.of( DateTimeUtils.unixDateCeil( unitRange, date.sinceEpoch ) ); + return PolyLong.of( DateTimeUtils.unixDateCeil( unitRange, date.milliSinceEpoch ) ); } @SuppressWarnings("unused") public static PolyTimeStamp unixTimestampFloor( TimeUnitRange unitRange, PolyTimeStamp timeStamp ) { - return PolyTimeStamp.of( DateTimeUtils.unixTimestampFloor( unitRange, timeStamp.sinceEpoch ) ); + return PolyTimeStamp.of( 
DateTimeUtils.unixTimestampFloor( unitRange, timeStamp.milliSinceEpoch ) ); } @SuppressWarnings("unused") public static PolyTimeStamp unixTimestampCeil( TimeUnitRange unitRange, PolyTimeStamp timeStamp ) { - return PolyTimeStamp.of( DateTimeUtils.unixTimestampFloor( unitRange, timeStamp.sinceEpoch ) ); + return PolyTimeStamp.of( DateTimeUtils.unixTimestampCeil( unitRange, timeStamp.milliSinceEpoch ) ); } @@ -95,9 +95,9 @@ public static PolyTimeStamp unixTimestampCeil( TimeUnitRange unitRange, PolyTime */ @SuppressWarnings("unused") public static PolyTimeStamp addMonths( PolyTimeStamp timestamp, PolyNumber m ) { - final long millis = DateTimeUtils.floorMod( timestamp.sinceEpoch, DateTimeUtils.MILLIS_PER_DAY ); - final PolyDate x = addMonths( PolyDate.of( timestamp.sinceEpoch - millis / DateTimeUtils.MILLIS_PER_DAY ), m ); - return PolyTimeStamp.of( x.sinceEpoch * DateTimeUtils.MILLIS_PER_DAY + millis ); + final long millis = DateTimeUtils.floorMod( timestamp.milliSinceEpoch, DateTimeUtils.MILLIS_PER_DAY ); + final PolyDate x = addMonths( PolyDate.of( timestamp.milliSinceEpoch - millis / DateTimeUtils.MILLIS_PER_DAY ), m ); + return PolyTimeStamp.of( x.milliSinceEpoch * DateTimeUtils.MILLIS_PER_DAY + millis ); } @@ -106,9 +106,9 @@ public static PolyTimeStamp addMonths( PolyTimeStamp timestamp, PolyNumber m ) { - int y0 = (int) DateTimeUtils.unixDateExtract( TimeUnitRange.YEAR, date.sinceEpoch / DateTimeUtils.MILLIS_PER_DAY ); - int m0 = (int) DateTimeUtils.unixDateExtract( TimeUnitRange.MONTH, date.sinceEpoch / DateTimeUtils.MILLIS_PER_DAY ); - int d0 = (int) DateTimeUtils.unixDateExtract( TimeUnitRange.DAY, date.sinceEpoch / DateTimeUtils.MILLIS_PER_DAY ); + int y0 = (int) DateTimeUtils.unixDateExtract( TimeUnitRange.YEAR, date.milliSinceEpoch / DateTimeUtils.MILLIS_PER_DAY ); + int m0 = (int) DateTimeUtils.unixDateExtract( TimeUnitRange.MONTH, date.milliSinceEpoch /
DateTimeUtils.MILLIS_PER_DAY ); + int d0 = (int) DateTimeUtils.unixDateExtract( TimeUnitRange.DAY, date.milliSinceEpoch / DateTimeUtils.MILLIS_PER_DAY ); int y = m.intValue() / 12; y0 += y; m0 += m.intValue() - y * 12; @@ -152,19 +152,19 @@ private static int lastDay( int y, int m ) { */ @SuppressWarnings("unused") public static PolyNumber subtractMonths( PolyDate date0, PolyDate date1 ) { - if ( date0.sinceEpoch < date1.sinceEpoch ) { + if ( date0.milliSinceEpoch < date1.milliSinceEpoch ) { return subtractMonths( date1, date0 ).negate(); } // Start with an estimate. // Since no month has more than 31 days, the estimate is <= the true value. - long m = (date0.sinceEpoch - date1.sinceEpoch) / 31; + long m = (date0.milliSinceEpoch - date1.milliSinceEpoch) / 31; for ( ; ; ) { - long date2 = addMonths( date1, PolyLong.of( m ) ).sinceEpoch; - if ( date2 >= date0.sinceEpoch ) { + long date2 = addMonths( date1, PolyLong.of( m ) ).milliSinceEpoch; + if ( date2 >= date0.milliSinceEpoch ) { return PolyLong.of( m ); } - long date3 = addMonths( date1, PolyLong.of( m + 1 ) ).sinceEpoch; - if ( date3 > date0.sinceEpoch ) { + long date3 = addMonths( date1, PolyLong.of( m + 1 ) ).milliSinceEpoch; + if ( date3 > date0.milliSinceEpoch ) { return PolyLong.of( m ); } ++m; @@ -174,12 +174,12 @@ public static PolyNumber subtractMonths( PolyDate date0, PolyDate date1 ) { @SuppressWarnings("unused") public static PolyNumber subtractMonths( PolyTimeStamp t0, PolyTimeStamp t1 ) { - final long millis0 = floorMod( PolyLong.of( t0.sinceEpoch ), PolyInteger.of( DateTimeUtils.MILLIS_PER_DAY ) ).longValue(); - final int d0 = floorDiv( PolyLong.of( t0.sinceEpoch - millis0 ), PolyInteger.of( DateTimeUtils.MILLIS_PER_DAY ) ).intValue(); - final long millis1 = floorMod( PolyLong.of( t1.sinceEpoch ), PolyLong.of( DateTimeUtils.MILLIS_PER_DAY ) ).longValue(); - final int d1 = floorDiv( PolyLong.of( t1.sinceEpoch - millis1 ), PolyInteger.of( DateTimeUtils.MILLIS_PER_DAY ) ).intValue(); + final long 
millis0 = floorMod( PolyLong.of( t0.milliSinceEpoch ), PolyInteger.of( DateTimeUtils.MILLIS_PER_DAY ) ).longValue(); + final int d0 = floorDiv( PolyLong.of( t0.milliSinceEpoch - millis0 ), PolyInteger.of( DateTimeUtils.MILLIS_PER_DAY ) ).intValue(); + final long millis1 = floorMod( PolyLong.of( t1.milliSinceEpoch ), PolyLong.of( DateTimeUtils.MILLIS_PER_DAY ) ).longValue(); + final int d1 = floorDiv( PolyLong.of( t1.milliSinceEpoch - millis1 ), PolyInteger.of( DateTimeUtils.MILLIS_PER_DAY ) ).intValue(); PolyNumber x = subtractMonths( PolyDate.of( d0 ), PolyDate.of( d1 ) ); - final long d2 = addMonths( PolyDate.of( d1 ), x ).sinceEpoch; + final long d2 = addMonths( PolyDate.of( d1 ), x ).milliSinceEpoch; if ( d2 == d0 && millis0 < millis1 ) { x = x.subtract( PolyInteger.of( 1 ) ); } diff --git a/core/src/main/java/org/polypheny/db/processing/LogicalAlgAnalyzeShuttle.java b/core/src/main/java/org/polypheny/db/processing/LogicalAlgAnalyzeShuttle.java index ea3d9144b4..558881feff 100644 --- a/core/src/main/java/org/polypheny/db/processing/LogicalAlgAnalyzeShuttle.java +++ b/core/src/main/java/org/polypheny/db/processing/LogicalAlgAnalyzeShuttle.java @@ -25,10 +25,13 @@ import java.util.Map; import java.util.Set; import java.util.stream.Collectors; -import lombok.Getter; +import lombok.EqualsAndHashCode; +import lombok.Value; import lombok.extern.slf4j.Slf4j; import org.polypheny.db.algebra.AlgNode; import org.polypheny.db.algebra.AlgShuttleImpl; +import org.polypheny.db.algebra.core.common.Modify; +import org.polypheny.db.algebra.core.common.Scan; import org.polypheny.db.algebra.core.relational.RelScan; import org.polypheny.db.algebra.logical.common.LogicalConstraintEnforcer; import org.polypheny.db.algebra.logical.document.LogicalDocumentAggregate; @@ -62,40 +65,39 @@ import org.polypheny.db.algebra.logical.relational.LogicalUnion; import org.polypheny.db.catalog.Catalog; import org.polypheny.db.catalog.entity.CatalogEntity; +import 
org.polypheny.db.catalog.entity.allocation.AllocationEntity; import org.polypheny.db.catalog.entity.logical.LogicalColumn; import org.polypheny.db.catalog.entity.logical.LogicalEntity; import org.polypheny.db.catalog.entity.logical.LogicalTable; -import org.polypheny.db.transaction.Statement; +import org.polypheny.db.catalog.logistic.NamespaceType; /** * Universal routing alg shuttle class to extract partition and column information from AlgNode. */ +@EqualsAndHashCode(callSuper = true) @Slf4j +@Value public class LogicalAlgAnalyzeShuttle extends AlgShuttleImpl { - protected final LogicalAlgAnalyzeRexShuttle rexShuttle; - @Getter - //protected final Map> filterMap = new HashMap<>(); // logical scanId (ScanId) -> List partitionsValue - protected final Map> partitionValueFilterPerScan = new HashMap<>(); // logical scanId (ScanId) -> (logical tableId -> List partitionsValue) - @Getter - protected final HashSet hashBasis = new HashSet<>(); - @Getter - protected final LinkedHashMap availableColumns = new LinkedHashMap<>(); // column id -> schemaName.tableName.ColumnName - protected final HashMap availableColumnsWithTable = new HashMap<>(); // columnId -> tableId - @Getter - protected final List entityId = new ArrayList<>(); - private final Statement statement; + public LogicalAlgAnalyzeRexShuttle rexShuttle; - @Getter - protected HashMap> ordered; + public Map> partitionValueFilterPerScan = new HashMap<>(); // logical scanId (ScanId) -> (logical tableId -> List partitionsValue) - @Getter - public int rowCount; + public Set hashBasis = new HashSet<>(); + public Map availableColumns = new LinkedHashMap<>(); // column id -> schemaName.tableName.ColumnName - public LogicalAlgAnalyzeShuttle( Statement statement ) { - this.statement = statement; + public Map availableColumnsWithTable = new HashMap<>(); // columnId -> tableId + + public Map> modifiedEntities = new HashMap<>(); + + public Map> scannedEntities = new HashMap<>(); + + public List entityIds = new ArrayList<>(); + + 
+ public LogicalAlgAnalyzeShuttle() { this.rexShuttle = new LogicalAlgAnalyzeRexShuttle(); } @@ -138,6 +140,22 @@ public String getQueryName() { } + private void addScannedEntity( NamespaceType type, long entityId ) { + if ( !scannedEntities.containsKey( type ) ) { + scannedEntities.put( type, new HashSet<>() ); + } + scannedEntities.get( type ).add( entityId ); + } + + + private void addModifiedEntity( NamespaceType type, long entityId ) { + if ( !modifiedEntities.containsKey( type ) ) { + modifiedEntities.put( type, new HashSet<>() ); + } + modifiedEntities.get( type ).add( entityId ); + } + + @Override public AlgNode visit( LogicalAggregate aggregate ) { hashBasis.add( "LogicalAggregate#" + aggregate.getAggCallList() ); @@ -148,6 +166,9 @@ public AlgNode visit( LogicalAggregate aggregate ) { @Override public AlgNode visit( LogicalLpgModify modify ) { hashBasis.add( modify.getClass().getSimpleName() ); + + addModifiedEntity( modify.getEntity().namespaceType, getLogicalId( modify ) ); + return super.visit( modify ); } @@ -156,6 +177,8 @@ public AlgNode visit( LogicalLpgModify modify ) { public AlgNode visit( LogicalLpgScan scan ) { hashBasis.add( scan.getClass().getSimpleName() + "#" + scan.entity.id ); + addScannedEntity( scan.getEntity().namespaceType, scan.entity.id ); + return super.visit( scan ); } @@ -221,6 +244,8 @@ public AlgNode visit( LogicalLpgTransformer transformer ) { public AlgNode visit( LogicalDocumentModify modify ) { hashBasis.add( "LogicalDocumentModify" ); + addModifiedEntity( modify.getEntity().namespaceType, getLogicalId( modify ) ); + return super.visit( modify ); } @@ -256,6 +281,9 @@ public AlgNode visit( LogicalDocumentProject project ) { @Override public AlgNode visit( LogicalDocumentScan scan ) { hashBasis.add( "LogicalDocumentScan#" + scan.entity.id ); + + addScannedEntity( scan.entity.namespaceType, getLogicalId( scan ) ); + return super.visit( scan ); } @@ -294,6 +322,9 @@ public AlgNode visit( RelScan scan ) { throw new 
RuntimeException(); } hashBasis.add( "Scan#" + scan.getEntity().id ); + + addScannedEntity( scan.getEntity().namespaceType, getLogicalId( scan ) ); + // get available columns for every table scan this.getAvailableColumns( scan ); @@ -301,6 +332,16 @@ public AlgNode visit( RelScan scan ) { } + private static long getLogicalId( Scan scan ) { + return scan.entity.isLogical() ? scan.entity.id : scan.entity.unwrap( AllocationEntity.class ).getLogicalId(); + } + + + private static long getLogicalId( Modify modify ) { + return modify.entity.isLogical() ? modify.entity.id : modify.entity.unwrap( AllocationEntity.class ).getLogicalId(); + } + + @Override public AlgNode visit( LogicalFilter filter ) { hashBasis.add( "LogicalFilter" ); @@ -383,6 +424,9 @@ public AlgNode visit( LogicalExchange exchange ) { @Override public AlgNode visit( LogicalRelModify modify ) { hashBasis.add( "LogicalModify" ); + + addModifiedEntity( modify.getEntity().namespaceType, getLogicalId( modify ) ); + // e.g. inserts only have underlying values and need to attach the table correctly this.getAvailableColumns( modify ); return visitChildren( modify ); @@ -397,7 +441,6 @@ public AlgNode visit( AlgNode other ) { private void getAvailableColumns( AlgNode scan ) { - this.entityId.add( scan.getEntity().id ); final LogicalTable table = scan.getEntity().unwrap( LogicalTable.class ); if ( table != null ) { final List columns = Catalog.getInstance().getSnapshot().rel().getColumns( table.id ); diff --git a/core/src/main/java/org/polypheny/db/rex/RexBuilder.java b/core/src/main/java/org/polypheny/db/rex/RexBuilder.java index 6a8ee7f5d1..5b4bfa8aa9 100644 --- a/core/src/main/java/org/polypheny/db/rex/RexBuilder.java +++ b/core/src/main/java/org/polypheny/db/rex/RexBuilder.java @@ -1080,7 +1080,7 @@ public RexLiteral makeTimeLiteral( TimeString time, int precision ) { public RexLiteral makeTimeLiteral( PolyTime time, int precision ) { return makeLiteral( - PolyTime.of( time.getSinceEpoch() ), + PolyTime.of( 
time.getMilliSinceEpoch() ), typeFactory.createPolyType( PolyType.TIME, precision ), PolyType.TIME ); } @@ -1110,7 +1110,7 @@ public RexLiteral makeTimestampLiteral( TimestampString timestamp, int precision public RexLiteral makeTimestampLiteral( PolyTimeStamp timestamp, int precision ) { return makeLiteral( - PolyTimeStamp.of( timestamp.sinceEpoch ), + PolyTimeStamp.of( timestamp.milliSinceEpoch ), typeFactory.createPolyType( PolyType.TIMESTAMP, precision ), PolyType.TIMESTAMP ); } diff --git a/core/src/main/java/org/polypheny/db/routing/LogicalQueryInformation.java b/core/src/main/java/org/polypheny/db/routing/LogicalQueryInformation.java index b10777a5e8..b7c3d303ef 100644 --- a/core/src/main/java/org/polypheny/db/routing/LogicalQueryInformation.java +++ b/core/src/main/java/org/polypheny/db/routing/LogicalQueryInformation.java @@ -16,14 +16,19 @@ package org.polypheny.db.routing; +import com.google.common.collect.ImmutableMap; +import com.google.common.collect.ImmutableSet; import java.util.List; import java.util.Map; +import java.util.Set; +import org.polypheny.db.catalog.logistic.NamespaceType; /** * This is the result of the analyze step in the query pipeline. * The class contains logical information about a query. */ + public interface LogicalQueryInformation { /** @@ -56,11 +61,16 @@ public interface LogicalQueryInformation { /** * @return gets the query class. */ - String getQueryClass(); + String getQueryHash(); - /** - * @return Gets a list of all accessed tables. 
- */ - List getTablesIds(); + ImmutableMap> getScannedEntities(); + + ImmutableMap> getModifiedEntities(); + + ImmutableSet getAllModifiedEntities(); + + ImmutableSet getAllScannedEntities(); + + ImmutableSet getAllEntities(); } diff --git a/core/src/main/java/org/polypheny/db/type/entity/PolyBigDecimal.java b/core/src/main/java/org/polypheny/db/type/entity/PolyBigDecimal.java index 473568b55e..4ba531cb21 100644 --- a/core/src/main/java/org/polypheny/db/type/entity/PolyBigDecimal.java +++ b/core/src/main/java/org/polypheny/db/type/entity/PolyBigDecimal.java @@ -104,7 +104,7 @@ public static PolyBigDecimal convert( PolyValue value ) { if ( value.isNumber() ) { return PolyBigDecimal.of( value.asNumber().bigDecimalValue() ); } else if ( value.isTemporal() ) { - return PolyBigDecimal.of( value.asTemporal().getSinceEpoch() ); + return PolyBigDecimal.of( value.asTemporal().getMilliSinceEpoch() ); } else if ( value.isString() ) { return PolyBigDecimal.of( value.asString().value ); } diff --git a/core/src/main/java/org/polypheny/db/type/entity/PolyBoolean.java b/core/src/main/java/org/polypheny/db/type/entity/PolyBoolean.java index 38d2bc28b3..a5e7c1f94b 100644 --- a/core/src/main/java/org/polypheny/db/type/entity/PolyBoolean.java +++ b/core/src/main/java/org/polypheny/db/type/entity/PolyBoolean.java @@ -77,6 +77,9 @@ public static PolyBoolean of( boolean value ) { public static PolyBoolean convert( Object value ) { + if ( value == null ) { + return null; + } if ( value instanceof PolyValue ) { if ( ((PolyValue) value).isBoolean() ) { return ((PolyValue) value).asBoolean(); diff --git a/core/src/main/java/org/polypheny/db/type/entity/PolyDate.java b/core/src/main/java/org/polypheny/db/type/entity/PolyDate.java index bfe0ff406b..8550db6ab2 100644 --- a/core/src/main/java/org/polypheny/db/type/entity/PolyDate.java +++ b/core/src/main/java/org/polypheny/db/type/entity/PolyDate.java @@ -42,12 +42,12 @@ public class PolyDate extends PolyTemporal { @Getter - public Long 
sinceEpoch; + public Long milliSinceEpoch; - public PolyDate( long sinceEpoch ) { + public PolyDate( long milliSinceEpoch ) { super( PolyType.DATE ); - this.sinceEpoch = sinceEpoch; + this.milliSinceEpoch = milliSinceEpoch; } @@ -67,12 +67,12 @@ public static PolyDate ofNullable( java.sql.Date date ) { public Date asDefaultDate() { - return new Date( sinceEpoch ); + return new Date( milliSinceEpoch ); } public java.sql.Date asSqlDate() { - return new java.sql.Date( sinceEpoch ); + return new java.sql.Date( milliSinceEpoch ); } @@ -87,13 +87,13 @@ public int compareTo( @NotNull PolyValue o ) { return -1; } - return Long.compare( sinceEpoch, o.asDate().sinceEpoch ); + return Long.compare( milliSinceEpoch, o.asDate().milliSinceEpoch ); } @Override public Expression asExpression() { - return Expressions.new_( PolyLong.class, Expressions.constant( sinceEpoch ) ); + return Expressions.new_( PolyLong.class, Expressions.constant( milliSinceEpoch ) ); } @@ -107,7 +107,7 @@ public static class PolyDateSerializer implements JsonSerializer, Json @Override public JsonElement serialize( PolyDate src, Type typeOfSrc, JsonSerializationContext context ) { - return new JsonPrimitive( src.sinceEpoch ); + return new JsonPrimitive( src.milliSinceEpoch ); } diff --git a/core/src/main/java/org/polypheny/db/type/entity/PolyLong.java b/core/src/main/java/org/polypheny/db/type/entity/PolyLong.java index 2b2e4fe327..0a834ca580 100644 --- a/core/src/main/java/org/polypheny/db/type/entity/PolyLong.java +++ b/core/src/main/java/org/polypheny/db/type/entity/PolyLong.java @@ -180,7 +180,7 @@ public static PolyLong convert( PolyValue value ) { if ( value.isNumber() ) { return PolyLong.of( value.asNumber().longValue() ); } else if ( value.isTemporal() ) { - return PolyLong.of( value.asTemporal().getSinceEpoch() ); + return PolyLong.of( value.asTemporal().getMilliSinceEpoch() ); } else if ( value.isString() ) { return PolyLong.of( Long.parseLong( value.asString().value ) ); } diff --git 
a/core/src/main/java/org/polypheny/db/type/entity/PolyTime.java b/core/src/main/java/org/polypheny/db/type/entity/PolyTime.java index 080cb82731..2169b484c9 100644 --- a/core/src/main/java/org/polypheny/db/type/entity/PolyTime.java +++ b/core/src/main/java/org/polypheny/db/type/entity/PolyTime.java @@ -105,7 +105,7 @@ public PolySerializable copy() { @Override - public Long getSinceEpoch() { + public Long getMilliSinceEpoch() { return Long.valueOf( ofDay ); } diff --git a/core/src/main/java/org/polypheny/db/type/entity/PolyTimeStamp.java b/core/src/main/java/org/polypheny/db/type/entity/PolyTimeStamp.java index 258f8edb0b..037389a865 100644 --- a/core/src/main/java/org/polypheny/db/type/entity/PolyTimeStamp.java +++ b/core/src/main/java/org/polypheny/db/type/entity/PolyTimeStamp.java @@ -44,12 +44,12 @@ public class PolyTimeStamp extends PolyTemporal { @Getter - public Long sinceEpoch; + public Long milliSinceEpoch; - public PolyTimeStamp( Long sinceEpoch ) { + public PolyTimeStamp( Long milliSinceEpoch ) { super( PolyType.TIMESTAMP ); - this.sinceEpoch = sinceEpoch; + this.milliSinceEpoch = milliSinceEpoch; } @@ -89,7 +89,7 @@ public static PolyTimeStamp of( Date date ) { public Timestamp asSqlTimestamp() { - return new Timestamp( sinceEpoch ); + return new Timestamp( milliSinceEpoch ); } @@ -99,13 +99,13 @@ public int compareTo( @NotNull PolyValue o ) { return -1; } - return Long.compare( sinceEpoch, o.asTimeStamp().sinceEpoch ); + return Long.compare( milliSinceEpoch, o.asTimeStamp().milliSinceEpoch ); } @Override public Expression asExpression() { - return Expressions.new_( PolyTimeStamp.class, Expressions.constant( sinceEpoch ) ); + return Expressions.new_( PolyTimeStamp.class, Expressions.constant( milliSinceEpoch ) ); } @@ -119,7 +119,7 @@ public static PolyTimeStamp convert( PolyValue value ) { if ( value.isNumber() ) { return PolyTimeStamp.of( value.asNumber().longValue() ); } else if ( value.isTemporal() ) { - return PolyTimeStamp.of( 
value.asTemporal().getSinceEpoch() ); + return PolyTimeStamp.of( value.asTemporal().getMilliSinceEpoch() ); } throw new NotImplementedException( "convert " + PolyTimeStamp.class.getSimpleName() ); } @@ -141,7 +141,7 @@ public PolyTimeStamp deserialize( JsonElement json, Type typeOfT, JsonDeserializ @Override public JsonElement serialize( PolyTimeStamp src, Type typeOfSrc, JsonSerializationContext context ) { - return new JsonPrimitive( src.sinceEpoch ); + return new JsonPrimitive( src.milliSinceEpoch ); } } diff --git a/core/src/main/java/org/polypheny/db/type/entity/category/PolyTemporal.java b/core/src/main/java/org/polypheny/db/type/entity/category/PolyTemporal.java index 25238d278b..0760900c77 100644 --- a/core/src/main/java/org/polypheny/db/type/entity/category/PolyTemporal.java +++ b/core/src/main/java/org/polypheny/db/type/entity/category/PolyTemporal.java @@ -21,6 +21,7 @@ import lombok.EqualsAndHashCode; import lombok.Value; import lombok.experimental.NonFinal; +import org.apache.calcite.avatica.util.DateTimeUtils; import org.polypheny.db.type.PolyType; import org.polypheny.db.type.entity.PolyValue; @@ -29,7 +30,7 @@ @NonFinal public abstract class PolyTemporal extends PolyValue { - public abstract Long getSinceEpoch(); + public abstract Long getMilliSinceEpoch(); public PolyTemporal( PolyType type ) { @@ -37,9 +38,14 @@ public PolyTemporal( PolyType type ) { } + public long getDaysSinceEpoch() { + return getMilliSinceEpoch() / DateTimeUtils.MILLIS_PER_DAY; + } + + public Calendar toCalendar() { GregorianCalendar cal = new GregorianCalendar(); - cal.setTimeInMillis( getSinceEpoch() ); + cal.setTimeInMillis( getMilliSinceEpoch() ); return cal; } diff --git a/core/src/main/java/org/polypheny/db/view/MaterializedViewManager.java b/core/src/main/java/org/polypheny/db/view/MaterializedViewManager.java index cbe96b622f..ca348a4370 100644 --- a/core/src/main/java/org/polypheny/db/view/MaterializedViewManager.java +++ 
b/core/src/main/java/org/polypheny/db/view/MaterializedViewManager.java @@ -17,6 +17,7 @@ package org.polypheny.db.view; import java.util.ArrayList; +import java.util.Collection; import java.util.List; import lombok.Getter; import lombok.NonNull; @@ -68,7 +69,7 @@ public abstract void addData( @NonNull AlgRoot algRoot, @NonNull LogicalMaterializedView materializedView ); - public abstract void addTables( Transaction transaction, List ids ); + public abstract void notifyModifiedTables( Transaction transaction, Collection ids ); public abstract void updateData( Transaction transaction, long viewId ); @@ -91,7 +92,7 @@ public static class TableUpdateVisitor extends AlgShuttleImpl { @Override public AlgNode visit( LogicalRelModify modify ) { if ( modify.getOperation() != Modify.Operation.MERGE ) { - if ( (modify.getEntity() != null) ) { + if ( modify.getEntity() != null ) { if ( modify.getEntity().unwrap( PhysicalEntity.class ) != null ) { ids.add( modify.getEntity().unwrap( PhysicalEntity.class ).id ); } else if ( modify.getEntity().unwrap( AllocationEntity.class ) != null ) { diff --git a/core/src/main/java/org/polypheny/db/view/ViewManager.java b/core/src/main/java/org/polypheny/db/view/ViewManager.java index 991bc54617..e8145aa86f 100644 --- a/core/src/main/java/org/polypheny/db/view/ViewManager.java +++ b/core/src/main/java/org/polypheny/db/view/ViewManager.java @@ -55,7 +55,7 @@ public class ViewManager { - private static LogicalSort orderMaterialized( AlgNode other ) { + public static LogicalSort orderMaterialized( AlgNode other ) { int positionPrimary = other.getRowType().getFieldList().size() - 1; AlgFieldCollation algFieldCollation = new AlgFieldCollation( positionPrimary, Direction.ASCENDING ); AlgCollations.of( algFieldCollation ); @@ -248,9 +248,10 @@ private void handleNodeType( AlgNode other ) { public AlgNode checkNode( AlgNode other ) { - if ( other instanceof LogicalRelViewScan ) { + /*if ( other instanceof LogicalRelViewScan ) { return 
expandViewNode( other ); - } else if ( doesSubstituteOrderBy && other instanceof LogicalRelScan ) { + } else */ + if ( doesSubstituteOrderBy && other instanceof LogicalRelScan ) { LogicalTable catalogTable = other.getEntity().unwrap( LogicalTable.class ); if ( catalogTable.entityType == EntityType.MATERIALIZED_VIEW && ((LogicalMaterializedView) catalogTable).isOrdered() ) { return orderMaterialized( other ); diff --git a/dbms/src/main/java/org/polypheny/db/processing/AbstractQueryProcessor.java b/dbms/src/main/java/org/polypheny/db/processing/AbstractQueryProcessor.java index 18de729a19..77ed49af8e 100644 --- a/dbms/src/main/java/org/polypheny/db/processing/AbstractQueryProcessor.java +++ b/dbms/src/main/java/org/polypheny/db/processing/AbstractQueryProcessor.java @@ -32,8 +32,6 @@ import java.util.Map.Entry; import java.util.Objects; import java.util.Set; -import java.util.concurrent.BlockingQueue; -import java.util.concurrent.LinkedBlockingQueue; import java.util.stream.Collectors; import java.util.stream.IntStream; import java.util.stream.Stream; @@ -147,15 +145,11 @@ import org.polypheny.db.util.DeadlockException; import org.polypheny.db.util.Pair; import org.polypheny.db.view.MaterializedViewManager; -import org.polypheny.db.view.MaterializedViewManager.TableUpdateVisitor; -import org.polypheny.db.view.ViewManager.ViewVisitor; @Slf4j public abstract class AbstractQueryProcessor implements QueryProcessor, ExecutionTimeObserver { - BlockingQueue eventQueue = new LinkedBlockingQueue<>(); - protected static final boolean ENABLE_BINDABLE = false; protected static final boolean ENABLE_COLLATION_TRAIT = true; protected static final boolean ENABLE_ENUMERABLE = true; @@ -177,7 +171,7 @@ protected AbstractQueryProcessor( Statement statement ) { @Override public void executionTime( String reference, long nanoTime ) { StatementEvent event = statement.getMonitoringEvent(); - if ( reference.equals( event.getLogicalQueryInformation().getQueryClass() ) ) { + if ( 
reference.equals( event.getLogicalQueryInformation().getQueryHash() ) ) { event.setExecutionTime( nanoTime ); } } @@ -259,7 +253,10 @@ private ProposedImplementations prepareQueryList( AlgRoot logicalRoot, Al List> results = new ArrayList<>(); List generatedCodes = new ArrayList<>(); - // + if ( isAnalyze ) { + statement.getProcessingDuration().start( "Expand Views" ); + } + // Check for view if ( logicalRoot.info.containsView ) { logicalRoot = logicalRoot.unfoldView(); @@ -267,6 +264,7 @@ private ProposedImplementations prepareQueryList( AlgRoot logicalRoot, Al // Analyze step if ( isAnalyze ) { + statement.getProcessingDuration().stop( "Expand Views" ); statement.getProcessingDuration().start( "Analyze" ); } @@ -280,24 +278,13 @@ private ProposedImplementations prepareQueryList( AlgRoot logicalRoot, Al ExecutionTimeMonitor executionTimeMonitor = new ExecutionTimeMonitor(); if ( RoutingManager.POST_COST_AGGREGATION_ACTIVE.getBoolean() ) { // Subscribe only when aggregation is active - executionTimeMonitor.subscribe( this, logicalQueryInformation.getQueryClass() ); + executionTimeMonitor.subscribe( this, logicalQueryInformation.getQueryHash() ); } if ( isAnalyze ) { statement.getProcessingDuration().start( "Expand Views" ); } - // Check if the algRoot includes Views or Materialized Views and replaces what necessary - // View: replace LogicalViewScan with underlying information - // Materialized View: add order by if Materialized View includes Order by - ViewVisitor viewVisitor = new ViewVisitor( false ); - logicalRoot = viewVisitor.startSubstitution( logicalRoot ); - - // Update which tables where changed used for Materialized Views - TableUpdateVisitor visitor = new TableUpdateVisitor(); - logicalRoot.alg.accept( visitor ); - MaterializedViewManager.getInstance().addTables( statement.getTransaction(), visitor.getIds() ); - if ( isAnalyze ) { statement.getProcessingDuration().stop( "Expand Views" ); statement.getProcessingDuration().start( "Parameter Validation" ); 
@@ -313,7 +300,7 @@ private ProposedImplementations prepareQueryList( AlgRoot logicalRoot, Al } if ( isRouted ) { - proposedRoutingPlans = Lists.newArrayList( new ProposedRoutingPlanImpl( logicalRoot, logicalQueryInformation.getQueryClass() ) ); + proposedRoutingPlans = Lists.newArrayList( new ProposedRoutingPlanImpl( logicalRoot, logicalQueryInformation.getQueryHash() ) ); } else { // // Locking @@ -370,7 +357,7 @@ private ProposedImplementations prepareQueryList( AlgRoot logicalRoot, Al Set partitionIds = logicalQueryInformation.getAccessedPartitions().values().stream() .flatMap( List::stream ) .collect( Collectors.toSet() ); - List routingPlansCached = RoutingPlanCache.INSTANCE.getIfPresent( logicalQueryInformation.getQueryClass(), partitionIds ); + List routingPlansCached = RoutingPlanCache.INSTANCE.getIfPresent( logicalQueryInformation.getQueryHash(), partitionIds ); if ( !routingPlansCached.isEmpty() && routingPlansCached.stream().noneMatch( p -> p.physicalPlacementsOfPartitions.isEmpty() ) ) { proposedRoutingPlans = routeCached( indexLookupRoot, routingPlansCached, statement, logicalQueryInformation, isAnalyze ); } @@ -965,16 +952,16 @@ private List route( AlgRoot logicalRoot, Statement statemen return routeDocument( logicalRoot, queryInformation, dmlRouter ); } else if ( logicalRoot.alg instanceof LogicalRelModify ) { AlgNode routedDml = dmlRouter.routeDml( (LogicalRelModify) logicalRoot.alg, statement ); - return Lists.newArrayList( new ProposedRoutingPlanImpl( routedDml, logicalRoot, queryInformation.getQueryClass() ) ); + return Lists.newArrayList( new ProposedRoutingPlanImpl( routedDml, logicalRoot, queryInformation.getQueryHash() ) ); } else if ( logicalRoot.alg instanceof ConditionalExecute ) { AlgNode routedConditionalExecute = dmlRouter.handleConditionalExecute( logicalRoot.alg, statement, queryInformation ); - return Lists.newArrayList( new ProposedRoutingPlanImpl( routedConditionalExecute, logicalRoot, queryInformation.getQueryClass() ) ); + 
return Lists.newArrayList( new ProposedRoutingPlanImpl( routedConditionalExecute, logicalRoot, queryInformation.getQueryHash() ) ); } else if ( logicalRoot.alg instanceof BatchIterator ) { AlgNode routedIterator = dmlRouter.handleBatchIterator( logicalRoot.alg, statement, queryInformation ); - return Lists.newArrayList( new ProposedRoutingPlanImpl( routedIterator, logicalRoot, queryInformation.getQueryClass() ) ); + return Lists.newArrayList( new ProposedRoutingPlanImpl( routedIterator, logicalRoot, queryInformation.getQueryHash() ) ); } else if ( logicalRoot.alg instanceof ConstraintEnforcer ) { AlgNode routedConstraintEnforcer = dmlRouter.handleConstraintEnforcer( logicalRoot.alg, statement, queryInformation ); - return Lists.newArrayList( new ProposedRoutingPlanImpl( routedConstraintEnforcer, logicalRoot, queryInformation.getQueryClass() ) ); + return Lists.newArrayList( new ProposedRoutingPlanImpl( routedConstraintEnforcer, logicalRoot, queryInformation.getQueryHash() ) ); } else { final List proposedPlans = new ArrayList<>(); if ( statement.getTransaction().isAnalyze() ) { @@ -985,7 +972,7 @@ private List route( AlgRoot logicalRoot, Statement statemen try{ List builders = router.route( logicalRoot, statement, queryInformation ); List plans = builders.stream() - .map( builder -> new ProposedRoutingPlanImpl( builder, logicalRoot, queryInformation.getQueryClass(), router.getClass() ) ) + .map( builder -> new ProposedRoutingPlanImpl( builder, logicalRoot, queryInformation.getQueryHash(), router.getClass() ) ) .collect( Collectors.toList() ); proposedPlans.addAll( plans ); } catch ( Throwable e ){ @@ -1021,22 +1008,22 @@ private List route( AlgRoot logicalRoot, Statement statemen private List routeGraph( AlgRoot logicalRoot, LogicalQueryInformation queryInformation, DmlRouter dmlRouter ) { if ( logicalRoot.alg instanceof LogicalLpgModify ) { AlgNode routedDml = dmlRouter.routeGraphDml( (LogicalLpgModify) logicalRoot.alg, statement ); - return Lists.newArrayList( 
new ProposedRoutingPlanImpl( routedDml, logicalRoot, queryInformation.getQueryClass() ) ); + return Lists.newArrayList( new ProposedRoutingPlanImpl( routedDml, logicalRoot, queryInformation.getQueryHash() ) ); } RoutedAlgBuilder builder = RoutedAlgBuilder.create( statement, logicalRoot.alg.getCluster() ); AlgNode node = RoutingManager.getInstance().getRouters().get( 0 ).routeGraph( builder, (AlgNode & LpgAlg) logicalRoot.alg, statement ); - return Lists.newArrayList( new ProposedRoutingPlanImpl( builder.stackSize() == 0 ? node : builder.build(), logicalRoot, queryInformation.getQueryClass() ) ); + return Lists.newArrayList( new ProposedRoutingPlanImpl( builder.stackSize() == 0 ? node : builder.build(), logicalRoot, queryInformation.getQueryHash() ) ); } private List routeDocument( AlgRoot logicalRoot, LogicalQueryInformation queryInformation, DmlRouter dmlRouter ) { if ( logicalRoot.alg instanceof LogicalDocumentModify ) { AlgNode routedDml = dmlRouter.routeDocumentDml( (LogicalDocumentModify) logicalRoot.alg, statement, null ); - return Lists.newArrayList( new ProposedRoutingPlanImpl( routedDml, logicalRoot, queryInformation.getQueryClass() ) ); + return Lists.newArrayList( new ProposedRoutingPlanImpl( routedDml, logicalRoot, queryInformation.getQueryHash() ) ); } RoutedAlgBuilder builder = RoutedAlgBuilder.create( statement, logicalRoot.alg.getCluster() ); AlgNode node = RoutingManager.getInstance().getRouters().get( 0 ).routeDocument( builder, logicalRoot.alg, statement ); - return Lists.newArrayList( new ProposedRoutingPlanImpl( builder.stackSize() == 0 ? node : builder.build(), logicalRoot, queryInformation.getQueryClass() ) ); + return Lists.newArrayList( new ProposedRoutingPlanImpl( builder.stackSize() == 0 ? 
node : builder.build(), logicalRoot, queryInformation.getQueryHash() ) ); } @@ -1066,7 +1053,7 @@ private List routeCached( AlgRoot logicalRoot, List> partitionValueFilterPerScan = analyzeRelShuttle.getPartitionValueFilterPerScan(); + Map> partitionValueFilterPerScan = analyzer.getPartitionValueFilterPerScan(); Map> accessedPartitionMap = this.getAccessedPartitionsPerScan( logicalRoot.alg, partitionValueFilterPerScan ); // Build queryClass from query-name and partitions. - String queryClass = analyzeRelShuttle.getQueryName();// + accessedPartitionMap; + String queryHash = analyzer.getQueryName();// + accessedPartitionMap; // Build LogicalQueryInformation instance and prepare monitoring - LogicalQueryInformation queryInformation = new LogicalQueryInformationImpl( - queryClass, + LogicalQueryInformationImpl queryInformation = new LogicalQueryInformationImpl( + queryHash, accessedPartitionMap, - analyzeRelShuttle.availableColumns, - analyzeRelShuttle.availableColumnsWithTable, - analyzeRelShuttle.getUsedColumns(), - analyzeRelShuttle.getEntityId() ); + analyzer.availableColumns, + analyzer.availableColumnsWithTable, + analyzer.getUsedColumns(), + analyzer.scannedEntities, + analyzer.modifiedEntities ); this.prepareMonitoring( statement, logicalRoot, isAnalyze, isSubquery, queryInformation ); + // Update which tables where changed used for Materialized Views + MaterializedViewManager.getInstance().notifyModifiedTables( statement.getTransaction(), queryInformation.allModifiedEntities ); + return queryInformation; } @@ -1496,7 +1487,7 @@ private Pair, ProposedRoutingPlan> selectPlan( Propose this.cacheRouterPlans( proposedRoutingPlans, approximatedCosts, - queryInformation.getQueryClass(), + queryInformation.getQueryHash(), queryInformation.getAccessedPartitions().values().stream().flatMap( List::stream ).collect( Collectors.toSet() ) ); } diff --git a/dbms/src/main/java/org/polypheny/db/processing/shuttles/LogicalQueryInformationImpl.java 
b/dbms/src/main/java/org/polypheny/db/processing/shuttles/LogicalQueryInformationImpl.java index 270ad53e51..cd4d443ffa 100644 --- a/dbms/src/main/java/org/polypheny/db/processing/shuttles/LogicalQueryInformationImpl.java +++ b/dbms/src/main/java/org/polypheny/db/processing/shuttles/LogicalQueryInformationImpl.java @@ -16,54 +16,70 @@ package org.polypheny.db.processing.shuttles; -import java.util.HashMap; -import java.util.LinkedHashMap; +import com.google.common.collect.ImmutableMap; +import com.google.common.collect.ImmutableSet; +import com.google.common.collect.ImmutableSet.Builder; +import java.util.Collection; import java.util.List; import java.util.Map; import java.util.Map.Entry; +import java.util.Set; import java.util.stream.Collectors; -import lombok.Getter; +import lombok.Value; +import org.polypheny.db.catalog.logistic.NamespaceType; import org.polypheny.db.routing.LogicalQueryInformation; - +@Value public class LogicalQueryInformationImpl implements LogicalQueryInformation { - @Getter - protected final Map availableColumns; // column id -> schemaName.tableName.ColumnName - protected final Map availableColumnsWithTable; // columnId -> tableId - protected final Map> accessedPartitions; // scanId -> partitionIds - protected final String queryId; - protected final Map usedColumns; - - @Getter - protected final List tablesIds; + public String queryHash; + public ImmutableMap> accessedPartitions; // scanId -> partitionIds + public ImmutableMap availableColumns; // column id -> schemaName.tableName.ColumnName + public ImmutableMap availableColumnsWithTable; // columnId -> tableId + public ImmutableMap usedColumns; + public ImmutableMap> scannedEntities; + public ImmutableMap> modifiedEntities; + public ImmutableSet allModifiedEntities; + public ImmutableSet allScannedEntities; + public ImmutableSet allEntities; public LogicalQueryInformationImpl( - String queryId, - Map> accessedPartitionMap, // scanId -> List of partitionIds - LinkedHashMap 
availableColumns, - HashMap availableColumnsWithTable, + String queryHash, + Map> accessedPartitions, // scanId -> List of partitionIds + Map availableColumns, + Map availableColumnsWithTable, Map usedColumns, - List tablesIds ) { - this.queryId = queryId; - this.accessedPartitions = accessedPartitionMap; - this.availableColumns = availableColumns; - this.availableColumnsWithTable = availableColumnsWithTable; - this.usedColumns = usedColumns; - this.tablesIds = tablesIds; + Map> scannedEntities, + Map> modifiedEntities ) { + this.queryHash = queryHash; + this.accessedPartitions = ImmutableMap.copyOf( accessedPartitions ); + this.availableColumns = ImmutableMap.copyOf( availableColumns ); + this.availableColumnsWithTable = ImmutableMap.copyOf( availableColumnsWithTable ); + this.usedColumns = ImmutableMap.copyOf( usedColumns ); + this.scannedEntities = ImmutableMap.copyOf( scannedEntities ); + this.modifiedEntities = ImmutableMap.copyOf( modifiedEntities ); + this.allModifiedEntities = buildAllModifiedEntities(); + this.allScannedEntities = buildAllScannedEntities(); + this.allEntities = buildAllEntities(); } - @Override - public Map> getAccessedPartitions() { - return this.accessedPartitions; + private ImmutableSet buildAllEntities() { + Builder set = ImmutableSet.builder(); + allModifiedEntities.forEach( set::add ); + allScannedEntities.forEach( set::add ); + return set.build(); } - @Override - public Map getAvailableColumnsWithTable() { - return this.availableColumnsWithTable; + private ImmutableSet buildAllScannedEntities() { + return ImmutableSet.copyOf( scannedEntities.values().stream().flatMap( Collection::stream ).collect( Collectors.toList() ) ); + } + + + private ImmutableSet buildAllModifiedEntities() { + return ImmutableSet.copyOf( modifiedEntities.values().stream().flatMap( Collection::stream ).collect( Collectors.toList() ) ); } @@ -71,7 +87,7 @@ public Map getAvailableColumnsWithTable() { public List getAllColumnsPerTable( Long tableId ) { final Map 
usedCols = this.availableColumns; return availableColumnsWithTable.entrySet().stream() - .filter( x -> x.getValue().equals( tableId ) && usedCols.keySet().contains( x.getKey() ) ) + .filter( x -> x.getValue().equals( tableId ) && usedCols.containsKey( x.getKey() ) ) .map( Entry::getKey ) .collect( Collectors.toList() ); } @@ -81,21 +97,9 @@ public List getAllColumnsPerTable( Long tableId ) { public List getUsedColumnsPerTable( Long tableId ) { Map usedCols = getUsedColumns(); return availableColumnsWithTable.entrySet().stream() - .filter( x -> x.getValue().equals( tableId ) && usedCols.keySet().contains( x.getKey() ) ) + .filter( x -> x.getValue().equals( tableId ) && usedCols.containsKey( x.getKey() ) ) .map( Entry::getKey ) .collect( Collectors.toList() ); } - - @Override - public Map getUsedColumns() { - return this.usedColumns; - } - - - @Override - public String getQueryClass() { - return this.queryId; - } - } diff --git a/dbms/src/main/java/org/polypheny/db/routing/routers/AbstractDqlRouter.java b/dbms/src/main/java/org/polypheny/db/routing/routers/AbstractDqlRouter.java index 45c285d441..f543273b85 100644 --- a/dbms/src/main/java/org/polypheny/db/routing/routers/AbstractDqlRouter.java +++ b/dbms/src/main/java/org/polypheny/db/routing/routers/AbstractDqlRouter.java @@ -205,7 +205,7 @@ protected List buildSelect( AlgNode node, List 1 ) { + if ( false && Catalog.snapshot().alloc().getFromLogical( catalogTable.id ).size() > 1 ) { // todo dl replace vert atm return handleHorizontalPartitioning( node, catalogTable, statement, logicalTable, builders, cluster, queryInformation ); } else { // At the moment multiple strategies diff --git a/dbms/src/main/java/org/polypheny/db/view/MaterializedViewManagerImpl.java b/dbms/src/main/java/org/polypheny/db/view/MaterializedViewManagerImpl.java index e0f17044af..140d09c84b 100644 --- a/dbms/src/main/java/org/polypheny/db/view/MaterializedViewManagerImpl.java +++ 
b/dbms/src/main/java/org/polypheny/db/view/MaterializedViewManagerImpl.java @@ -27,6 +27,7 @@ import java.util.List; import java.util.Map; import java.util.Map.Entry; +import java.util.Optional; import java.util.concurrent.ConcurrentHashMap; import lombok.Getter; import lombok.NonNull; @@ -111,7 +112,7 @@ public MaterializedViewManagerImpl( TransactionManager transactionManager ) { public synchronized Map updateMaterializedViewInfo() { List toRemove = new ArrayList<>(); for ( Long id : materializedInfo.keySet() ) { - if ( Catalog.getInstance().getSnapshot().getLogicalEntity( id ) == null ) { + if ( Catalog.getInstance().getSnapshot().getLogicalEntity( id ).isEmpty() ) { toRemove.add( id ); } } @@ -177,16 +178,23 @@ public synchronized void addMaterializedInfo( Long materializedId, MaterializedC * @param tableIds table that was changed */ @Override - public void addTables( Transaction transaction, List tableIds ) { + public void notifyModifiedTables( Transaction transaction, Collection tableIds ) { if ( tableIds.isEmpty() ) { return; } - LogicalTable catalogTable = Catalog.snapshot().rel().getTable( tableIds.get( 0 ) ).orElseThrow(); - long id = catalogTable.id; - /*if ( !catalogTable.getConnectedViews().isEmpty() ) { - updateCandidates.put( transaction.getXid(), id ); - }*/ + for ( long tableId : tableIds ) { + Optional tableOptional = Catalog.snapshot().rel().getTable( tableId ); + + if ( tableOptional.isEmpty() ) { + continue; + } + + if ( !transaction.getSnapshot().rel().getConnectedViews( tableOptional.get().id ).isEmpty() ) { + updateCandidates.put( transaction.getXid(), tableOptional.get().id ); + } + } + } @@ -212,7 +220,6 @@ public void updateCommittedXid( PolyXid xid ) { */ public void materializedUpdate( long potentialInteresting ) { Snapshot snapshot = Catalog.getInstance().getSnapshot(); - //LogicalTable catalogTable = snapshot.getNamespaces( null ).stream().map( n -> snapshot.rel().getTable( potentialInteresting ) ).filter( Objects::nonNull 
).findFirst().orElse( null ); List connectedViews = snapshot.rel().getConnectedViews( potentialInteresting ); for ( LogicalView view : connectedViews ) { diff --git a/dbms/src/test/java/org/polypheny/db/restapi/RestTest.java b/dbms/src/test/java/org/polypheny/db/restapi/RestTest.java index 4400f55107..dbf6bfecbd 100644 --- a/dbms/src/test/java/org/polypheny/db/restapi/RestTest.java +++ b/dbms/src/test/java/org/polypheny/db/restapi/RestTest.java @@ -213,8 +213,7 @@ public void testOperations() { String expected = "{\"result\":[{\"restschema.resttest.tsmallint\":45,\"restschema.resttest.tdecimal\":123.45,\"restschema.resttest.ttinyint\":22,\"restschema.resttest.treal\":0.3333,\"restschema.resttest.tinteger\":9876,\"restschema.resttest.ttime\":\"43505000\",\"restschema.resttest.tbigint\":1234,\"restschema.resttest.tboolean\":true,\"restschema.resttest.tdate\":18466,\"restschema.resttest.tdouble\":1.999999,\"restschema.resttest.tvarchar\":\"hallo\",\"restschema.resttest.ttimestamp\":\"2020-07-23T12:05:05\"}],\"size\":1}"; JsonElement jsonExpected = JsonParser.parseString( expected ); JsonElement jsonResult = JsonParser.parseString( executeRest( request ).getBody() ); - Assert.assertEquals( - jsonExpected, jsonResult ); + Assert.assertEquals( jsonExpected, jsonResult ); // Delete where = new LinkedHashMap<>(); diff --git a/monitoring/src/main/java/org/polypheny/db/monitoring/events/analyzer/DmlEventAnalyzer.java b/monitoring/src/main/java/org/polypheny/db/monitoring/events/analyzer/DmlEventAnalyzer.java index fd8faad2bd..6bbd554a01 100644 --- a/monitoring/src/main/java/org/polypheny/db/monitoring/events/analyzer/DmlEventAnalyzer.java +++ b/monitoring/src/main/java/org/polypheny/db/monitoring/events/analyzer/DmlEventAnalyzer.java @@ -32,14 +32,14 @@ public static DmlDataPoint analyze( DmlEvent dmlEvent ) { DmlDataPoint metric = DmlDataPoint .builder() .Id( dmlEvent.getId() ) - .tables( dmlEvent.getLogicalQueryInformation().getTablesIds() ) + .tables( 
dmlEvent.getLogicalQueryInformation().getAllScannedEntities() ) .fieldNames( dmlEvent.getFieldNames() ) .executionTime( dmlEvent.getExecutionTime() ) .rowCount( dmlEvent.getRowCount() ) .isSubQuery( dmlEvent.isSubQuery() ) .isCommitted( dmlEvent.isCommitted() ) .recordedTimestamp( dmlEvent.getRecordedTimestamp() ) - .queryClass( dmlEvent.getLogicalQueryInformation().getQueryClass() ) + .queryClass( dmlEvent.getLogicalQueryInformation().getQueryHash() ) .monitoringType( dmlEvent.getMonitoringType() ) .physicalQueryClass( dmlEvent.getPhysicalQueryClass() ) .availableColumnsWithTable( dmlEvent.getLogicalQueryInformation().getAvailableColumnsWithTable() ) diff --git a/monitoring/src/main/java/org/polypheny/db/monitoring/events/analyzer/QueryEventAnalyzer.java b/monitoring/src/main/java/org/polypheny/db/monitoring/events/analyzer/QueryEventAnalyzer.java index a010b60824..7d0c438eb6 100644 --- a/monitoring/src/main/java/org/polypheny/db/monitoring/events/analyzer/QueryEventAnalyzer.java +++ b/monitoring/src/main/java/org/polypheny/db/monitoring/events/analyzer/QueryEventAnalyzer.java @@ -33,7 +33,7 @@ public static QueryDataPointImpl analyze( QueryEvent queryEvent ) { return QueryDataPointImpl .builder() .Id( queryEvent.getId() ) - .tables( queryEvent.getLogicalQueryInformation().getTablesIds() ) + .tables( queryEvent.getLogicalQueryInformation().getAllScannedEntities() ) .fieldNames( queryEvent.getFieldNames() ) .executionTime( queryEvent.getExecutionTime() ) .rowCount( queryEvent.getRowCount() ) @@ -41,7 +41,7 @@ public static QueryDataPointImpl analyze( QueryEvent queryEvent ) { .isCommitted( queryEvent.isCommitted() ) .recordedTimestamp( queryEvent.getRecordedTimestamp() ) .algCompareString( queryEvent.getAlgCompareString() ) - .queryClass( queryEvent.getLogicalQueryInformation().getQueryClass() ) + .queryClass( queryEvent.getLogicalQueryInformation().getQueryHash() ) .monitoringType( queryEvent.getMonitoringType() ) .physicalQueryClass( 
queryEvent.getPhysicalQueryClass() ) .availableColumnsWithTable( queryEvent.getLogicalQueryInformation().getAvailableColumnsWithTable() ) diff --git a/monitoring/src/main/java/org/polypheny/db/monitoring/events/metrics/DmlDataPoint.java b/monitoring/src/main/java/org/polypheny/db/monitoring/events/metrics/DmlDataPoint.java index 192b684401..a70a45dc96 100644 --- a/monitoring/src/main/java/org/polypheny/db/monitoring/events/metrics/DmlDataPoint.java +++ b/monitoring/src/main/java/org/polypheny/db/monitoring/events/metrics/DmlDataPoint.java @@ -19,10 +19,11 @@ import java.io.Serializable; import java.sql.Timestamp; -import java.util.ArrayList; import java.util.HashMap; +import java.util.HashSet; import java.util.List; import java.util.Map; +import java.util.Set; import java.util.UUID; import lombok.AccessLevel; import lombok.AllArgsConstructor; @@ -44,7 +45,7 @@ public class DmlDataPoint implements MonitoringDataPoint, Serializable { private static final long serialVersionUID = 8159995420459385039L; @Builder.Default - private final List tables = new ArrayList<>(); + private final Set tables = new HashSet<>(); private final Map dataElements = new HashMap<>(); private UUID Id; private Timestamp recordedTimestamp; @@ -59,7 +60,7 @@ public class DmlDataPoint implements MonitoringDataPoint, Serializable { private String queryClass; private String physicalQueryClass; @Builder.Default - private final HashMap> changedValues = new HashMap<>(); + private final Map> changedValues = new HashMap<>(); @Builder.Default private final Map availableColumnsWithTable = new HashMap<>(); diff --git a/monitoring/src/main/java/org/polypheny/db/monitoring/events/metrics/QueryDataPointImpl.java b/monitoring/src/main/java/org/polypheny/db/monitoring/events/metrics/QueryDataPointImpl.java index a06da4c4f3..94c4cce7ee 100644 --- a/monitoring/src/main/java/org/polypheny/db/monitoring/events/metrics/QueryDataPointImpl.java +++ 
b/monitoring/src/main/java/org/polypheny/db/monitoring/events/metrics/QueryDataPointImpl.java @@ -19,10 +19,11 @@ import java.io.Serializable; import java.sql.Timestamp; -import java.util.ArrayList; import java.util.HashMap; +import java.util.HashSet; import java.util.List; import java.util.Map; +import java.util.Set; import java.util.UUID; import lombok.Builder; import lombok.Value; @@ -37,7 +38,7 @@ public class QueryDataPointImpl implements QueryDataPoint, Serializable { private static final long serialVersionUID = 4389301720141941770L; @Builder.Default - List tables = new ArrayList<>(); + Set tables = new HashSet<>(); Map dataElements = new HashMap<>(); UUID Id; Timestamp recordedTimestamp; diff --git a/plugins/avatica-interface/src/main/java/org/polypheny/db/avatica/DbmsMeta.java b/plugins/avatica-interface/src/main/java/org/polypheny/db/avatica/DbmsMeta.java index 0469f39d12..67dbee1164 100644 --- a/plugins/avatica-interface/src/main/java/org/polypheny/db/avatica/DbmsMeta.java +++ b/plugins/avatica-interface/src/main/java/org/polypheny/db/avatica/DbmsMeta.java @@ -1425,11 +1425,11 @@ private static Function1 getPolyToExternalizer( AlgDataType t case DECIMAL: return o -> o.asNumber().BigDecimalValue(); case DATE: - return o -> o.asDate().sinceEpoch / DateTimeUtils.MILLIS_PER_DAY; + return o -> o.asDate().milliSinceEpoch / DateTimeUtils.MILLIS_PER_DAY; case TIME: return o -> o.asTime().ofDay % DateTimeUtils.MILLIS_PER_DAY; case TIMESTAMP: - return o -> o.asTimeStamp().sinceEpoch; + return o -> o.asTimeStamp().milliSinceEpoch; case BOOLEAN: return o -> o.asBoolean().value; case ARRAY: diff --git a/plugins/rest-interface/src/main/java/org/polypheny/db/restapi/RestResult.java b/plugins/rest-interface/src/main/java/org/polypheny/db/restapi/RestResult.java index c113eeeaea..339e502ded 100644 --- a/plugins/rest-interface/src/main/java/org/polypheny/db/restapi/RestResult.java +++ b/plugins/rest-interface/src/main/java/org/polypheny/db/restapi/RestResult.java @@ -29,9 
+29,7 @@ import java.io.OutputStream; import java.io.PushbackInputStream; import java.nio.charset.StandardCharsets; -import java.sql.Blob; import java.time.LocalDateTime; -import java.time.ZoneOffset; import java.util.ArrayList; import java.util.HashMap; import java.util.Iterator; @@ -48,7 +46,6 @@ import org.polypheny.db.algebra.constant.Kind; import org.polypheny.db.algebra.type.AlgDataType; import org.polypheny.db.algebra.type.AlgDataTypeField; -import org.polypheny.db.type.PolyType; import org.polypheny.db.type.PolyTypeFamily; import org.polypheny.db.type.entity.PolyValue; import org.polypheny.db.util.Pair; @@ -123,34 +120,72 @@ private void transformNonDML() { List> result = new ArrayList<>(); while ( iterator.hasNext() ) { Object next = iterator.next(); - Object[] row; + PolyValue[] row; if ( next.getClass().isArray() ) { - row = (Object[]) next; + row = (PolyValue[]) next; } else { - row = new Object[]{ next }; + row = new PolyValue[]{ (PolyValue) next }; } HashMap temp = new HashMap<>(); int i = 0; for ( AlgDataTypeField type : dataType.getFieldList() ) { - Object o = row[i]; + PolyValue o = row[i]; + + if ( o == null ) { + temp.put( columns.get( i ).columnName, null ); + continue; + } + if ( type.getType().getPolyType().getFamily() == PolyTypeFamily.MULTIMEDIA ) { - if ( o instanceof File ) { + /* + if ( o.isBlob() ) {//file o = addZipEntry( o ); } else if ( o instanceof InputStream || o instanceof Blob ) { o = addZipEntry( o ); } else if ( o instanceof byte[] ) { o = addZipEntry( o ); - } + }*///todo dl rest temp.put( columns.get( i ).columnName, o ); } else { - if ( type.getType().getPolyType().equals( PolyType.TIMESTAMP ) ) { - Long nanoSeconds = (Long) o; - LocalDateTime localDateTime = LocalDateTime.ofEpochSecond( nanoSeconds / 1000L, (int) ((nanoSeconds % 1000) * 1000), ZoneOffset.UTC ); - temp.put( columns.get( i ).columnName, localDateTime.toString() ); - } else if ( type.getType().getPolyType().equals( PolyType.TIME ) ) { - temp.put( columns.get( 
i ).columnName, o.toString() ); - } else { - temp.put( columns.get( i ).columnName, o ); + switch ( type.getType().getPolyType() ) { + case TIMESTAMP: + //Long nanoSeconds = o.asTimeStamp().asSqlTimestamp().toLocalDateTime(); + LocalDateTime localDateTime = o.asTimeStamp().asSqlTimestamp().toLocalDateTime(); //LocalDateTime.ofEpochSecond( nanoSeconds / 1000L, (int) ((nanoSeconds % 1000) * 1000), ZoneOffset.UTC ); + temp.put( columns.get( i ).columnName, localDateTime.toString() ); + break; + case TIME: + temp.put( columns.get( i ).columnName, o.asTime().ofDay ); + break; + case VARCHAR: + temp.put( columns.get( i ).columnName, o.asString().value ); + break; + case DOUBLE: + temp.put( columns.get( i ).columnName, o.asNumber().DoubleValue() ); + break; + case REAL: + case FLOAT: + temp.put( columns.get( i ).columnName, o.asNumber().FloatValue() ); + break; + case DECIMAL: + temp.put( columns.get( i ).columnName, o.asNumber().bigDecimalValue() ); + break; + case BOOLEAN: + temp.put( columns.get( i ).columnName, o.asBoolean().value ); + break; + case BIGINT: + temp.put( columns.get( i ).columnName, o.asNumber().LongValue() ); + break; + case TINYINT: + case SMALLINT: + case INTEGER: + temp.put( columns.get( i ).columnName, o.asNumber().IntValue() ); + break; + case DATE: + temp.put( columns.get( i ).columnName, o.asDate().getDaysSinceEpoch() ); + break; + default: + temp.put( columns.get( i ).columnName, o ); + break; } } i++; @@ -238,7 +273,7 @@ public Pair getResult( final Context ctx ) { finalResult.put( "result", result ); finalResult.put( "size", result.size() ); if ( !containsFiles ) { - return new Pair( gson.toJson( finalResult ), finalResult.size() ); + return new Pair<>( gson.toJson( finalResult ), finalResult.size() ); } else { OutputStream os; ZipEntry zipEntry = new ZipEntry( "data.json" ); diff --git a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/SqlAbstractDateTimeLiteral.java 
b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/SqlAbstractDateTimeLiteral.java index 92123bb92a..5decd96432 100644 --- a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/SqlAbstractDateTimeLiteral.java +++ b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/SqlAbstractDateTimeLiteral.java @@ -56,7 +56,7 @@ protected SqlAbstractDateTimeLiteral( PolyTemporal d, boolean tz, PolyType typeN * Converts this literal to a {@link TimestampString}. */ protected TimestampString getTimestamp() { - return TimestampString.fromMillisSinceEpoch( value.asTemporal().getSinceEpoch() ); + return TimestampString.fromMillisSinceEpoch( value.asTemporal().getMilliSinceEpoch() ); } diff --git a/plugins/sql-language/src/test/java/org/polypheny/db/sql/language/FunctionsTest.java b/plugins/sql-language/src/test/java/org/polypheny/db/sql/language/FunctionsTest.java index 8b1742641e..6cc1bd83e1 100644 --- a/plugins/sql-language/src/test/java/org/polypheny/db/sql/language/FunctionsTest.java +++ b/plugins/sql-language/src/test/java/org/polypheny/db/sql/language/FunctionsTest.java @@ -209,7 +209,7 @@ public void testAddMonths() { private void checkAddMonths( int y0, int m0, int d0, int y1, int m1, int d1, int months ) { final PolyDate date0 = PolyDate.of( ymdToUnixDate( y0, m0, d0 ) ); - final long date = addMonths( date0, PolyLong.of( months ) ).sinceEpoch; + final long date = addMonths( date0, PolyLong.of( months ) ).milliSinceEpoch; final PolyDate date1 = PolyDate.of( ymdToUnixDate( y1, m1, d1 ) ); assertThat( (int) date, is( date1 ) ); @@ -225,7 +225,7 @@ private void checkAddMonths( int y0, int m0, int d0, int y1, int m1, int d1, int * Converts a date (days since epoch) and milliseconds (since midnight) into a timestamp (milliseconds since epoch). 
*/ private PolyDate d2ts( PolyDate date, int millis ) { - return PolyDate.of( date.sinceEpoch * DateTimeUtils.MILLIS_PER_DAY + millis ); + return PolyDate.of( date.milliSinceEpoch * DateTimeUtils.MILLIS_PER_DAY + millis ); }