diff --git a/backend/lcfs/db/migrations/versions/2025-01-16-19-35_998929392c8b.py b/backend/lcfs/db/migrations/versions/2025-01-16-19-35_998929392c8b.py
new file mode 100644
index 000000000..2bb1c4cc3
--- /dev/null
+++ b/backend/lcfs/db/migrations/versions/2025-01-16-19-35_998929392c8b.py
@@ -0,0 +1,51 @@
+"""add marine end use
+
+Revision ID: 998929392c8b
+Revises: 5bc0ef48739a
+Create Date: 2025-01-07 19:35:00.064999
+
+"""
+
+import sqlalchemy as sa
+from alembic import op
+
+# revision identifiers, used by Alembic.
+revision = "998929392c8b"
+down_revision = "5bc0ef48739a"
+branch_labels = None
+depends_on = None
+
+
+def upgrade() -> None:
+    op.execute(
+        """
+        INSERT INTO end_use_type (end_use_type_id, type, intended_use)
+        VALUES (25, 'Marine', TRUE)
+        ON CONFLICT (end_use_type_id) DO NOTHING;
+        """
+    )
+    # Energy Effectiveness Ratios
+    op.execute(
+        """
+        INSERT INTO energy_effectiveness_ratio (
+            eer_id, fuel_category_id, fuel_type_id, end_use_type_id, ratio, effective_status
+        )
+        VALUES (44, 2, 3, 25, 2.5, TRUE)
+        ON CONFLICT (eer_id) DO NOTHING;
+        """
+    )
+
+
+def downgrade() -> None:
+    op.execute(
+        """
+        DELETE FROM energy_effectiveness_ratio
+        WHERE eer_id = 44;
+        """
+    )
+    op.execute(
+        """
+        DELETE FROM end_use_type
+        WHERE end_use_type_id = 25;
+        """
+    )
diff --git a/etl/database/nifi-registry-primary.mv.db b/etl/database/nifi-registry-primary.mv.db
index 700e83338..917968e62 100644
Binary files a/etl/database/nifi-registry-primary.mv.db and b/etl/database/nifi-registry-primary.mv.db differ
diff --git a/etl/nifi/conf/flow.json.gz b/etl/nifi/conf/flow.json.gz
index b9c271f27..bc0cd697b 100644
Binary files a/etl/nifi/conf/flow.json.gz and b/etl/nifi/conf/flow.json.gz differ
diff --git a/etl/nifi/conf/flow.xml.gz b/etl/nifi/conf/flow.xml.gz
index 13fa45db2..82cb34231 100644
Binary files a/etl/nifi/conf/flow.xml.gz and b/etl/nifi/conf/flow.xml.gz differ
diff --git a/etl/nifi_scripts/clean_ups.groovy b/etl/nifi_scripts/clean_ups.groovy
index 025c46ecb..8a893064d 100644
--- a/etl/nifi_scripts/clean_ups.groovy
+++ b/etl/nifi_scripts/clean_ups.groovy
@@ -1,7 +1,6 @@
 import java.sql.Connection
 import java.sql.PreparedStatement
-import java.sql.ResultSet
-import groovy.json.JsonSlurper
+import java.sql.SQLException
 
 log.warn('**** STARTING TRANSFER UPDATE SQL ****')
 
@@ -14,39 +13,73 @@ def updateTransferEffectiveDateSQL = """
     AND update_date::date = transaction_effective_date::date; -- On the same day as transaction_effective_date
 """
 
-// Fetch connections to both source and destination databases
-// Replace the UUIDs with your actual Controller Service identifiers
-// For this UPDATE, only the destination database connection is required
-def destinationDbcpService = context.controllerServiceLookup.getControllerService('3244bf63-0192-1000-ffff-ffffc8ec6d93')
+// Cleanup queries
+def cleanUpQueries = [
+    """
+    -- 105
+    UPDATE "transaction"
+    SET compliance_units = -6994
+    WHERE transaction_id = 1491;
+    """,
+    """
+    -- 273
+    UPDATE compliance_report
+    SET transaction_id = null
+    WHERE compliance_report_id = 764;
+    """,
+    """
+    DELETE FROM "transaction"
+    WHERE transaction_id = 1920;
+    """
+]
 
-// Initialize database connections
+// Fetch connection to the destination database
+def destinationDbcpService = context.controllerServiceLookup.getControllerService('3244bf63-0192-1000-ffff-ffffc8ec6d93')
 Connection destinationConn = null
 
 try {
-    // Get a connection from the Destination DBCP Connection Pool
+    // Obtain a connection from the Destination DBCP Connection Pool
     destinationConn = destinationDbcpService.getConnection()
+    destinationConn.setAutoCommit(false) // Begin transaction
 
-    // Step 1: Execute the UPDATE statement
-    PreparedStatement updateStmt = destinationConn.prepareStatement(updateTransferEffectiveDateSQL)
-
-    // Execute the UPDATE statement
-    int rowsUpdated = updateStmt.executeUpdate()
+    // Step 1: Execute the UPDATE on public.transfer
+    try (PreparedStatement updateStmt = destinationConn.prepareStatement(updateTransferEffectiveDateSQL)) {
+        int rowsUpdated = updateStmt.executeUpdate()
+        log.info("Successfully executed UPDATE on 'public.transfer'. Rows affected: ${rowsUpdated}")
+    }
 
-    log.info("Successfully executed UPDATE on 'public.transfer'. Rows affected: ${rowsUpdated}")
+    // Step 2: Execute the cleanup queries in sequence
+    cleanUpQueries.each { query ->
+        try (PreparedStatement stmt = destinationConn.prepareStatement(query)) {
+            stmt.executeUpdate()
+        }
+    }
+    log.info("Cleanup queries executed successfully.")
 
-    // Close the UPDATE statement
-    updateStmt.close()
+    // Commit transaction
+    destinationConn.commit()
+    log.info("Transaction committed successfully.")
 
 } catch (Exception e) {
-    log.error('Error occurred while executing TRANSFER UPDATE SQL', e)
-    throw new ProcessException(e)
+    // Rollback transaction on error
+    if (destinationConn != null) {
+        try {
+            destinationConn.rollback()
+            log.warn("Transaction rolled back due to error.")
+        } catch (SQLException rollbackEx) {
+            log.error("Error occurred during transaction rollback", rollbackEx)
+        }
+    }
+    log.error('Error occurred during SQL operations', e)
+    throw new RuntimeException(e)
 } finally {
     // Ensure the connection is closed
     if (destinationConn != null) {
         try {
             destinationConn.close()
-        } catch (SQLException ignore) {
-            // Ignored
+            log.info("Database connection closed.")
+        } catch (SQLException closeEx) {
+            log.warn("Error occurred while closing the database connection", closeEx)
         }
     }
 }
diff --git a/frontend/Dockerfile.openshift b/frontend/Dockerfile.openshift
index 9cc584f19..7801a3e46 100644
--- a/frontend/Dockerfile.openshift
+++ b/frontend/Dockerfile.openshift
@@ -1,4 +1,4 @@
-FROM artifacts.developer.gov.bc.ca/docker-remote/node:20 as builder
+FROM artifacts.developer.gov.bc.ca/docker-remote/node:20.18.1 as builder
 ENV NODE_ENV=production
 WORKDIR /usr/src/app
 COPY ./ ./
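
For reviewers who want to sanity-check the migration locally, a minimal verification sketch, assuming psql access to the target LCFS database (table and column names are taken from the migration above, not from any other source). Because both inserts use ON CONFLICT ... DO NOTHING, each query should return exactly one row after upgrade and zero rows after downgrade:

-- Hypothetical spot checks; run against the migrated database.
-- Expect: one row with type = 'Marine' and intended_use = TRUE.
SELECT end_use_type_id, type, intended_use
FROM end_use_type
WHERE end_use_type_id = 25;

-- Expect: one row tying the Marine end use (25) to fuel_category_id 2 /
-- fuel_type_id 3 with ratio 2.5.
SELECT eer_id, fuel_category_id, fuel_type_id, end_use_type_id, ratio, effective_status
FROM energy_effectiveness_ratio
WHERE eer_id = 44;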