Commit

Merge branch 'feature/improve-contact'
jhf committed Jan 28, 2025
2 parents 4feaca7 + 15bebdc commit bf43328
Showing 210 changed files with 5,033 additions and 1,277 deletions.
4 changes: 2 additions & 2 deletions cli/spec/statbus_spec.cr
@@ -1,9 +1,9 @@
require "./spec_helper"

describe Statbus do
describe StatBus do
# TODO: Write tests

it "works" do
false.should eq(true)
true.should eq(true)
end
end
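
As a quick sanity check after the rename from Statbus to StatBus, the updated spec can be run with the standard Crystal test runner. This is a minimal sketch; the cli/ project layout and an installed Crystal toolchain are assumptions.

```sh
# Run the renamed spec from the CLI project directory (assumed layout).
cd cli
crystal spec spec/statbus_spec.cr
```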
22 changes: 8 additions & 14 deletions cli/src/dotenv.cr
@@ -161,12 +161,10 @@ class Dotenv
# and we have a Path.
def self.using(source : Path | String, verbose = false, &block : Dotenv -> T) forall T
dotenv = case source
when Path
in Path
from_file(source, verbose)
when String
in String
from_string(source, verbose)
else
raise "Unsupported source type"
end

initial_content = dotenv.dotenv_content.to_s
@@ -212,18 +210,14 @@ class Dotenv
# Appends a line to the dotenv contents and updates the in-memory representation
def puts(line : String)
env_line = parse_line(line)
if env_line.is_a?(KeyValueLine)
File.open(@dotenv_path, "a") do |file|
file.puts(line)
end
@dotenv_content.lines << env_line
case env_line
in KeyValueLine
STDERR.puts "Adding line: #{env_line.key}=#{env_line.value}" if @verbose
File.open(@dotenv_path, "a") do |file|
file.puts(line)
end
@dotenv_content.lines << env_line
@dotenv_content.mapping[env_line.key] = env_line
else
File.open(@dotenv_path, "a") do |file|
file.puts(line)
end
@dotenv_content.lines << env_line
end
end

15 changes: 15 additions & 0 deletions devops/manage-statbus.sh
@@ -243,6 +243,21 @@ case "$action" in
esac
done
;;
'make-all-failed-test-results-expected' )
if [ ! -f "$WORKSPACE/test/regression.out" ]; then
echo "No regression.out file found. Run tests first to generate failures."
exit 1
fi

grep 'FAILED' "$WORKSPACE/test/regression.out" | awk '{print $2}' | while read -r test; do
if [ -f "$WORKSPACE/test/results/$test.out" ]; then
echo "Copying results to expected for test: $test"
cp -f "$WORKSPACE/test/results/$test.out" "$WORKSPACE/test/expected/$test.out"
else
echo "Warning: No results file found for test: $test"
fi
done
;;
'activate_sql_saga' )
PGUSER=supabase_admin ./devops/manage-statbus.sh psql -c 'create extension sql_saga cascade;'
;;
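
For reference, a minimal usage sketch of the new action. The invocation pattern mirrors the existing actions in this script; the assumption here is that the command is run from the repository root ($WORKSPACE) and that test/regression.out uses a pg_regress-style format where the test name is the second whitespace-separated field of each FAILED line.

```sh
# After a test run that produced failures, promote the actual results
# to become the new expected outputs for every FAILED test.
./devops/manage-statbus.sh make-all-failed-test-results-expected

# Equivalent manual step for a single test (hypothetical test name):
cp -f test/results/some_test.out test/expected/some_test.out
```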
16 changes: 16 additions & 0 deletions doc/db/function/admin_drop_table_views_for_batch_api(regclass).md
@@ -42,6 +42,22 @@ BEGIN
EXECUTE 'DROP FUNCTION ' || upsert_function_name_custom || '()';

EXECUTE 'DROP FUNCTION ' || prepare_function_name_custom || '()';

-- Get unique columns and construct index name using same logic as in generate_active_code_custom_unique_constraint
DECLARE
table_properties admin.batch_api_table_properties;
unique_columns text[];
index_name text;
BEGIN
table_properties := admin.detect_batch_api_table_properties(table_name);
unique_columns := admin.get_unique_columns(table_properties);

-- Only attempt to drop if we have unique columns
IF array_length(unique_columns, 1) IS NOT NULL THEN
index_name := 'ix_' || table_name_str || '_' || array_to_string(unique_columns, '_');
EXECUTE format('DROP INDEX IF EXISTS %I', index_name);
END IF;
END;
END;
$function$
```
45 changes: 27 additions & 18 deletions doc/db/function/admin_import_establishment_era_upsert().md
@@ -5,7 +5,7 @@ CREATE OR REPLACE FUNCTION admin.import_establishment_era_upsert()
AS $function$
DECLARE
new_jsonb JSONB := to_jsonb(NEW);
edited_by_user RECORD;
edit_by_user RECORD;
tag RECORD;
physical_region RECORD;
physical_country RECORD;
@@ -65,7 +65,7 @@ BEGIN
SELECT NULL::int AS id INTO inserted_activity;
SELECT NULL::int AS id INTO inserted_stat_for_unit;

SELECT * INTO edited_by_user
SELECT * INTO edit_by_user
FROM public.statbus_user
WHERE uuid = auth.uid()
LIMIT 1;
@@ -136,7 +136,7 @@ BEGIN
FROM admin.process_enterprise_connection(
prior_establishment_id, 'establishment',
new_typed.valid_from, new_typed.valid_to,
edited_by_user.id) AS r;
edit_by_user.id) AS r;
END IF;

-- If no legal_unit is specified, but there was an existing entry connected to
@@ -189,6 +189,7 @@ BEGIN
, primary_for_enterprise
, data_source_id
, edit_by_user_id
, edit_at
)
VALUES
( new_typed.valid_from
@@ -206,7 +207,8 @@ BEGIN
, is_primary_for_legal_unit
, is_primary_for_enterprise
, data_source.id
, edited_by_user.id
, edit_by_user.id
, statement_timestamp()
)
RETURNING *
INTO inserted_establishment;
@@ -234,7 +236,7 @@ BEGIN
external_idents_to_add,
p_legal_unit_id => null::INTEGER,
p_establishment_id => inserted_establishment.id,
p_updated_by_user_id => edited_by_user.id
p_edit_by_user_id => edit_by_user.id
);

IF physical_region.id IS NOT NULL OR physical_country.id IS NOT NULL THEN
@@ -251,7 +253,8 @@ BEGIN
, region_id
, country_id
, data_source_id
, updated_by_user_id
, edit_by_user_id
, edit_at
)
VALUES
( new_typed.valid_from
@@ -266,7 +269,8 @@ BEGIN
, physical_region.id
, physical_country.id
, data_source.id
, edited_by_user.id
, edit_by_user.id
, statement_timestamp()
)
RETURNING *
INTO inserted_location;
@@ -303,7 +307,8 @@ BEGIN
, region_id
, country_id
, data_source_id
, updated_by_user_id
, edit_by_user_id
, edit_at
)
VALUES
( new_typed.valid_from
@@ -318,7 +323,8 @@ BEGIN
, postal_region.id
, postal_country.id
, data_source.id
, edited_by_user.id
, edit_by_user.id
, statement_timestamp()
)
RETURNING * INTO inserted_location;
END IF;
@@ -348,8 +354,8 @@ BEGIN
, type
, category_id
, data_source_id
, updated_by_user_id
, updated_at
, edit_by_user_id
, edit_at
)
VALUES
( new_typed.valid_from
Expand All @@ -358,7 +364,7 @@ BEGIN
, 'primary'
, primary_activity_category.id
, data_source.id
, edited_by_user.id
, edit_by_user.id
, statement_timestamp()
)
RETURNING *
@@ -390,8 +396,8 @@ BEGIN
, type
, category_id
, data_source_id
, updated_by_user_id
, updated_at
, edit_by_user_id
, edit_at
)
VALUES
( new_typed.valid_from
Expand All @@ -400,7 +406,7 @@ BEGIN
, 'secondary'
, secondary_activity_category.id
, data_source.id
, edited_by_user.id
, edit_by_user.id
, statement_timestamp()
)
RETURNING *
@@ -438,15 +444,18 @@ BEGIN
INSERT INTO public.tag_for_unit
( tag_id
, establishment_id
, updated_by_user_id
, edit_by_user_id
, edit_at
)
VALUES
( tag.id
, inserted_establishment.id
, edited_by_user.id
, edit_by_user.id
, statement_timestamp()
)
ON CONFLICT (tag_id, establishment_id)
DO UPDATE SET updated_by_user_id = EXCLUDED.updated_by_user_id
DO UPDATE SET edit_by_user_id = EXCLUDED.edit_by_user_id
, edit_at = EXCLUDED.edit_at
;
END IF;
