From aeefa31c018ce482691d66fee04fec0620cf2ce4 Mon Sep 17 00:00:00 2001 From: Adrian Herrmann Date: Mon, 4 Nov 2024 13:32:00 +0000 Subject: [PATCH 01/17] feat: added migration for new user cols and calculation for used space per user and element --- .../calculate_collection_space_v01.sql | 44 ++ db/functions/calculate_dataset_space_v01.sql | 13 + db/functions/calculate_element_space_v01.sql | 20 + db/functions/calculate_used_space_v01.sql | 64 ++ .../20241104100001_add_col_used_space.rb | 20 + db/schema.rb | 591 ++++++++++++++++++ 6 files changed, 752 insertions(+) create mode 100644 db/functions/calculate_collection_space_v01.sql create mode 100644 db/functions/calculate_dataset_space_v01.sql create mode 100644 db/functions/calculate_element_space_v01.sql create mode 100644 db/functions/calculate_used_space_v01.sql create mode 100644 db/migrate/20241104100001_add_col_used_space.rb diff --git a/db/functions/calculate_collection_space_v01.sql b/db/functions/calculate_collection_space_v01.sql new file mode 100644 index 0000000000..413399a115 --- /dev/null +++ b/db/functions/calculate_collection_space_v01.sql @@ -0,0 +1,44 @@ +CREATE OR REPLACE function calculate_collection_space(collectionId integer) +returns bigint +language plpgsql +as $function$ +declare + used_space_samples bigint default 0; + used_space_reactions bigint default 0; + used_space_wellplates bigint default 0; + used_space_screens bigint default 0; + used_space_research_plans bigint default 0; + used_space bigint default 0; +begin + select sum(calculate_element_space(sample_id, 'Sample')) into used_space_samples + from collections_samples + where collection_id = collectionId; + + used_space = COALESCE(used_space_samples,0); + + select sum(calculate_element_space(reaction_id, 'Reaction')) into used_space_reactions + from collections_reactions + where collection_id = collectionId; + + used_space = used_space + COALESCE(used_space_reactions,0); + + select sum(calculate_element_space(wellplate_id, 'Wellplate')) into used_space_wellplates + from collections_wellplates + where collection_id = collectionId; + + used_space = used_space + COALESCE(used_space_wellplates,0); + + select sum(calculate_element_space(screen_id, 'Screen')) into used_space_screens + from collections_screens + where collection_id = collectionId; + + used_space = used_space + COALESCE(used_space_screens,0); + + select sum(calculate_element_space(research_plan_id, 'ResearchPlan')) into used_space_research_plans + from collections_research_plans + where collection_id = collectionId; + + used_space = used_space + COALESCE(used_space_research_plans,0); + + return COALESCE(used_space,0); +end;$function$; diff --git a/db/functions/calculate_dataset_space_v01.sql b/db/functions/calculate_dataset_space_v01.sql new file mode 100644 index 0000000000..6899575974 --- /dev/null +++ b/db/functions/calculate_dataset_space_v01.sql @@ -0,0 +1,13 @@ +CREATE OR REPLACE function calculate_dataset_space(cid integer) +returns bigint +language plpgsql +as $function$ +declare + used_space bigint default 0; +begin + select sum((attachment_data->'metadata'->'size')::bigint) into used_space + from attachments + where attachable_type = 'Container' and attachable_id = cid + and attachable_id in (select id from containers where container_type = 'dataset'); + return COALESCE(used_space,0); +end;$function$; diff --git a/db/functions/calculate_element_space_v01.sql b/db/functions/calculate_element_space_v01.sql new file mode 100644 index 0000000000..bb7f06508a --- /dev/null +++ 
b/db/functions/calculate_element_space_v01.sql @@ -0,0 +1,20 @@ +CREATE OR REPLACE function calculate_element_space(el_id integer, el_type text) +returns bigint +language plpgsql +as $function$ +declare + used_space_attachments bigint default 0; + used_space_datasets bigint default 0; + used_space bigint default 0; +begin + select sum((attachment_data->'metadata'->'size')::bigint) into used_space_attachments + from attachments + where attachable_type = el_type and attachable_id = el_id; + used_space = COALESCE(used_space_attachments, 0); + + select sum(calculate_dataset_space(descendant_id)) into used_space_datasets + from container_hierarchies where ancestor_id = (select id from containers where containable_id = el_id and containable_type = el_type); + used_space = used_space + COALESCE(used_space_datasets, 0); + + return COALESCE(used_space, 0); +end;$function$; diff --git a/db/functions/calculate_used_space_v01.sql b/db/functions/calculate_used_space_v01.sql new file mode 100644 index 0000000000..a68695b298 --- /dev/null +++ b/db/functions/calculate_used_space_v01.sql @@ -0,0 +1,64 @@ +CREATE OR REPLACE function calculate_used_space(userId integer) +returns bigint +language plpgsql +as $function$ +declare + used_space_samples bigint default 0; + used_space_reactions bigint default 0; + used_space_wellplates bigint default 0; + used_space_screens bigint default 0; + used_space_research_plans bigint default 0; + used_space_reports bigint default 0; + used_space_inbox bigint default 0; + used_space bigint default 0; +begin + select sum(calculate_element_space(s.sample_id, 'Sample')) into used_space_samples from ( + select distinct sample_id + from collections_samples + where collection_id in (select id from collections where user_id = userId) + ) s; + used_space = COALESCE(used_space_samples,0); + + select sum(calculate_element_space(r.reaction_id, 'Reaction')) into used_space_reactions from ( + select distinct reaction_id + from collections_reactions + where collection_id in (select id from collections where user_id = userId) + ) r; + used_space = used_space + COALESCE(used_space_reactions,0); + + select sum(calculate_element_space(wp.wellplate_id, 'Wellplate')) into used_space_wellplates from ( + select distinct wellplate_id + from collections_wellplates + where collection_id in (select id from collections where user_id = userId) + ) wp; + used_space = used_space + COALESCE(used_space_wellplates,0); + + select sum(calculate_element_space(wp.screen_id, 'Screen')) into used_space_screens from ( + select distinct screen_id + from collections_screens + where collection_id in (select id from collections where user_id = userId) + ) wp; + used_space = used_space + COALESCE(used_space_screens,0); + + select sum(calculate_element_space(rp.research_plan_id, 'ResearchPlan')) into used_space_research_plans from ( + select distinct research_plan_id + from collections_research_plans + where collection_id in (select id from collections where user_id = userId) + ) rp; + used_space = used_space + COALESCE(used_space_research_plans,0); + + select sum(calculate_element_space(id, 'Report')) into used_space_reports + from reports + where author_id = userId; + used_space = used_space + COALESCE(used_space_reports,0); + + select sum((attachment_data->'metadata'->'size')::bigint) into used_space_inbox + from attachments + where attachable_type = 'Container' + and attachable_id is null and created_for = userId; + -- attachable_id is missing (why?), if this is a bug (and was fixed) change statement to + -- and 
attachable_id = (select id from containers where containable_type='User' and containable_id=UserID); + used_space = used_space + COALESCE(used_space_inbox,0); + + return COALESCE(used_space,0); +end;$function$; diff --git a/db/migrate/20241104100001_add_col_used_space.rb b/db/migrate/20241104100001_add_col_used_space.rb new file mode 100644 index 0000000000..f636eb64b6 --- /dev/null +++ b/db/migrate/20241104100001_add_col_used_space.rb @@ -0,0 +1,20 @@ +class AddColUsedSpace < ActiveRecord::Migration[6.1] + def up + add_column :users, :used_space, :bigint, :default => 0 + add_column :users, :available_space, :bigint, :default => 0 + + create_function :calculate_dataset_space + create_function :calculate_element_space + create_function :calculate_collection_space + create_function :calculate_used_space + execute "update users set used_space = calculate_used_space(id);" + end + def down + drop_function :calculate_used_space + drop_function :calculate_collection_space + drop_function :calculate_element_space + drop_function :calculate_dataset_space + remove_column :users, :available_space, :bigint + remove_column :users, :used_space, :bigint + end +end diff --git a/db/schema.rb b/db/schema.rb index 5a3fc3175d..bef231c09d 100644 --- a/db/schema.rb +++ b/db/schema.rb @@ -72,6 +72,7 @@ t.bigint "filesize" t.jsonb "attachment_data" t.integer "con_state" + t.jsonb "log_data" t.index ["attachable_type", "attachable_id"], name: "index_attachments_on_attachable_type_and_attachable_id" t.index ["identifier"], name: "index_attachments_on_identifier", unique: true end @@ -351,6 +352,8 @@ t.datetime "updated_at", null: false t.integer "parent_id" t.text "plain_text_content" + t.jsonb "log_data" + t.datetime "deleted_at" t.index ["containable_type", "containable_id"], name: "index_containers_on_containable" end @@ -607,6 +610,7 @@ t.float "loading" t.datetime "created_at" t.datetime "updated_at" + t.jsonb "log_data" t.index ["sample_id"], name: "index_elemental_compositions_on_sample_id" end @@ -1064,6 +1068,7 @@ t.text "plain_text_observation" t.jsonb "vessel_size", default: {"unit"=>"ml", "amount"=>nil} t.boolean "gaseous", default: false + t.jsonb "log_data" t.index ["deleted_at"], name: "index_reactions_on_deleted_at" t.index ["rinchi_short_key"], name: "index_reactions_on_rinchi_short_key", order: :desc t.index ["rinchi_web_key"], name: "index_reactions_on_rinchi_web_key" @@ -1167,6 +1172,7 @@ t.text "subject" t.jsonb "alternate_identifier" t.jsonb "related_identifier" + t.jsonb "log_data" t.index ["deleted_at"], name: "index_research_plan_metadata_on_deleted_at" t.index ["research_plan_id"], name: "index_research_plan_metadata_on_research_plan_id" end @@ -1187,6 +1193,7 @@ t.datetime "created_at", null: false t.datetime "updated_at", null: false t.jsonb "body" + t.jsonb "log_data" end create_table "research_plans_screens", force: :cascade do |t| @@ -1215,6 +1222,7 @@ t.hstore "custom_info" t.datetime "created_at", null: false t.datetime "updated_at", null: false + t.jsonb "log_data" t.index ["sample_id"], name: "index_residues_on_sample_id" end @@ -1273,6 +1281,7 @@ t.jsonb "solvent" t.boolean "dry_solvent", default: false t.boolean "inventory_sample", default: false + t.jsonb "log_data" t.index ["deleted_at"], name: "index_samples_on_deleted_at" t.index ["identifier"], name: "index_samples_on_identifier" t.index ["inventory_sample"], name: "index_samples_on_inventory_sample" @@ -1313,6 +1322,7 @@ t.datetime "deleted_at" t.jsonb "component_graph_data", default: {} t.text "plain_text_description" + 
t.jsonb "log_data" t.index ["deleted_at"], name: "index_screens_on_deleted_at" end @@ -1442,6 +1452,14 @@ t.index ["name"], name: "index_third_party_apps_on_name", unique: true end + create_table "used_space", id: false, force: :cascade do |t| + t.decimal "sum" + end + + create_table "used_space_reports", id: false, force: :cascade do |t| + t.decimal "sum" + end + create_table "user_affiliations", id: :serial, force: :cascade do |t| t.integer "user_id" t.integer "affiliation_id" @@ -1498,6 +1516,8 @@ t.boolean "account_active" t.integer "matrix", default: 0 t.jsonb "providers" + t.bigint "used_space", default: 0 + t.bigint "available_space", default: 0 t.index ["confirmation_token"], name: "index_users_on_confirmation_token", unique: true t.index ["deleted_at"], name: "index_users_on_deleted_at" t.index ["email"], name: "index_users_on_email", unique: true @@ -1587,6 +1607,7 @@ t.text "plain_text_description" t.integer "width", default: 12 t.integer "height", default: 8 + t.jsonb "log_data" t.index ["deleted_at"], name: "index_wellplates_on_deleted_at" end @@ -1602,6 +1623,7 @@ t.jsonb "readouts", default: [{"unit"=>"", "value"=>""}] t.string "label", default: "Molecular structure", null: false t.string "color_code" + t.jsonb "log_data" t.index ["deleted_at"], name: "index_wells_on_deleted_at" t.index ["sample_id"], name: "index_wells_on_sample_id" t.index ["wellplate_id"], name: "index_wells_on_wellplate_id" @@ -1855,14 +1877,583 @@ END; $function$ SQL + create_function :logidze_snapshot, sql_definition: <<-'SQL' + CREATE OR REPLACE FUNCTION public.logidze_snapshot(item jsonb, ts_column text DEFAULT NULL::text, columns text[] DEFAULT NULL::text[], include_columns boolean DEFAULT false) + RETURNS jsonb + LANGUAGE plpgsql + AS $function$ + -- version: 3 + DECLARE + ts timestamp with time zone; + k text; + BEGIN + item = item - 'log_data'; + IF ts_column IS NULL THEN + ts := statement_timestamp(); + ELSE + ts := coalesce((item->>ts_column)::timestamp with time zone, statement_timestamp()); + END IF; + + IF columns IS NOT NULL THEN + item := logidze_filter_keys(item, columns, include_columns); + END IF; + + FOR k IN (SELECT key FROM jsonb_each(item)) + LOOP + IF jsonb_typeof(item->k) = 'object' THEN + item := jsonb_set(item, ARRAY[k], to_jsonb(item->>k)); + END IF; + END LOOP; + + return json_build_object( + 'v', 1, + 'h', jsonb_build_array( + logidze_version(1, item, ts) + ) + ); + END; + $function$ + SQL + create_function :logidze_logger, sql_definition: <<-'SQL' + CREATE OR REPLACE FUNCTION public.logidze_logger() + RETURNS trigger + LANGUAGE plpgsql + AS $function$ + -- version: 2 + DECLARE + changes jsonb; + version jsonb; + snapshot jsonb; + new_v integer; + size integer; + history_limit integer; + debounce_time integer; + current_version integer; + k text; + iterator integer; + item record; + columns text[]; + include_columns boolean; + ts timestamp with time zone; + ts_column text; + err_sqlstate text; + err_message text; + err_detail text; + err_hint text; + err_context text; + err_table_name text; + err_schema_name text; + err_jsonb jsonb; + err_captured boolean; + BEGIN + ts_column := NULLIF(TG_ARGV[1], 'null'); + columns := NULLIF(TG_ARGV[2], 'null'); + include_columns := NULLIF(TG_ARGV[3], 'null'); + + IF TG_OP = 'INSERT' THEN + IF columns IS NOT NULL THEN + snapshot = logidze_snapshot(to_jsonb(NEW.*), ts_column, columns, include_columns); + ELSE + snapshot = logidze_snapshot(to_jsonb(NEW.*), ts_column); + END IF; + + IF snapshot#>>'{h, -1, c}' != '{}' THEN + NEW.log_data := 
snapshot; + END IF; + + ELSIF TG_OP = 'UPDATE' THEN + + IF OLD.log_data is NULL OR OLD.log_data = '{}'::jsonb THEN + IF columns IS NOT NULL THEN + snapshot = logidze_snapshot(to_jsonb(NEW.*), ts_column, columns, include_columns); + ELSE + snapshot = logidze_snapshot(to_jsonb(NEW.*), ts_column); + END IF; + + IF snapshot#>>'{h, -1, c}' != '{}' THEN + NEW.log_data := snapshot; + END IF; + RETURN NEW; + END IF; + + history_limit := NULLIF(TG_ARGV[0], 'null'); + debounce_time := NULLIF(TG_ARGV[4], 'null'); + + current_version := (NEW.log_data->>'v')::int; + + IF ts_column IS NULL THEN + ts := statement_timestamp(); + ELSE + ts := (to_jsonb(NEW.*)->>ts_column)::timestamp with time zone; + IF ts IS NULL OR ts = (to_jsonb(OLD.*)->>ts_column)::timestamp with time zone THEN + ts := statement_timestamp(); + END IF; + END IF; + + IF NEW = OLD THEN + RETURN NEW; + END IF; + + IF current_version < (NEW.log_data#>>'{h,-1,v}')::int THEN + iterator := 0; + FOR item in SELECT * FROM jsonb_array_elements(NEW.log_data->'h') + LOOP + IF (item.value->>'v')::int > current_version THEN + NEW.log_data := jsonb_set( + NEW.log_data, + '{h}', + (NEW.log_data->'h') - iterator + ); + END IF; + iterator := iterator + 1; + END LOOP; + END IF; + + changes := '{}'; + + IF (coalesce(current_setting('logidze.full_snapshot', true), '') = 'on') THEN + BEGIN + changes = hstore_to_jsonb_loose(hstore(NEW.*)); + EXCEPTION + WHEN NUMERIC_VALUE_OUT_OF_RANGE THEN + changes = row_to_json(NEW.*)::jsonb; + FOR k IN (SELECT key FROM jsonb_each(changes)) + LOOP + IF jsonb_typeof(changes->k) = 'object' THEN + changes = jsonb_set(changes, ARRAY[k], to_jsonb(changes->>k)); + END IF; + END LOOP; + END; + ELSE + BEGIN + changes = hstore_to_jsonb_loose( + hstore(NEW.*) - hstore(OLD.*) + ); + EXCEPTION + WHEN NUMERIC_VALUE_OUT_OF_RANGE THEN + changes = (SELECT + COALESCE(json_object_agg(key, value), '{}')::jsonb + FROM + jsonb_each(row_to_json(NEW.*)::jsonb) + WHERE NOT jsonb_build_object(key, value) <@ row_to_json(OLD.*)::jsonb); + FOR k IN (SELECT key FROM jsonb_each(changes)) + LOOP + IF jsonb_typeof(changes->k) = 'object' THEN + changes = jsonb_set(changes, ARRAY[k], to_jsonb(changes->>k)); + END IF; + END LOOP; + END; + END IF; + + changes = changes - 'log_data'; + + IF columns IS NOT NULL THEN + changes = logidze_filter_keys(changes, columns, include_columns); + END IF; + + IF changes = '{}' THEN + RETURN NEW; + END IF; + + new_v := (NEW.log_data#>>'{h,-1,v}')::int + 1; + + size := jsonb_array_length(NEW.log_data->'h'); + version := logidze_version(new_v, changes, ts); + + IF ( + debounce_time IS NOT NULL AND + (version->>'ts')::bigint - (NEW.log_data#>'{h,-1,ts}')::text::bigint <= debounce_time + ) THEN + -- merge new version with the previous one + new_v := (NEW.log_data#>>'{h,-1,v}')::int; + version := logidze_version(new_v, (NEW.log_data#>'{h,-1,c}')::jsonb || changes, ts); + -- remove the previous version from log + NEW.log_data := jsonb_set( + NEW.log_data, + '{h}', + (NEW.log_data->'h') - (size - 1) + ); + END IF; + + NEW.log_data := jsonb_set( + NEW.log_data, + ARRAY['h', size::text], + version, + true + ); + + NEW.log_data := jsonb_set( + NEW.log_data, + '{v}', + to_jsonb(new_v) + ); + + IF history_limit IS NOT NULL AND history_limit <= size THEN + NEW.log_data := logidze_compact_history(NEW.log_data, size - history_limit + 1); + END IF; + END IF; + + return NEW; + EXCEPTION + WHEN OTHERS THEN + GET STACKED DIAGNOSTICS err_sqlstate = RETURNED_SQLSTATE, + err_message = MESSAGE_TEXT, + err_detail = PG_EXCEPTION_DETAIL, + err_hint = 
PG_EXCEPTION_HINT, + err_context = PG_EXCEPTION_CONTEXT, + err_schema_name = SCHEMA_NAME, + err_table_name = TABLE_NAME; + err_jsonb := jsonb_build_object( + 'returned_sqlstate', err_sqlstate, + 'message_text', err_message, + 'pg_exception_detail', err_detail, + 'pg_exception_hint', err_hint, + 'pg_exception_context', err_context, + 'schema_name', err_schema_name, + 'table_name', err_table_name + ); + err_captured = logidze_capture_exception(err_jsonb); + IF err_captured THEN + return NEW; + ELSE + RAISE; + END IF; + END; + $function$ + SQL + create_function :logidze_version, sql_definition: <<-'SQL' + CREATE OR REPLACE FUNCTION public.logidze_version(v bigint, data jsonb, ts timestamp with time zone) + RETURNS jsonb + LANGUAGE plpgsql + AS $function$ + -- version: 2 + DECLARE + buf jsonb; + BEGIN + data = data - 'log_data'; + buf := jsonb_build_object( + 'ts', + (extract(epoch from ts) * 1000)::bigint, + 'v', + v, + 'c', + data + ); + IF coalesce(current_setting('logidze.meta', true), '') <> '' THEN + buf := jsonb_insert(buf, '{m}', current_setting('logidze.meta')::jsonb); + END IF; + RETURN buf; + END; + $function$ + SQL + create_function :logidze_compact_history, sql_definition: <<-'SQL' + CREATE OR REPLACE FUNCTION public.logidze_compact_history(log_data jsonb, cutoff integer DEFAULT 1) + RETURNS jsonb + LANGUAGE plpgsql + AS $function$ + -- version: 1 + DECLARE + merged jsonb; + BEGIN + LOOP + merged := jsonb_build_object( + 'ts', + log_data#>'{h,1,ts}', + 'v', + log_data#>'{h,1,v}', + 'c', + (log_data#>'{h,0,c}') || (log_data#>'{h,1,c}') + ); + + IF (log_data#>'{h,1}' ? 'm') THEN + merged := jsonb_set(merged, ARRAY['m'], log_data#>'{h,1,m}'); + END IF; + + log_data := jsonb_set( + log_data, + '{h}', + jsonb_set( + log_data->'h', + '{1}', + merged + ) - 0 + ); + + cutoff := cutoff - 1; + + EXIT WHEN cutoff <= 0; + END LOOP; + + return log_data; + END; + $function$ + SQL + create_function :logidze_capture_exception, sql_definition: <<-'SQL' + CREATE OR REPLACE FUNCTION public.logidze_capture_exception(error_data jsonb) + RETURNS boolean + LANGUAGE plpgsql + AS $function$ + -- version: 1 + BEGIN + -- Feel free to change this function to change Logidze behavior on exception. + -- + -- Return `false` to raise exception or `true` to commit record changes. + -- + -- `error_data` contains: + -- - returned_sqlstate + -- - message_text + -- - pg_exception_detail + -- - pg_exception_hint + -- - pg_exception_context + -- - schema_name + -- - table_name + -- Learn more about available keys: + -- https://www.postgresql.org/docs/9.6/plpgsql-control-structures.html#PLPGSQL-EXCEPTION-DIAGNOSTICS-VALUES + -- + + return false; + END; + $function$ + SQL + create_function :logidze_filter_keys, sql_definition: <<-'SQL' + CREATE OR REPLACE FUNCTION public.logidze_filter_keys(obj jsonb, keys text[], include_columns boolean DEFAULT false) + RETURNS jsonb + LANGUAGE plpgsql + AS $function$ + -- version: 1 + DECLARE + res jsonb; + key text; + BEGIN + res := '{}'; + + IF include_columns THEN + FOREACH key IN ARRAY keys + LOOP + IF obj ? 
key THEN + res = jsonb_insert(res, ARRAY[key], obj->key); + END IF; + END LOOP; + ELSE + res = obj; + FOREACH key IN ARRAY keys + LOOP + res = res - key; + END LOOP; + END IF; + + RETURN res; + END; + $function$ + SQL + create_function :calculate_dataset_space, sql_definition: <<-'SQL' + CREATE OR REPLACE FUNCTION public.calculate_dataset_space(cid integer) + RETURNS bigint + LANGUAGE plpgsql + AS $function$ + declare + used_space bigint default 0; + begin + select sum((attachment_data->'metadata'->'size')::bigint) into used_space + from attachments + where attachable_type = 'Container' and attachable_id = cid + and attachable_id in (select id from containers where container_type = 'dataset'); + return COALESCE(used_space,0); + end;$function$ + SQL + create_function :calculate_element_space, sql_definition: <<-'SQL' + CREATE OR REPLACE FUNCTION public.calculate_element_space(el_id integer, el_type text) + RETURNS bigint + LANGUAGE plpgsql + AS $function$ + declare + used_space_attachments bigint default 0; + used_space_datasets bigint default 0; + used_space bigint default 0; + begin + select sum((attachment_data->'metadata'->'size')::bigint) into used_space_attachments + from attachments + where attachable_type = el_type and attachable_id = el_id; + used_space = COALESCE(used_space_attachments, 0); + + select sum(calculate_dataset_space(descendant_id)) into used_space_datasets + from container_hierarchies where ancestor_id = (select id from containers where containable_id = el_id and containable_type = el_type); + used_space = used_space + COALESCE(used_space_datasets, 0); + + return COALESCE(used_space, 0); + end;$function$ + SQL + create_function :calculate_collection_space, sql_definition: <<-'SQL' + CREATE OR REPLACE FUNCTION public.calculate_collection_space(collectionid integer) + RETURNS bigint + LANGUAGE plpgsql + AS $function$ + declare + used_space_samples bigint default 0; + used_space_reactions bigint default 0; + used_space_wellplates bigint default 0; + used_space_screens bigint default 0; + used_space_research_plans bigint default 0; + used_space bigint default 0; + begin + select sum(calculate_element_space(sample_id, 'Sample')) into used_space_samples + from collections_samples + where collection_id = collectionId; + + used_space = COALESCE(used_space_samples,0); + + select sum(calculate_element_space(reaction_id, 'Reaction')) into used_space_reactions + from collections_reactions + where collection_id = collectionId; + + used_space = used_space + COALESCE(used_space_reactions,0); + + select sum(calculate_element_space(wellplate_id, 'Wellplate')) into used_space_wellplates + from collections_wellplates + where collection_id = collectionId; + + used_space = used_space + COALESCE(used_space_wellplates,0); + + select sum(calculate_element_space(screen_id, 'Screen')) into used_space_screens + from collections_screens + where collection_id = collectionId; + + used_space = used_space + COALESCE(used_space_screens,0); + + select sum(calculate_element_space(research_plan_id, 'ResearchPlan')) into used_space_research_plans + from collections_research_plans + where collection_id = collectionId; + + used_space = used_space + COALESCE(used_space_research_plans,0); + + return COALESCE(used_space,0); + end;$function$ + SQL + create_function :calculate_used_space, sql_definition: <<-'SQL' + CREATE OR REPLACE FUNCTION public.calculate_used_space(userid integer) + RETURNS bigint + LANGUAGE plpgsql + AS $function$ + declare + used_space_samples bigint default 0; + used_space_reactions 
bigint default 0; + used_space_wellplates bigint default 0; + used_space_screens bigint default 0; + used_space_research_plans bigint default 0; + used_space_reports bigint default 0; + used_space_inbox bigint default 0; + used_space bigint default 0; + begin + select sum(calculate_element_space(s.sample_id, 'Sample')) into used_space_samples from ( + select distinct sample_id + from collections_samples + where collection_id in (select id from collections where user_id = userId) + ) s; + used_space = COALESCE(used_space_samples,0); + + select sum(calculate_element_space(r.reaction_id, 'Reaction')) into used_space_reactions from ( + select distinct reaction_id + from collections_reactions + where collection_id in (select id from collections where user_id = userId) + ) r; + used_space = used_space + COALESCE(used_space_reactions,0); + + select sum(calculate_element_space(wp.wellplate_id, 'Wellplate')) into used_space_wellplates from ( + select distinct wellplate_id + from collections_wellplates + where collection_id in (select id from collections where user_id = userId) + ) wp; + used_space = used_space + COALESCE(used_space_wellplates,0); + + select sum(calculate_element_space(wp.screen_id, 'Screen')) into used_space_screens from ( + select distinct screen_id + from collections_screens + where collection_id in (select id from collections where user_id = userId) + ) wp; + used_space = used_space + COALESCE(used_space_screens,0); + + select sum(calculate_element_space(rp.research_plan_id, 'ResearchPlan')) into used_space_research_plans from ( + select distinct research_plan_id + from collections_research_plans + where collection_id in (select id from collections where user_id = userId) + ) rp; + used_space = used_space + COALESCE(used_space_research_plans,0); + + select sum(calculate_element_space(id, 'Report')) into used_space_reports + from reports + where author_id = userId; + used_space = used_space + COALESCE(used_space_reports,0); + + select sum((attachment_data->'metadata'->'size')::bigint) into used_space_inbox + from attachments + where attachable_type = 'Container' + and attachable_id is null and created_for = userId; + -- attachable_id is missing (why?), if this is a bug (and was fixed) change statement to + -- and attachable_id = (select id from containers where containable_type='User' and containable_id=UserID); + used_space = used_space + COALESCE(used_space_inbox,0); + + return COALESCE(used_space,0); + end;$function$ + SQL + create_trigger :logidze_on_attachments, sql_definition: <<-SQL + CREATE TRIGGER logidze_on_attachments BEFORE INSERT OR UPDATE ON public.attachments FOR EACH ROW WHEN ((COALESCE(current_setting('logidze.disabled'::text, true), ''::text) <> 'on'::text)) EXECUTE FUNCTION logidze_logger('null', 'updated_at') + SQL + create_trigger :logidze_on_containers, sql_definition: <<-SQL + CREATE TRIGGER logidze_on_containers BEFORE INSERT OR UPDATE ON public.containers FOR EACH ROW WHEN ((COALESCE(current_setting('logidze.disabled'::text, true), ''::text) <> 'on'::text)) EXECUTE FUNCTION logidze_logger('null', 'updated_at') + SQL + create_trigger :logidze_on_elemental_compositions, sql_definition: <<-SQL + CREATE TRIGGER logidze_on_elemental_compositions BEFORE INSERT OR UPDATE ON public.elemental_compositions FOR EACH ROW WHEN ((COALESCE(current_setting('logidze.disabled'::text, true), ''::text) <> 'on'::text)) EXECUTE FUNCTION logidze_logger('null', 'updated_at') + SQL create_trigger :update_users_matrix_trg, sql_definition: <<-SQL CREATE TRIGGER 
update_users_matrix_trg AFTER INSERT OR UPDATE ON public.matrices FOR EACH ROW EXECUTE FUNCTION update_users_matrix() SQL create_trigger :lab_trg_layers_changes, sql_definition: <<-SQL CREATE TRIGGER lab_trg_layers_changes AFTER UPDATE ON public.layers FOR EACH ROW EXECUTE FUNCTION lab_record_layers_changes() SQL + create_trigger :logidze_on_reactions, sql_definition: <<-SQL + CREATE TRIGGER logidze_on_reactions BEFORE INSERT OR UPDATE ON public.reactions FOR EACH ROW WHEN ((COALESCE(current_setting('logidze.disabled'::text, true), ''::text) <> 'on'::text)) EXECUTE FUNCTION logidze_logger('null', 'updated_at') + SQL + create_trigger :logidze_on_reactions_samples, sql_definition: <<-SQL + CREATE TRIGGER logidze_on_reactions_samples BEFORE INSERT OR UPDATE ON public.reactions_samples FOR EACH ROW WHEN ((COALESCE(current_setting('logidze.disabled'::text, true), ''::text) <> 'on'::text)) EXECUTE FUNCTION logidze_logger('null', 'updated_at') + SQL + create_trigger :logidze_on_research_plan_metadata, sql_definition: <<-SQL + CREATE TRIGGER logidze_on_research_plan_metadata BEFORE INSERT OR UPDATE ON public.research_plan_metadata FOR EACH ROW WHEN ((COALESCE(current_setting('logidze.disabled'::text, true), ''::text) <> 'on'::text)) EXECUTE FUNCTION logidze_logger('null', 'updated_at') + SQL + create_trigger :logidze_on_research_plans, sql_definition: <<-SQL + CREATE TRIGGER logidze_on_research_plans BEFORE INSERT OR UPDATE ON public.research_plans FOR EACH ROW WHEN ((COALESCE(current_setting('logidze.disabled'::text, true), ''::text) <> 'on'::text)) EXECUTE FUNCTION logidze_logger('null', 'updated_at') + SQL + create_trigger :logidze_on_residues, sql_definition: <<-SQL + CREATE TRIGGER logidze_on_residues BEFORE INSERT OR UPDATE ON public.residues FOR EACH ROW WHEN ((COALESCE(current_setting('logidze.disabled'::text, true), ''::text) <> 'on'::text)) EXECUTE FUNCTION logidze_logger('null', 'updated_at') + SQL + create_trigger :logidze_on_samples, sql_definition: <<-SQL + CREATE TRIGGER logidze_on_samples BEFORE INSERT OR UPDATE ON public.samples FOR EACH ROW WHEN ((COALESCE(current_setting('logidze.disabled'::text, true), ''::text) <> 'on'::text)) EXECUTE FUNCTION logidze_logger('null', 'updated_at') + SQL + create_trigger :logidze_on_screens, sql_definition: <<-SQL + CREATE TRIGGER logidze_on_screens BEFORE INSERT OR UPDATE ON public.screens FOR EACH ROW WHEN ((COALESCE(current_setting('logidze.disabled'::text, true), ''::text) <> 'on'::text)) EXECUTE FUNCTION logidze_logger('null', 'updated_at') + SQL + create_trigger :logidze_on_wellplates, sql_definition: <<-SQL + CREATE TRIGGER logidze_on_wellplates BEFORE INSERT OR UPDATE ON public.wellplates FOR EACH ROW WHEN ((COALESCE(current_setting('logidze.disabled'::text, true), ''::text) <> 'on'::text)) EXECUTE FUNCTION logidze_logger('null', 'updated_at') + SQL + create_trigger :logidze_on_wells, sql_definition: <<-SQL + CREATE TRIGGER logidze_on_wells BEFORE INSERT OR UPDATE ON public.wells FOR EACH ROW WHEN ((COALESCE(current_setting('logidze.disabled'::text, true), ''::text) <> 'on'::text)) EXECUTE FUNCTION logidze_logger('null', 'updated_at') + SQL + + create_view "v_samples_collections", sql_definition: <<-SQL + SELECT cols.id AS cols_id, + cols.user_id AS cols_user_id, + cols.sample_detail_level AS cols_sample_detail_level, + cols.wellplate_detail_level AS cols_wellplate_detail_level, + cols.shared_by_id AS cols_shared_by_id, + cols.is_shared AS cols_is_shared, + samples.id AS sams_id, + samples.name AS sams_name + FROM ((collections cols + 
JOIN collections_samples col_samples ON (((col_samples.collection_id = cols.id) AND (col_samples.deleted_at IS NULL)))) + JOIN samples ON (((samples.id = col_samples.sample_id) AND (samples.deleted_at IS NULL)))) + WHERE (cols.deleted_at IS NULL); + SQL create_view "literal_groups", sql_definition: <<-SQL SELECT lits.element_type, From 111d7b796427bf39c4cea051cb5619a814882abb Mon Sep 17 00:00:00 2001 From: Adrian Herrmann Date: Fri, 29 Nov 2024 15:45:48 +0000 Subject: [PATCH 02/17] Added inbox upload check --- app/api/chemotion/attachment_api.rb | 14 +- app/models/attachment.rb | 14 + db/schema.rb | 398 ---------------------- spec/api/chemotion/attachment_api_spec.rb | 29 +- 4 files changed, 50 insertions(+), 405 deletions(-) diff --git a/app/api/chemotion/attachment_api.rb b/app/api/chemotion/attachment_api.rb index 2456910af2..d4f2c3209e 100644 --- a/app/api/chemotion/attachment_api.rb +++ b/app/api/chemotion/attachment_api.rb @@ -258,7 +258,7 @@ def remove_duplicated(att) desc 'Upload files to Inbox as unsorted' post 'upload_to_inbox' do attach_ary = [] - params.each do |_file_id, file| + params.each_value do |file| next unless tempfile = file[:tempfile] # rubocop:disable Lint/AssignmentInCondition attach = Attachment.new( @@ -274,6 +274,8 @@ def remove_duplicated(att) begin attach.save! attach_ary.push(attach.id) + rescue StandardError + status 413 ensure tempfile.close tempfile.unlink @@ -417,7 +419,7 @@ def remove_duplicated(att) desc 'Return Base64 encoded thumbnails' params do - requires :ids, type: Array[Integer] + requires :ids, type: [Integer] end post 'thumbnails' do thumbnails = params[:ids].map do |a_id| @@ -434,7 +436,7 @@ def remove_duplicated(att) desc 'Return Base64 encoded files' params do - requires :ids, type: Array[Integer] + requires :ids, type: [Integer] end post 'files' do files = params[:ids].map do |a_id| @@ -451,8 +453,8 @@ def remove_duplicated(att) desc 'Regenerate spectra' params do - requires :original, type: Array[Integer] - requires :generated, type: Array[Integer] + requires :original, type: [Integer] + requires :generated, type: [Integer] end post 'regenerate_spectrum' do pm = to_rails_snake_case(params) @@ -475,7 +477,7 @@ def remove_duplicated(att) desc 'Regenerate edited spectra' params do - requires :edited, type: Array[Integer] + requires :edited, type: [Integer] optional :molfile, type: String end post 'regenerate_edited_spectrum' do diff --git a/app/models/attachment.rb b/app/models/attachment.rb index 5c75a55f4d..f027a10081 100644 --- a/app/models/attachment.rb +++ b/app/models/attachment.rb @@ -297,6 +297,18 @@ def delete_file_and_thumbnail attachment_attacher.destroy end + def check_user_quota + user = User.find(created_for.nil? ? created_by : created_for) + if (user.used_space + attachment_data['metadata']['size']) > user.available_space && + !user.available_space.zero? + return false + end + + true + rescue ActiveRecord::RecordNotFound + true # creating attachments without user is allowed (for tests) + end + def attach_file return if file_path.nil? return unless File.exist?(file_path) @@ -304,6 +316,8 @@ def attach_file attachment_attacher.attach(File.open(file_path, binmode: true)) raise 'File to large' unless valid? 
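+    # check_user_quota (added above) returns true when no owning user can be
+    # resolved or when the user's available_space is 0 (meaning no quota);
+    # otherwise used_space plus this file's size must stay within available_space.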
+ raise 'User quota exceeded' unless check_user_quota + attachment_attacher.create_derivatives update_column('attachment_data', attachment_data) # rubocop:disable Rails/SkipsModelValidations diff --git a/db/schema.rb b/db/schema.rb index bef231c09d..926bbfccfb 100644 --- a/db/schema.rb +++ b/db/schema.rb @@ -72,7 +72,6 @@ t.bigint "filesize" t.jsonb "attachment_data" t.integer "con_state" - t.jsonb "log_data" t.index ["attachable_type", "attachable_id"], name: "index_attachments_on_attachable_type_and_attachable_id" t.index ["identifier"], name: "index_attachments_on_identifier", unique: true end @@ -352,8 +351,6 @@ t.datetime "updated_at", null: false t.integer "parent_id" t.text "plain_text_content" - t.jsonb "log_data" - t.datetime "deleted_at" t.index ["containable_type", "containable_id"], name: "index_containers_on_containable" end @@ -610,7 +607,6 @@ t.float "loading" t.datetime "created_at" t.datetime "updated_at" - t.jsonb "log_data" t.index ["sample_id"], name: "index_elemental_compositions_on_sample_id" end @@ -1068,7 +1064,6 @@ t.text "plain_text_observation" t.jsonb "vessel_size", default: {"unit"=>"ml", "amount"=>nil} t.boolean "gaseous", default: false - t.jsonb "log_data" t.index ["deleted_at"], name: "index_reactions_on_deleted_at" t.index ["rinchi_short_key"], name: "index_reactions_on_rinchi_short_key", order: :desc t.index ["rinchi_web_key"], name: "index_reactions_on_rinchi_web_key" @@ -1172,7 +1167,6 @@ t.text "subject" t.jsonb "alternate_identifier" t.jsonb "related_identifier" - t.jsonb "log_data" t.index ["deleted_at"], name: "index_research_plan_metadata_on_deleted_at" t.index ["research_plan_id"], name: "index_research_plan_metadata_on_research_plan_id" end @@ -1193,7 +1187,6 @@ t.datetime "created_at", null: false t.datetime "updated_at", null: false t.jsonb "body" - t.jsonb "log_data" end create_table "research_plans_screens", force: :cascade do |t| @@ -1222,7 +1215,6 @@ t.hstore "custom_info" t.datetime "created_at", null: false t.datetime "updated_at", null: false - t.jsonb "log_data" t.index ["sample_id"], name: "index_residues_on_sample_id" end @@ -1281,7 +1273,6 @@ t.jsonb "solvent" t.boolean "dry_solvent", default: false t.boolean "inventory_sample", default: false - t.jsonb "log_data" t.index ["deleted_at"], name: "index_samples_on_deleted_at" t.index ["identifier"], name: "index_samples_on_identifier" t.index ["inventory_sample"], name: "index_samples_on_inventory_sample" @@ -1322,7 +1313,6 @@ t.datetime "deleted_at" t.jsonb "component_graph_data", default: {} t.text "plain_text_description" - t.jsonb "log_data" t.index ["deleted_at"], name: "index_screens_on_deleted_at" end @@ -1452,14 +1442,6 @@ t.index ["name"], name: "index_third_party_apps_on_name", unique: true end - create_table "used_space", id: false, force: :cascade do |t| - t.decimal "sum" - end - - create_table "used_space_reports", id: false, force: :cascade do |t| - t.decimal "sum" - end - create_table "user_affiliations", id: :serial, force: :cascade do |t| t.integer "user_id" t.integer "affiliation_id" @@ -1607,7 +1589,6 @@ t.text "plain_text_description" t.integer "width", default: 12 t.integer "height", default: 8 - t.jsonb "log_data" t.index ["deleted_at"], name: "index_wellplates_on_deleted_at" end @@ -1623,7 +1604,6 @@ t.jsonb "readouts", default: [{"unit"=>"", "value"=>""}] t.string "label", default: "Molecular structure", null: false t.string "color_code" - t.jsonb "log_data" t.index ["deleted_at"], name: "index_wells_on_deleted_at" t.index ["sample_id"], name: 
"index_wells_on_sample_id" t.index ["wellplate_id"], name: "index_wells_on_wellplate_id" @@ -1877,375 +1857,6 @@ END; $function$ SQL - create_function :logidze_snapshot, sql_definition: <<-'SQL' - CREATE OR REPLACE FUNCTION public.logidze_snapshot(item jsonb, ts_column text DEFAULT NULL::text, columns text[] DEFAULT NULL::text[], include_columns boolean DEFAULT false) - RETURNS jsonb - LANGUAGE plpgsql - AS $function$ - -- version: 3 - DECLARE - ts timestamp with time zone; - k text; - BEGIN - item = item - 'log_data'; - IF ts_column IS NULL THEN - ts := statement_timestamp(); - ELSE - ts := coalesce((item->>ts_column)::timestamp with time zone, statement_timestamp()); - END IF; - - IF columns IS NOT NULL THEN - item := logidze_filter_keys(item, columns, include_columns); - END IF; - - FOR k IN (SELECT key FROM jsonb_each(item)) - LOOP - IF jsonb_typeof(item->k) = 'object' THEN - item := jsonb_set(item, ARRAY[k], to_jsonb(item->>k)); - END IF; - END LOOP; - - return json_build_object( - 'v', 1, - 'h', jsonb_build_array( - logidze_version(1, item, ts) - ) - ); - END; - $function$ - SQL - create_function :logidze_logger, sql_definition: <<-'SQL' - CREATE OR REPLACE FUNCTION public.logidze_logger() - RETURNS trigger - LANGUAGE plpgsql - AS $function$ - -- version: 2 - DECLARE - changes jsonb; - version jsonb; - snapshot jsonb; - new_v integer; - size integer; - history_limit integer; - debounce_time integer; - current_version integer; - k text; - iterator integer; - item record; - columns text[]; - include_columns boolean; - ts timestamp with time zone; - ts_column text; - err_sqlstate text; - err_message text; - err_detail text; - err_hint text; - err_context text; - err_table_name text; - err_schema_name text; - err_jsonb jsonb; - err_captured boolean; - BEGIN - ts_column := NULLIF(TG_ARGV[1], 'null'); - columns := NULLIF(TG_ARGV[2], 'null'); - include_columns := NULLIF(TG_ARGV[3], 'null'); - - IF TG_OP = 'INSERT' THEN - IF columns IS NOT NULL THEN - snapshot = logidze_snapshot(to_jsonb(NEW.*), ts_column, columns, include_columns); - ELSE - snapshot = logidze_snapshot(to_jsonb(NEW.*), ts_column); - END IF; - - IF snapshot#>>'{h, -1, c}' != '{}' THEN - NEW.log_data := snapshot; - END IF; - - ELSIF TG_OP = 'UPDATE' THEN - - IF OLD.log_data is NULL OR OLD.log_data = '{}'::jsonb THEN - IF columns IS NOT NULL THEN - snapshot = logidze_snapshot(to_jsonb(NEW.*), ts_column, columns, include_columns); - ELSE - snapshot = logidze_snapshot(to_jsonb(NEW.*), ts_column); - END IF; - - IF snapshot#>>'{h, -1, c}' != '{}' THEN - NEW.log_data := snapshot; - END IF; - RETURN NEW; - END IF; - - history_limit := NULLIF(TG_ARGV[0], 'null'); - debounce_time := NULLIF(TG_ARGV[4], 'null'); - - current_version := (NEW.log_data->>'v')::int; - - IF ts_column IS NULL THEN - ts := statement_timestamp(); - ELSE - ts := (to_jsonb(NEW.*)->>ts_column)::timestamp with time zone; - IF ts IS NULL OR ts = (to_jsonb(OLD.*)->>ts_column)::timestamp with time zone THEN - ts := statement_timestamp(); - END IF; - END IF; - - IF NEW = OLD THEN - RETURN NEW; - END IF; - - IF current_version < (NEW.log_data#>>'{h,-1,v}')::int THEN - iterator := 0; - FOR item in SELECT * FROM jsonb_array_elements(NEW.log_data->'h') - LOOP - IF (item.value->>'v')::int > current_version THEN - NEW.log_data := jsonb_set( - NEW.log_data, - '{h}', - (NEW.log_data->'h') - iterator - ); - END IF; - iterator := iterator + 1; - END LOOP; - END IF; - - changes := '{}'; - - IF (coalesce(current_setting('logidze.full_snapshot', true), '') = 'on') THEN - BEGIN - 
changes = hstore_to_jsonb_loose(hstore(NEW.*)); - EXCEPTION - WHEN NUMERIC_VALUE_OUT_OF_RANGE THEN - changes = row_to_json(NEW.*)::jsonb; - FOR k IN (SELECT key FROM jsonb_each(changes)) - LOOP - IF jsonb_typeof(changes->k) = 'object' THEN - changes = jsonb_set(changes, ARRAY[k], to_jsonb(changes->>k)); - END IF; - END LOOP; - END; - ELSE - BEGIN - changes = hstore_to_jsonb_loose( - hstore(NEW.*) - hstore(OLD.*) - ); - EXCEPTION - WHEN NUMERIC_VALUE_OUT_OF_RANGE THEN - changes = (SELECT - COALESCE(json_object_agg(key, value), '{}')::jsonb - FROM - jsonb_each(row_to_json(NEW.*)::jsonb) - WHERE NOT jsonb_build_object(key, value) <@ row_to_json(OLD.*)::jsonb); - FOR k IN (SELECT key FROM jsonb_each(changes)) - LOOP - IF jsonb_typeof(changes->k) = 'object' THEN - changes = jsonb_set(changes, ARRAY[k], to_jsonb(changes->>k)); - END IF; - END LOOP; - END; - END IF; - - changes = changes - 'log_data'; - - IF columns IS NOT NULL THEN - changes = logidze_filter_keys(changes, columns, include_columns); - END IF; - - IF changes = '{}' THEN - RETURN NEW; - END IF; - - new_v := (NEW.log_data#>>'{h,-1,v}')::int + 1; - - size := jsonb_array_length(NEW.log_data->'h'); - version := logidze_version(new_v, changes, ts); - - IF ( - debounce_time IS NOT NULL AND - (version->>'ts')::bigint - (NEW.log_data#>'{h,-1,ts}')::text::bigint <= debounce_time - ) THEN - -- merge new version with the previous one - new_v := (NEW.log_data#>>'{h,-1,v}')::int; - version := logidze_version(new_v, (NEW.log_data#>'{h,-1,c}')::jsonb || changes, ts); - -- remove the previous version from log - NEW.log_data := jsonb_set( - NEW.log_data, - '{h}', - (NEW.log_data->'h') - (size - 1) - ); - END IF; - - NEW.log_data := jsonb_set( - NEW.log_data, - ARRAY['h', size::text], - version, - true - ); - - NEW.log_data := jsonb_set( - NEW.log_data, - '{v}', - to_jsonb(new_v) - ); - - IF history_limit IS NOT NULL AND history_limit <= size THEN - NEW.log_data := logidze_compact_history(NEW.log_data, size - history_limit + 1); - END IF; - END IF; - - return NEW; - EXCEPTION - WHEN OTHERS THEN - GET STACKED DIAGNOSTICS err_sqlstate = RETURNED_SQLSTATE, - err_message = MESSAGE_TEXT, - err_detail = PG_EXCEPTION_DETAIL, - err_hint = PG_EXCEPTION_HINT, - err_context = PG_EXCEPTION_CONTEXT, - err_schema_name = SCHEMA_NAME, - err_table_name = TABLE_NAME; - err_jsonb := jsonb_build_object( - 'returned_sqlstate', err_sqlstate, - 'message_text', err_message, - 'pg_exception_detail', err_detail, - 'pg_exception_hint', err_hint, - 'pg_exception_context', err_context, - 'schema_name', err_schema_name, - 'table_name', err_table_name - ); - err_captured = logidze_capture_exception(err_jsonb); - IF err_captured THEN - return NEW; - ELSE - RAISE; - END IF; - END; - $function$ - SQL - create_function :logidze_version, sql_definition: <<-'SQL' - CREATE OR REPLACE FUNCTION public.logidze_version(v bigint, data jsonb, ts timestamp with time zone) - RETURNS jsonb - LANGUAGE plpgsql - AS $function$ - -- version: 2 - DECLARE - buf jsonb; - BEGIN - data = data - 'log_data'; - buf := jsonb_build_object( - 'ts', - (extract(epoch from ts) * 1000)::bigint, - 'v', - v, - 'c', - data - ); - IF coalesce(current_setting('logidze.meta', true), '') <> '' THEN - buf := jsonb_insert(buf, '{m}', current_setting('logidze.meta')::jsonb); - END IF; - RETURN buf; - END; - $function$ - SQL - create_function :logidze_compact_history, sql_definition: <<-'SQL' - CREATE OR REPLACE FUNCTION public.logidze_compact_history(log_data jsonb, cutoff integer DEFAULT 1) - RETURNS jsonb - LANGUAGE 
plpgsql - AS $function$ - -- version: 1 - DECLARE - merged jsonb; - BEGIN - LOOP - merged := jsonb_build_object( - 'ts', - log_data#>'{h,1,ts}', - 'v', - log_data#>'{h,1,v}', - 'c', - (log_data#>'{h,0,c}') || (log_data#>'{h,1,c}') - ); - - IF (log_data#>'{h,1}' ? 'm') THEN - merged := jsonb_set(merged, ARRAY['m'], log_data#>'{h,1,m}'); - END IF; - - log_data := jsonb_set( - log_data, - '{h}', - jsonb_set( - log_data->'h', - '{1}', - merged - ) - 0 - ); - - cutoff := cutoff - 1; - - EXIT WHEN cutoff <= 0; - END LOOP; - - return log_data; - END; - $function$ - SQL - create_function :logidze_capture_exception, sql_definition: <<-'SQL' - CREATE OR REPLACE FUNCTION public.logidze_capture_exception(error_data jsonb) - RETURNS boolean - LANGUAGE plpgsql - AS $function$ - -- version: 1 - BEGIN - -- Feel free to change this function to change Logidze behavior on exception. - -- - -- Return `false` to raise exception or `true` to commit record changes. - -- - -- `error_data` contains: - -- - returned_sqlstate - -- - message_text - -- - pg_exception_detail - -- - pg_exception_hint - -- - pg_exception_context - -- - schema_name - -- - table_name - -- Learn more about available keys: - -- https://www.postgresql.org/docs/9.6/plpgsql-control-structures.html#PLPGSQL-EXCEPTION-DIAGNOSTICS-VALUES - -- - - return false; - END; - $function$ - SQL - create_function :logidze_filter_keys, sql_definition: <<-'SQL' - CREATE OR REPLACE FUNCTION public.logidze_filter_keys(obj jsonb, keys text[], include_columns boolean DEFAULT false) - RETURNS jsonb - LANGUAGE plpgsql - AS $function$ - -- version: 1 - DECLARE - res jsonb; - key text; - BEGIN - res := '{}'; - - IF include_columns THEN - FOREACH key IN ARRAY keys - LOOP - IF obj ? key THEN - res = jsonb_insert(res, ARRAY[key], obj->key); - END IF; - END LOOP; - ELSE - res = obj; - FOREACH key IN ARRAY keys - LOOP - res = res - key; - END LOOP; - END IF; - - RETURN res; - END; - $function$ - SQL create_function :calculate_dataset_space, sql_definition: <<-'SQL' CREATE OR REPLACE FUNCTION public.calculate_dataset_space(cid integer) RETURNS bigint @@ -2397,15 +2008,6 @@ SQL - create_trigger :logidze_on_attachments, sql_definition: <<-SQL - CREATE TRIGGER logidze_on_attachments BEFORE INSERT OR UPDATE ON public.attachments FOR EACH ROW WHEN ((COALESCE(current_setting('logidze.disabled'::text, true), ''::text) <> 'on'::text)) EXECUTE FUNCTION logidze_logger('null', 'updated_at') - SQL - create_trigger :logidze_on_containers, sql_definition: <<-SQL - CREATE TRIGGER logidze_on_containers BEFORE INSERT OR UPDATE ON public.containers FOR EACH ROW WHEN ((COALESCE(current_setting('logidze.disabled'::text, true), ''::text) <> 'on'::text)) EXECUTE FUNCTION logidze_logger('null', 'updated_at') - SQL - create_trigger :logidze_on_elemental_compositions, sql_definition: <<-SQL - CREATE TRIGGER logidze_on_elemental_compositions BEFORE INSERT OR UPDATE ON public.elemental_compositions FOR EACH ROW WHEN ((COALESCE(current_setting('logidze.disabled'::text, true), ''::text) <> 'on'::text)) EXECUTE FUNCTION logidze_logger('null', 'updated_at') - SQL create_trigger :update_users_matrix_trg, sql_definition: <<-SQL CREATE TRIGGER update_users_matrix_trg AFTER INSERT OR UPDATE ON public.matrices FOR EACH ROW EXECUTE FUNCTION update_users_matrix() SQL diff --git a/spec/api/chemotion/attachment_api_spec.rb b/spec/api/chemotion/attachment_api_spec.rb index 4aed3957d6..ac8edc437e 100644 --- a/spec/api/chemotion/attachment_api_spec.rb +++ b/spec/api/chemotion/attachment_api_spec.rb @@ -279,7 +279,34 
@@ end describe 'POST /api/v1/attachments/upload_to_inbox' do - pending 'not yet implemented' + let(:user) { create(:person) } + let(:file_upload) do + { + file: fixture_file_upload(Rails.root.join('spec/fixtures/upload.txt'), 'text/plain'), + } + end + + context 'when upload works' do + before do + post '/api/v1/attachments/upload_to_inbox', params: file_upload + end + + it 'expecting return code 201' do + expect(response).to have_http_status :created + end + end + + context 'when upload is not allowed' do + before do + user.available_space = 1 + user.save! + post '/api/v1/attachments/upload_to_inbox', params: file_upload + end + + it 'expecting return code 413' do + expect(response).to have_http_status 413 + end + end end describe 'GET /api/v1/attachments/{attachment_id}' do From 1b8ec4832c3a499b416b62c457fb41e60d19bcd7 Mon Sep 17 00:00:00 2001 From: Adrian Herrmann Date: Mon, 2 Dec 2024 09:48:25 +0000 Subject: [PATCH 03/17] refactor: rm commented out code --- app/models/attachment.rb | 20 --------- spec/models/attachment_spec.rb | 82 ---------------------------------- 2 files changed, 102 deletions(-) diff --git a/app/models/attachment.rb b/app/models/attachment.rb index f027a10081..cca51bee4a 100644 --- a/app/models/attachment.rb +++ b/app/models/attachment.rb @@ -46,16 +46,12 @@ class Attachment < ApplicationRecord validate :check_file_size before_create :generate_key - # TODO: rm this during legacy store cleaning - # before_create :add_content_type # reload to get identifier:uuid after_create :reload after_destroy :delete_file_and_thumbnail after_save :attach_file # TODO: rm this during legacy store cleaning - # after_save :update_filesize - # TODO: rm this during legacy store cleaning # after_save :add_checksum, if: :new_upload belongs_to :attachable, polymorphic: true, optional: true @@ -192,28 +188,12 @@ def rewrite_file_data! self end - def update_filesize - self.filesize = file_data.bytesize if file_data.present? - self.filesize = File.size(file_path) if file_path.present? && File.exist?(file_path) - update_column('filesize', filesize) # rubocop:disable Rails/SkipsModelValidations - end - # Rewrite read attribute for filesize def filesize # read_attribute(:filesize).presence || attachment['size'] attachment && attachment['size'] end - def add_content_type - return if content_type.present? 
- - self.content_type = begin - MimeMagic.by_path(filename)&.type - rescue StandardError - nil - end - end - # Rewrite read attribute for content_type def content_type # read_attribute(:content_type).presence || attachment['mime_type'] diff --git a/spec/models/attachment_spec.rb b/spec/models/attachment_spec.rb index 0caf2c2913..1402e808b6 100644 --- a/spec/models/attachment_spec.rb +++ b/spec/models/attachment_spec.rb @@ -199,88 +199,6 @@ end end - # describe '#update_filesize' do - # before do - # # this is just to have an easier base to compare from - # attachment.filesize = 0 - # end - # - # context 'when attachment has file_path set' do - # let(:expected_filesize) { File.size(attachment.file_path) } - # - # before do - # attachment.file_path = File.join("#{Rails.root}/spec/fixtures/upload.txt") - # end - # - # it 'sets the filesize attributes to the filesize of the pointed file' do - # expect { attachment.update_filesize }.to change(attachment, :filesize).from(0).to(expected_filesize) - # end - # end - # - # - # context 'when attachment has file_data set' do - # let(:file_data) { 'Foo Bar' } - # let(:expected_filesize) { file_data.bytesize } - # - # before do - # attachment.file_data = file_data - # attachment.file_path = nil - # end - # - # it 'sets the filesize attribute to the size of the file_data accessor\'s content' do - # expect { attachment.update_filesize }.to change(attachment, :filesize).from(0).to(expected_filesize) - # end - # end - # - # context 'when attachment has both file_path and file_data set' do - # let(:new_file_data) { 'Foo Bar' } - # let(:file_path) { File.join("#{Rails.root}/spec/fixtures/upload.txt") } - # let(:expected_filesize) { File.size(file_path) } - # - # # this has to match the logic in attachment.store.write_file, so the correct filesize is used - # it 'sets the filesize to the size of the file pointed at by file_path' do - # expect { attachment.update_filesize }.to change(attachment, :filesize).from(0).to(expected_filesize) - # end - # end - # - # context 'when attachment has neither file nor file_data' do - # before do - # attachment.file_path = nil - # attachment.file_data = nil - # end - # - # it 'does not change the filesize attribute' do - # expect { attachment.update_filesize }.not_to change(attachment, :filesize) - # end - # end - # end - # - # describe '#add_content_type' do - # context 'when content_type is present' do - # before do - # attachment.content_type = 'foobar' - # end - # - # it 'does not change the content_type field' do - # attachment.add_content_type - # - # expect(attachment.content_type).to eq 'foobar' - # end - # end - # - # context 'when content_type is missing' do - # before do - # attachment.content_type = nil - # end - # - # it 'guesses the content_type based on the file extension' do - # attachment.add_content_type - # - # expect(attachment.content_type).to eql 'text/plain' - # end - # end - # end - describe 'type_image?' 
do let(:image_attachment) { create(:attachment, :with_image) } let(:text_attachment) { create(:attachment) } From 86452ca8334f04f37614a8a84997671305815934 Mon Sep 17 00:00:00 2001 From: Adrian Herrmann Date: Mon, 2 Dec 2024 14:35:26 +0000 Subject: [PATCH 04/17] fix: error messages from attachment save --- app/api/chemotion/attachable_api.rb | 8 +++++--- app/usecases/attachments/upload_chunk_complete.rb | 13 +++++++------ 2 files changed, 12 insertions(+), 9 deletions(-) diff --git a/app/api/chemotion/attachable_api.rb b/app/api/chemotion/attachable_api.rb index 1fdc0f9821..879ad5c930 100644 --- a/app/api/chemotion/attachable_api.rb +++ b/app/api/chemotion/attachable_api.rb @@ -6,11 +6,11 @@ module Chemotion class AttachableAPI < Grape::API resource :attachable do params do - optional :files, type: Array[File], desc: 'files', default: [] + optional :files, type: [File], desc: 'files', default: [] optional :attachable_type, type: String, desc: 'attachable_type' optional :attachable_id, type: Integer, desc: 'attachable id' - optional :attfilesIdentifier, type: Array[String], desc: 'file identifier' - optional :del_files, type: Array[Integer], desc: 'del file id', default: [] + optional :attfilesIdentifier, type: [String], desc: 'file identifier' + optional :del_files, type: [Integer], desc: 'del file id', default: [] end after_validation do case params[:attachable_type] @@ -51,6 +51,8 @@ class AttachableAPI < Grape::API if a.attachable_type.in?(%w[ResearchPlan Wellplate DeviceDescription Labimotion::Element]) rp_attach_ary.push(a.id) end + rescue StandardError + status 413 ensure tempfile.close tempfile.unlink diff --git a/app/usecases/attachments/upload_chunk_complete.rb b/app/usecases/attachments/upload_chunk_complete.rb index 5029ac77e2..ba758ded12 100644 --- a/app/usecases/attachments/upload_chunk_complete.rb +++ b/app/usecases/attachments/upload_chunk_complete.rb @@ -14,7 +14,7 @@ def self.execute!(user, params) new(user, params).execute! end - def execute! # rubocop:disable Metrics/AbcSize,Metrics/MethodLength + def execute! # rubocop:disable Metrics/AbcSize file_name = ActiveStorage::Filename.new(params[:filename]).sanitized FileUtils.mkdir_p(Rails.root.join('tmp/uploads/full')) entries = Dir["#{Rails.root.join('tmp/uploads/chunks', params[:key])}*"].sort_by do |s| @@ -41,7 +41,8 @@ def execute! # rubocop:disable Metrics/AbcSize,Metrics/MethodLength FileUtils.rm_f(file_path) end - def create_attachment(file_name, file_path) # rubocop:disable Metrics/MethodLength + def create_attachment(file_name, file_path) + ok = true attachment = Attachment.new( bucket: nil, filename: file_name, @@ -54,13 +55,13 @@ def create_attachment(file_name, file_path) # rubocop:disable Metrics/MethodLeng status_text = [] begin attachment.save! - rescue StandardError - status_text = [attachment.errors.to_h[:attachment]] # rubocop:disable Rails/DeprecatedActiveModelErrorsMethods + rescue StandardError => e + status_text = [e.message.empty? ? 
attachment.errors.to_h[:attachment] : e.message] # rubocop:disable Rails/DeprecatedActiveModelErrorsMethods + ok = false end - { ok: true, statusText: status_text } + { ok: ok, statusText: status_text } end - end end end From 9d4dbd5a2bc1d5ad5d8c3bc627c259420a1c2725 Mon Sep 17 00:00:00 2001 From: Adrian Herrmann Date: Tue, 3 Dec 2024 14:02:29 +0000 Subject: [PATCH 05/17] added warning before 'keep changes' in dataset --- app/api/entities/user_entity.rb | 2 + .../container/ContainerDatasetModalContent.js | 108 +++-- db/schema.rb | 393 ++++++++++++++++++ 3 files changed, 458 insertions(+), 45 deletions(-) diff --git a/app/api/entities/user_entity.rb b/app/api/entities/user_entity.rb index cf1cbc3b46..98beee3ea3 100644 --- a/app/api/entities/user_entity.rb +++ b/app/api/entities/user_entity.rb @@ -7,6 +7,8 @@ class UserEntity < Grape::Entity expose :first_name, documentation: { type: 'String', desc: "User's name" } expose :last_name, documentation: { type: 'String', desc: "User's name" } expose :initials, documentation: { type: 'String', desc: 'initials' } + expose :used_space, documentation: { type: 'Integer', desc: "User's used storage space" } + expose :available_space, documentation: { type: 'Integer', desc: "User's available storage space (0=infinite)" } expose :samples_count, documentation: { type: 'Integer', desc: 'Sample count' } expose :reactions_count, documentation: { type: 'Integer', desc: 'Reactions count' } expose :cell_lines_count, documentation: { type: 'Integer', desc: 'Cellline Samples count' } diff --git a/app/javascript/src/components/container/ContainerDatasetModalContent.js b/app/javascript/src/components/container/ContainerDatasetModalContent.js index d46f04e495..ea7254af0d 100644 --- a/app/javascript/src/components/container/ContainerDatasetModalContent.js +++ b/app/javascript/src/components/container/ContainerDatasetModalContent.js @@ -2,7 +2,9 @@ /* eslint-disable react/destructuring-assignment */ import React, { Component } from 'react'; import ReactDOM from 'react-dom'; -import { Form, ListGroup, ListGroupItem, Button, Overlay, ButtonToolbar } from 'react-bootstrap'; +import { + Form, ListGroup, ListGroupItem, Button, Overlay, ButtonToolbar, Alert +} from 'react-bootstrap'; import Dropzone from 'react-dropzone'; import EditorFetcher from 'src/fetchers/EditorFetcher'; import SaveEditedImageWarning from 'src/apps/mydb/elements/details/researchPlans/SaveEditedImageWarning'; @@ -59,7 +61,7 @@ export class ContainerDatasetModalContent extends Component { imageEditModalShown: false, filteredAttachments: [...props.datasetContainer.attachments], prevMessages: [], - newMessages:[], + newMessages: [], filterText: '', attachmentGroups: { Original: [], @@ -81,6 +83,7 @@ export class ContainerDatasetModalContent extends Component { this.handleAttachmentRemove = this.handleAttachmentRemove.bind(this); this.handleAttachmentBackToInbox = this.handleAttachmentBackToInbox.bind(this); this.classifyAttachments = this.classifyAttachments.bind(this); + this.checkUserQuota = this.checkUserQuota.bind(this); this.state.attachmentGroups = this.classifyAttachments(props.datasetContainer.attachments); } @@ -126,8 +129,8 @@ export class ContainerDatasetModalContent extends Component { let combinedAttachments = [...filteredAttachments]; if (this.context.attachmentNotificationStore) { - combinedAttachments = this.context.attachmentNotificationStore.getCombinedAttachments( - filteredAttachments, 'Container', datasetContainer); + combinedAttachments = this.context.attachmentNotificationStore + 
.getCombinedAttachments(filteredAttachments, 'Container', datasetContainer); } return combinedAttachments; }; @@ -465,6 +468,14 @@ export class ContainerDatasetModalContent extends Component { ); } + // Sums the sizes of this dataset's non-deleted attachments and reports + // whether keeping them would push the current user over quota; an + // available_space of 0 means the user has no quota. + checkUserQuota() { + const { filteredAttachments } = this.state; + const totalSize = filteredAttachments.filter((attachment) => !attachment.is_deleted) + .reduce((acc, attachment) => acc + attachment.filesize, 0); + const { currentUser } = UserStore.getState(); + return currentUser.available_space !== 0 && totalSize > (currentUser.available_space - currentUser.used_space); + } + renderImageEditModal() { const { chosenAttachment, imageEditModalShown } = this.state; const { onChange } = this.props; @@ -525,7 +536,9 @@ export class ContainerDatasetModalContent extends Component { <div> Created: -
Created: - {formatDate(attachment.created_at)} + + {formatDate(attachment.created_at)} +
|
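{/* Hypothetical usage sketch, not lines from this patch: checkUserQuota()
    is presumably wired to the Alert imported above, so the modal can warn
    the user before "Keep changes" persists attachments that would exceed
    the remaining quota, e.g.:

    {this.checkUserQuota() && (
      <Alert variant="warning">
        Saving these attachments would exceed your available storage space.
      </Alert>
    )}
*/}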
@@ -547,32 +560,32 @@ export class ContainerDatasetModalContent extends Component {