From 647cdc0a1ce5577c9a42734a5c1d5288f13e2907 Mon Sep 17 00:00:00 2001
From: Karen Etheridge
Date: Fri, 3 Jan 2020 16:51:48 -0800
Subject: [PATCH 1/2] new script to copy user_account and user_session_token
 data between databases

This is especially handy when loading a database backup from another system
(e.g. production) into the local system, while still preserving the existing
local passwords and authentication tokens.

It is easy to copy session tokens into a pre-truncated user_session_token
table:

    pg_dump -U conch --data-only -t user_session_token conch_orig | psql -U conch conch

...but it is not so simple to update existing user_account records with their
original passwords.
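
For the record, doing the user_account half by hand looks roughly like the
raw-DBI sketch below; it is shown only for comparison with what the new
command automates. The database names and credentials are taken from the
examples here, and the 'password' column name is an illustrative assumption.

    use strict;
    use warnings;
    use DBI;

    # two direct connections; no DBIx::Class involved
    my $from = DBI->connect('dbi:Pg:dbname=conch_orig', 'conch', undef, { RaiseError => 1 });
    my $to   = DBI->connect('dbi:Pg:dbname=conch',      'conch', undef, { RaiseError => 1 });

    # plain SQL on purpose: updating through the application's schema classes
    # would run the already-hashed password through the passphrase layer again
    my $users = $from->selectall_arrayref(
        'select id, password from user_account', { Slice => {} });

    my $update = $to->prepare('update user_account set password = ? where id = ?');
    $update->execute($_->{password}, $_->{id}) for @$users;

This only covers rows that already exist on both sides, which is why a proper
command that also inserts missing users (and copies every column) is nicer.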
---
 .../modules/Conch::Command::copy_user_data.md |  41 ++++++
 lib/Conch/Command/copy_user_data.pm           | 128 ++++++++++++++++++
 2 files changed, 169 insertions(+)
 create mode 100644 docs/modules/Conch::Command::copy_user_data.md
 create mode 100644 lib/Conch/Command/copy_user_data.pm

diff --git a/docs/modules/Conch::Command::copy_user_data.md b/docs/modules/Conch::Command::copy_user_data.md
new file mode 100644
index 000000000..53cd7c890
--- /dev/null
+++ b/docs/modules/Conch::Command::copy_user_data.md
@@ -0,0 +1,41 @@
+# NAME
+
+copy\_user\_data - copy user data (user records and authentication tokens) between databases
+
+# SYNOPSIS
+
+```
+bin/conch copy_user_data [long options...]
+
+    --from        name of database to copy from (required)
+    --to          name of database to copy to (required)
+    -n --dry-run  dry-run (no changes are made)
+
+    --help        print usage message and exit
+```
+
+# DESCRIPTION
+
+Use this script after restoring a database backup to a separate database, before swapping it into place to go live, e.g.:
+
+```
+psql -U postgres --command="create database conch_prod_$(date '+%Y%m%d') owner conch"
+pg_restore -U postgres -d conch_prod_$(date '+%Y%m%d') -j 3 -v /path/to/$(date '+%Y-%m-%d')T00:00:00Z; date
+
+psql -U postgres --command="create database conch_staging_$(date '+%Y%m%d')_user_bak owner conch"
+psql -U postgres conch_staging_$(date '+%Y%m%d')_user_bak --command="CREATE EXTENSION IF NOT EXISTS pgcrypto WITH SCHEMA public"
+pg_dump -U conch --inserts -t user_account -t user_session_token conch | psql -U conch conch_staging_$(date '+%Y%m%d')_user_bak
+carton exec bin/conch copy_user_data --from conch_staging_$(date '+%Y%m%d')_user_bak --to conch_prod_$(date '+%Y%m%d')
+
+carton exec hypnotoad -s bin/conch
+psql -U postgres --command="alter database conch rename to conch_staging_$(date '+%Y%m%d')_bak" --command="alter database conch_prod_$(date '+%Y%m%d') rename to conch"
+carton exec hypnotoad bin/conch
+```
+
+# LICENSING
+
+Copyright Joyent, Inc.
+
+This Source Code Form is subject to the terms of the Mozilla Public License,
+v.2.0. If a copy of the MPL was not distributed with this file, You can obtain
+one at [http://mozilla.org/MPL/2.0/](http://mozilla.org/MPL/2.0/).
diff --git a/lib/Conch/Command/copy_user_data.pm b/lib/Conch/Command/copy_user_data.pm
new file mode 100644
index 000000000..f290f72f9
--- /dev/null
+++ b/lib/Conch/Command/copy_user_data.pm
@@ -0,0 +1,128 @@
+package Conch::Command::copy_user_data;
+
+=pod
+
+=head1 NAME
+
+copy_user_data - copy user data (user records and authentication tokens) between databases
+
+=head1 SYNOPSIS
+
+    bin/conch copy_user_data [long options...]
+
+        --from        name of database to copy from (required)
+        --to          name of database to copy to (required)
+        -n --dry-run  dry-run (no changes are made)
+
+        --help        print usage message and exit
+
+=head1 DESCRIPTION
+
+Use this script after restoring a database backup to a separate database, before swapping it into place to go live, e.g.:
+
+    psql -U postgres --command="create database conch_prod_$(date '+%Y%m%d') owner conch"
+    pg_restore -U postgres -d conch_prod_$(date '+%Y%m%d') -j 3 -v /path/to/$(date '+%Y-%m-%d')T00:00:00Z; date
+
+    psql -U postgres --command="create database conch_staging_$(date '+%Y%m%d')_user_bak owner conch"
+    psql -U postgres conch_staging_$(date '+%Y%m%d')_user_bak --command="CREATE EXTENSION IF NOT EXISTS pgcrypto WITH SCHEMA public"
+    pg_dump -U conch --inserts -t user_account -t user_session_token conch | psql -U conch conch_staging_$(date '+%Y%m%d')_user_bak
+    carton exec bin/conch copy_user_data --from conch_staging_$(date '+%Y%m%d')_user_bak --to conch_prod_$(date '+%Y%m%d')
+
+    carton exec hypnotoad -s bin/conch
+    psql -U postgres --command="alter database conch rename to conch_staging_$(date '+%Y%m%d')_bak" --command="alter database conch_prod_$(date '+%Y%m%d') rename to conch"
+    carton exec hypnotoad bin/conch
+
+=cut
+
+use Mojo::Base 'Mojolicious::Command', -signatures;
+use Getopt::Long::Descriptive;
+use Try::Tiny;
+use Data::Page;
+
+has description => 'Copy user records and authentication tokens between databases';
+
+has usage => sub { shift->extract_usage };  # extracts from SYNOPSIS
+
+sub run ($self, @opts) {
+    local @ARGV = @opts;
+    my ($opt, $usage) = describe_options(
+        # the descriptions aren't actually used anymore (mojo uses the synopsis instead)... but
+        # the 'usage' text block can be accessed with $usage->text
+        'copy_user_data %o',
+        [ 'from=s', 'name of database to copy from', { required => 1 } ],
+        [ 'to=s', 'name of database to copy to', { required => 1 } ],
+        [ 'dry-run|n', 'dry-run (no changes are made)' ],
+        [],
+        [ 'help', 'print usage message and exit', { shortcircuit => 1 } ],
+    );
+
+    my $app = $self->app;
+    my $app_name = $app->moniker.'-copy_user_data-'.$app->version_tag.' ('.$$.')';
+    my $db_credentials = Conch::DB::Util::get_credentials($app->config->{database}, $app->log);
+
+    my ($from_schema, $to_schema) = map Conch::DB->connect(
+        $db_credentials->{dsn} =~ s/(?<=dbi:Pg:dbname=)([^;]+)(?=;host=)/$_/r,
+        $db_credentials->@{qw(username password)},
+        +{
+            $db_credentials->{options}->%*,
+            on_connect_do => [ q{set application_name to '}.$app_name.q{'} ],
+        },
+    ), $opt->from, $opt->to;
+
+    my $from_user_rs = $from_schema->resultset('user_account')->hri;
+    my $to_user_rs = $to_schema->resultset('user_account');
+
+    if ($opt->dry_run) {
+        say '# '.$from_user_rs->count.' user records would be inserted or updated.';
+    }
+    else {
+        my ($updated, $created) = (0,0);
+        while (my $user_data = $from_user_rs->next) {
+            # update_or_create calls update($data) which calls set_inflated_columns,
+            # which will corrupt password entries
+            my $to_rs = $to_user_rs->hri->search({ id => $user_data->{id} });
+            if ($to_rs->exists) {
+                $to_rs->update($user_data);
+                ++$updated;
+            }
+            else {
+                my $row = $to_user_rs->new_result({});
+                # we do not use set_columns, because DBIx::Class::PassphraseColumn
+                # inappropriately wraps it to encrypt the data.
+                $row->store_column($_, $user_data->{$_}) for keys %$user_data;
+                $row->insert;
+                ++$created;
+            }
+        }
+        say '# user_account: '.$created.' rows inserted, '.$updated.' updated.';
+    }
+
+    my $from_token_rs = $from_schema->resultset('user_session_token')->hri;
+    my $to_token_rs = $to_schema->resultset('user_session_token');
+
+    if ($opt->dry_run) {
+        say '# '.$from_token_rs->count.' user_session_token rows would be inserted.';
+    }
+    else {
+        my $count = $from_token_rs->count;
+        $to_token_rs->delete;
+        $to_token_rs->populate([ $from_token_rs->all ]);
+        say '# user_session_token: '.$count.' rows inserted (all previous rows removed)';
+    }
+}
+
+1;
+__END__
+
+=pod
+
+=head1 LICENSING
+
+Copyright Joyent, Inc.
+
+This Source Code Form is subject to the terms of the Mozilla Public License,
+v.2.0. If a copy of the MPL was not distributed with this file, You can obtain
+one at L<http://mozilla.org/MPL/2.0/>.
+
+=cut
+# vim: set ts=4 sts=4 sw=4 et :

From cd3bd331e509c25a1dff7b1019e9376936942a1f Mon Sep 17 00:00:00 2001
From: Karen Etheridge
Date: Tue, 7 Jan 2020 09:30:52 -0800
Subject: [PATCH 2/2] bump Time::Local prereq for y2.02k bug
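
The 'y2.02k' problem comes down to Time::Local's historical handling of
two-digit years, which are interpreted relative to a rolling window around
the current year. A small illustration (not part of this change; the
*_modern functions are assumed to be available, which is what the bumped
prereq is intended to guarantee):

    use strict;
    use warnings;
    use Time::Local qw(timelocal timelocal_modern);
    use POSIX qw(strftime);

    # the classic interface guesses the century, so this result depends on
    # the year in which it is run
    my $guessed = timelocal(0, 0, 0, 3, 0, 20);    # "year 20" -- which century?
    print strftime('%Y-%m-%d', localtime $guessed), "\n";

    # the *_modern variant takes the year literally
    my $literal = timelocal_modern(0, 0, 0, 3, 0, 2020);
    print strftime('%Y-%m-%d', localtime $literal), "\n";    # 2020-01-03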
---
 cpanfile          |  1 +
 cpanfile.snapshot | 11 +++++++++++
 2 files changed, 12 insertions(+)

diff --git a/cpanfile b/cpanfile
index a2edec163..91a10cfc3 100644
--- a/cpanfile
+++ b/cpanfile
@@ -16,6 +16,7 @@ requires 'Try::Tiny';
 requires 'Time::HiRes';
 requires 'Time::Moment', '>= 0.43'; # for PR#28, fixes use of stdbool.h (thanks Dale)
 requires 'JSON::Validator', '3.04';
+requires 'Time::Local', '1.27'; # https://pandorafms.com/blog/2020-perl/
 requires 'Data::Validate::IP'; # for json schema validation of 'ipv4', 'ipv6' types
 requires 'HTTP::Tiny';
 requires 'Safe::Isa';
diff --git a/cpanfile.snapshot b/cpanfile.snapshot
index 19bc1ad69..8fa3ab225 100644
--- a/cpanfile.snapshot
+++ b/cpanfile.snapshot
@@ -3654,6 +3654,17 @@ DISTRIBUTIONS
     requirements:
       ExtUtils::MakeMaker 0
       Test::More 0
+  Time-Local-1.28
+    pathname: D/DR/DROLSKY/Time-Local-1.28.tar.gz
+    provides:
+      Time::Local 1.28
+    requirements:
+      Carp 0
+      Exporter 0
+      ExtUtils::MakeMaker 0
+      constant 0
+      parent 0
+      strict 0
   Time-Moment-0.44
     pathname: C/CH/CHANSEN/Time-Moment-0.44.tar.gz
     provides: