diff --git a/deploy/docker-compose.yml b/deploy/docker-compose.yml index f708bc36..d5a1ed78 100644 --- a/deploy/docker-compose.yml +++ b/deploy/docker-compose.yml @@ -21,6 +21,38 @@ services: max-size: "200k" max-file: "10" + sci054edapi: + image: 235945186688.dkr.ecr.ap-southeast-2.amazonaws.com/edapi:latest + restart: always + env_file: + - sci054.env + depends_on: + - db + - sci054scs + networks: + - backend + logging: + driver: json-file + options: + max-size: "200k" + max-file: "10" + + sci054scs: + image: 235945186688.dkr.ecr.ap-southeast-2.amazonaws.com/supplychainserver:latest + restart: always + env_file: + - sci054.env + depends_on: + - db + networks: + - backend + command: --orhost or --orport 8200 --init-db -c "postgresql://${POSTGRES_USER}:${POSTGRES_PASSWORD}@db/sci054supplychainserver" + logging: + driver: json-file + options: + max-size: "200k" + max-file: "10" + sci041edapi: image: 235945186688.dkr.ecr.ap-southeast-2.amazonaws.com/edapi:latest restart: always @@ -53,6 +85,38 @@ services: max-size: "200k" max-file: "10" + sci042edapi: + image: 235945186688.dkr.ecr.ap-southeast-2.amazonaws.com/edapi:latest + restart: always + env_file: + - sci042.env + depends_on: + - db + - sci042scs + networks: + - backend + logging: + driver: json-file + options: + max-size: "200k" + max-file: "10" + + sci042scs: + image: 235945186688.dkr.ecr.ap-southeast-2.amazonaws.com/supplychainserver:latest + restart: always + env_file: + - sci042.env + depends_on: + - db + networks: + - backend + command: --orhost or --orport 8200 --init-db -c "postgresql://${POSTGRES_USER}:${POSTGRES_PASSWORD}@db/sci042supplychainserver" + logging: + driver: json-file + options: + max-size: "200k" + max-file: "10" + sci033edapi: image: 235945186688.dkr.ecr.ap-southeast-2.amazonaws.com/edapi:latest restart: always diff --git a/deploy/get_and_start_new_version.sh b/deploy/get_and_start_new_version.sh index 62d57392..2296558b 100644 --- a/deploy/get_and_start_new_version.sh +++ b/deploy/get_and_start_new_version.sh @@ -24,7 +24,9 @@ OR_USER=$(aws --region ap-southeast-2 secretsmanager get-secret-value --secret-i OR_PASSWORD=$(aws --region ap-southeast-2 secretsmanager get-secret-value --secret-id development | jq -r .SecretString | sed 's/\\//g' | jq -r .OR_PASSWORD) OAUTH_SUB=$(aws --region ap-southeast-2 secretsmanager get-secret-value --secret-id development | jq -r .SecretString | sed 's/\\//g' | jq -r .OAUTH_SUB) SCI033_OAUTH_SUB=$(aws --region ap-southeast-2 secretsmanager get-secret-value --secret-id development | jq -r .SecretString | sed 's/\\//g' | jq -r .SCI033_OAUTH_SUB) +SCI042_OAUTH_SUB=$(aws --region ap-southeast-2 secretsmanager get-secret-value --secret-id development | jq -r .SecretString | sed 's/\\//g' | jq -r .SCI042_OAUTH_SUB) SCI041_OAUTH_SUB=$(aws --region ap-southeast-2 secretsmanager get-secret-value --secret-id development | jq -r .SecretString | sed 's/\\//g' | jq -r .SCI041_OAUTH_SUB) +SCI054_OAUTH_SUB=$(aws --region ap-southeast-2 secretsmanager get-secret-value --secret-id development | jq -r .SecretString | sed 's/\\//g' | jq -r .SCI054_OAUTH_SUB) echo "POSTGRES_USER=$POSTGRES_USER POSTGRES_PASSWORD=$POSTGRES_PASSWORD @@ -37,11 +39,22 @@ EDAPI_DB_CONN=postgresql://${POSTGRES_USER}:${POSTGRES_PASSWORD}@db/sci033edapi DEST_HOST=sci033scs DEST_PORT=8000" > '/home/ec2-user/sci033.env' +echo "JWK_CLIENT_IDS=$SCI042_OAUTH_SUB +EDAPI_DB_CONN=postgresql://${POSTGRES_USER}:${POSTGRES_PASSWORD}@db/sci042edapi +DEST_HOST=sci042scs +DEST_PORT=8000" > '/home/ec2-user/sci042.env' + echo 
"JWK_CLIENT_IDS=$SCI041_OAUTH_SUB EDAPI_DB_CONN=postgresql://${POSTGRES_USER}:${POSTGRES_PASSWORD}@db/sci041edapi DEST_HOST=sci041scs DEST_PORT=8000" > '/home/ec2-user/sci041.env' +echo "JWK_CLIENT_IDS=$SCI054_OAUTH_SUB +EDAPI_DB_CONN=postgresql://${POSTGRES_USER}:${POSTGRES_PASSWORD}@db/sci054edapi +DEST_HOST=sci054scs +DEST_PORT=8000" > '/home/ec2-user/sci054.env' + + # Start the database (either an empty postgres container, or an existing database) docker-compose up -d db @@ -49,7 +62,7 @@ echo Waiting 10 seconds for the db to finish starting... sleep 10 # Start the services -docker-compose up -d web orgRegistry sci041edapi sci041scs sci033edapi sci033scs # private-ethereum-blockchain blockchain-api-server +docker-compose up -d web orgRegistry sci033edapi sci033scs sci042edapi sci042scs sci041edapi sci041scs sci054edapi sci054scs # private-ethereum-blockchain blockchain-api-server # remove all unused docker images and exited containers # docker system prune -a --force diff --git a/projects/mirza-common-haskell/src/Mirza/Common/Database.hs b/projects/mirza-common-haskell/src/Mirza/Common/Database.hs index ed6cbe40..59dc7a07 100644 --- a/projects/mirza-common-haskell/src/Mirza/Common/Database.hs +++ b/projects/mirza-common-haskell/src/Mirza/Common/Database.hs @@ -4,40 +4,68 @@ {-# LANGUAGE MultiParamTypeClasses #-} {-# LANGUAGE OverloadedStrings #-} {-# LANGUAGE TypeApplications #-} +{-# LANGUAGE TemplateHaskell #-} module Mirza.Common.Database ( Migration , runMigrationSimple , dropTablesSimple , createTrigger + , createTriggerFunction + , SchemaVerificationResult(..) + , checkSchemaAgainstBeam ) where import Mirza.Common.Types import Control.Lens (view, _1) -import Control.Monad ( void, forM_ ) -import Data.List ( drop, zip ) +import Control.Monad (void, forM_) +import Data.List (drop, zip) import Data.String (fromString) -import Data.Text ( unpack ) +import Data.Text (unpack) +import Database.Beam.Migrate.Simple (verifySchema, VerificationResult(..)) +import Database.Beam.Migrate.Types (CheckedDatabaseSettings) +import Database.Beam.Postgres (Postgres, runBeamPostgres) +import Database.Beam.Postgres.Migrate (migrationBackend) +import Database.Beam.Schema.Tables (Database) import Database.PostgreSQL.Simple +import Katip + +data SchemaVerificationResult = SchemaMatch | SchemaMismatch [String] + +checkSchemaAgainstBeam :: ( Member c '[HasLogging, HasDB] + , Member err '[AsSqlError] + , Database Postgres db + ) + => c -> CheckedDatabaseSettings Postgres db -> IO (Either err SchemaVerificationResult) +checkSchemaAgainstBeam context db = runAppM context $ do + runDb $ do + conn <- view _1 + liftIO $ runBeamPostgres conn $ verifySchema migrationBackend db >>= \case + VerificationSucceeded -> pure SchemaMatch + VerificationFailed xs -> pure $ SchemaMismatch $ fmap show xs type Migration = Connection -> IO () runMigrationSimple :: ( Member c '[HasLogging, HasDB], Member err '[AsSqlError]) => c -> [Migration] -> IO (Either err ()) runMigrationSimple c migrations = runAppM c $ runDb $ do conn <- view _1 - liftIO $ do - _ <- execute_ conn "SET client_min_messages = WARNING;" - n <- getVersion conn - forM_ (drop n (zip @Int [1..] migrations)) $ \(i, m) -> do - m conn - execute conn "INSERT INTO version (number, executed) VALUES (?, now());" (Only i) + n <- liftIO $ getVersion conn + + $(logTM) InfoS $ logStr $ mconcat [ "Database schema @ " + , show n + , "/" + , show (length migrations) + ] + + forM_ (drop n (zip @Int [1..] 
migrations)) $ \(i, m) -> liftIO $ do + m conn + execute conn "INSERT INTO version (number, executed) VALUES (?, now());" (Only i) dropTablesSimple :: ( Member c '[HasLogging, HasDB], Member err '[AsSqlError]) => c -> IO (Either err ()) dropTablesSimple c = runAppM c $ runDb $ do conn <- view _1 liftIO $ do - _ <- execute_ conn "SET client_min_messages = WARNING;" tables <- query_ conn "SELECT table_name FROM information_schema.tables WHERE table_schema = 'public';" forM_ tables $ \t -> execute_ conn $ "DROP TABLE IF EXISTS " <> (fromString (unpack (fromOnly t))) <> " CASCADE;" @@ -52,15 +80,17 @@ getVersion conn = do _ -> pure 0 _ -> 0 <$ execute_ conn "CREATE TABLE version (number INTEGER NOT NULL PRIMARY KEY, executed timestamptz NOT NULL);" -createTrigger :: Connection -> Query -> IO () -createTrigger conn tName = void $ execute_ conn $ +createTriggerFunction :: Connection -> IO () +createTriggerFunction conn = void $ execute_ conn $ "CREATE OR REPLACE FUNCTION sync_lastmod() RETURNS trigger AS $$ \ \BEGIN \ \NEW.last_update := NOW() AT TIME ZONE 'UTC'; \ \RETURN NEW; \ \END; \ - \$$ LANGUAGE plpgsql; \ - \DROP TRIGGER IF EXISTS sync_lastmod ON \"" <> tName <> "\";" <> + \$$ LANGUAGE plpgsql;" + +createTrigger :: Connection -> Query -> IO () +createTrigger conn tName = void $ execute_ conn $ "CREATE TRIGGER sync_lastmod \ - \BEFORE UPDATE OR INSERT ON \"" <> tName <> - "\" FOR EACH ROW EXECUTE PROCEDURE sync_lastmod();" + \ BEFORE UPDATE OR INSERT ON \"" <> tName <> "\" \ + \ FOR EACH ROW EXECUTE PROCEDURE sync_lastmod();" diff --git a/projects/mirza-common-haskell/src/Mirza/Common/Types.hs b/projects/mirza-common-haskell/src/Mirza/Common/Types.hs index 73224788..7edcbb40 100644 --- a/projects/mirza-common-haskell/src/Mirza/Common/Types.hs +++ b/projects/mirza-common-haskell/src/Mirza/Common/Types.hs @@ -267,7 +267,6 @@ class AsSqlError a where instance AsSqlError SqlError where _SqlError = id - -- Logging classes -- =============== diff --git a/projects/or_scs/Mirza.cabal b/projects/or_scs/Mirza.cabal index 2f3d78ac..f3d6b78d 100644 --- a/projects/or_scs/Mirza.cabal +++ b/projects/or_scs/Mirza.cabal @@ -40,6 +40,7 @@ library , Mirza.SupplyChain.Database.Migrate , Mirza.SupplyChain.Database.Schema , Mirza.SupplyChain.Database.Schema.V0001 + , Mirza.SupplyChain.Database.Schema.SQL.V0001 , Mirza.SupplyChain.Handlers.Health , Mirza.SupplyChain.EventUtils , Mirza.SupplyChain.Handlers.Queries @@ -68,6 +69,7 @@ library , hashable , hoist-error >= 0.2 , http-client + , http-media , insert-ordered-containers , jose , katip >= 0.5.4 diff --git a/projects/or_scs/src/Mirza/OrgRegistry/Database/Migrate.hs b/projects/or_scs/src/Mirza/OrgRegistry/Database/Migrate.hs index 28ad59cd..18024880 100644 --- a/projects/or_scs/src/Mirza/OrgRegistry/Database/Migrate.hs +++ b/projects/or_scs/src/Mirza/OrgRegistry/Database/Migrate.hs @@ -3,14 +3,20 @@ module Mirza.OrgRegistry.Database.Migrate ( migrations , dropTablesSimple ) where -import Database.PostgreSQL.Simple -import Mirza.Common.Database +import Mirza.Common.Database + +import Data.String ( fromString ) +import Database.PostgreSQL.Simple + migrations :: [Migration] migrations = [ m_0001 ] m_0001 :: Migration m_0001 conn = do + [(Only x)] <- query_ conn "SELECT current_database();" + _ <- execute_ conn $ "ALTER DATABASE " <> fromString x <>" SET client_min_messages TO WARNING"; + _ <- execute_ conn "CREATE TABLE orgs (org_gs1_company_prefix TEXT PRIMARY KEY, org_name TEXT NOT NULL, org_url TEXT NOT NULL, last_update TIMESTAMP);" _ <- execute_ conn 
"CREATE TABLE location (location_gln TEXT PRIMARY KEY, location_org_id TEXT NOT NULL REFERENCES orgs(org_gs1_company_prefix) ON DELETE CASCADE, last_update TIMESTAMP);" _ <- execute_ conn "CREATE TABLE users (oauth_sub TEXT PRIMARY KEY, last_update TIMESTAMP);" @@ -19,6 +25,7 @@ m_0001 conn = do _ <- execute_ conn "CREATE TABLE org_mapping (mapping_org_id TEXT NOT NULL REFERENCES orgs(org_gs1_company_prefix) ON DELETE CASCADE, mapping_user_oauth_sub TEXT NOT NULL REFERENCES users(oauth_sub) ON DELETE CASCADE, last_update TIMESTAMP, PRIMARY KEY(mapping_org_id, mapping_user_oauth_sub))" _ <- execute_ conn "CREATE TABLE keys (key_id UUID PRIMARY KEY, key_org TEXT NOT NULL REFERENCES orgs(org_gs1_company_prefix) ON DELETE CASCADE, jwk JSON NOT NULL, creation_time TIMESTAMP, revocation_time TIMESTAMP, revoking_user_id TEXT REFERENCES users(oauth_sub) ON DELETE CASCADE, expiration_time TIMESTAMP, last_update TIMESTAMP);" + createTriggerFunction conn createTrigger conn "orgs" createTrigger conn "location" createTrigger conn "users" diff --git a/projects/or_scs/src/Mirza/OrgRegistry/Main.hs b/projects/or_scs/src/Mirza/OrgRegistry/Main.hs index 3b694a5e..20313696 100644 --- a/projects/or_scs/src/Mirza/OrgRegistry/Main.hs +++ b/projects/or_scs/src/Mirza/OrgRegistry/Main.hs @@ -9,6 +9,7 @@ module Mirza.OrgRegistry.Main where import Mirza.Common.Types as CT import Mirza.Common.Utils (fetchJWKS) +import Mirza.Common.Database (SchemaVerificationResult(..), checkSchemaAgainstBeam) import Mirza.OrgRegistry.API (API, ServerAPI, api) import Mirza.OrgRegistry.Auth import Mirza.OrgRegistry.Database.Migrate @@ -49,10 +50,11 @@ import Text.Email.Parser (addrSpec) import Control.Exception (finally) import Control.Lens (review) -import Control.Monad (when) +import Control.Monad (when, forM_) import Data.Either (fromRight) import Data.Maybe (fromMaybe, listToMaybe) import Katip as K +import System.Exit (exitFailure) import System.IO (FilePath, IOMode (AppendMode), hPutStr, openFile, stderr, @@ -95,6 +97,7 @@ data InitOptionsOR = InitOptionsOR data ExecMode = RunServer RunServerOptions | InitDb + | CheckDb | UserAction UserCommand | OrgAction OrgCommand | PopulateDatabase -- TODO: This option should be removed....this is for testing and debugging only. @@ -111,6 +114,7 @@ data RunServerOptions = RunServerOptions { runServerOptionsPortNumber :: Int , runServerOptionsOAuthJWKPath :: URI , runServerOptionsOAuthAudience :: Text + , runServerOptionsAutoMigrate :: Bool } data UserCommand @@ -140,8 +144,9 @@ multiplexInitOptions :: InitOptionsOR -> IO () multiplexInitOptions (InitOptionsOR opts mode) = case mode of RunServer rsOpts -> launchServer opts rsOpts InitDb -> runMigration opts + CheckDb -> checkMigration opts UserAction uc -> runUserCommand opts uc - OrgAction bc -> runOrgCommand opts bc + OrgAction bc -> runOrgCommand opts bc PopulateDatabase -> runPopulateDatabase opts Bootstrap oAuthSubSuffix companyPrefix -> runBootstrap opts oAuthSubSuffix companyPrefix @@ -156,13 +161,21 @@ launchServer opts rso = do minimalContext <- initORContext opts completeContext <- addServerOptions minimalContext rso app <- initApplication completeContext + + when (runServerOptionsAutoMigrate rso) $ do + putStr "Migrating database... " + either (error . show) pure =<< + runMigrationSimple @ORContextComplete @SqlError completeContext migrations + putStrLn "Done." 
+ + mids <- initMiddleware opts rso - putStrLn $ "http://localhost:" ++ show portNumber ++ "/swagger-ui/" + putStrLn $ "Listening on http://localhost:" ++ show portNumber ++ "/swagger-ui/" Warp.run (fromIntegral portNumber) (mids app) `finally` closeScribes (ORT._orKatipLogEnv completeContext) addServerOptions :: ORContextMinimal -> RunServerOptions -> IO ORContextComplete -addServerOptions minimalContext (RunServerOptions _port oAuthPublicKeyRef oauthAudience) = addAuthOptions minimalContext oAuthPublicKeyRef oauthAudience +addServerOptions minimalContext (RunServerOptions _port oAuthPublicKeyRef oauthAudience _) = addAuthOptions minimalContext oAuthPublicKeyRef oauthAudience addAuthOptions :: ORContextMinimal -> URI -> Text -> IO ORContextComplete @@ -194,7 +207,7 @@ getJWKS _ = error $ "Unsupported URI schema type." initORContext :: ServerOptionsOR -> IO ORT.ORContextMinimal initORContext (ServerOptionsOR dbConnStr lev mlogPath envT) = do logHandle <- maybe (pure stdout) (flip openFile AppendMode) mlogPath - hPutStr stderr $ "(Logging will be to: " ++ fromMaybe "stdout" mlogPath ++ ") " + hPutStr stderr $ "Logging will be to: " ++ fromMaybe "stdout" mlogPath ++ "\n" handleScribe <- mkHandleScribe ColorIfTerminal logHandle lev V3 logEnv <- initLogEnv "orgRegistry" (Environment . pack . show $ envT) >>= registerScribe "stdout" handleScribe defaultScribeSettings @@ -246,6 +259,18 @@ runMigration opts = do res <- runMigrationSimple @ORContextMinimal @SqlError ctx migrations print res +checkMigration :: ServerOptionsOR -> IO () +checkMigration opts = do + ctx <- initORContext opts + res <- checkSchemaAgainstBeam @ORContextMinimal @SqlError ctx checkedOrgRegistryDB + case res of + Left e -> print ("Couldn't check schema: " <> show e) >> exitFailure + Right SchemaMatch -> print @String "Schema match" + Right (SchemaMismatch xs) -> do + print @String "Schema doesn't match: " + forM_ xs print + exitFailure + -------------------------------------------------------------------------------- -- User Command @@ -401,8 +426,9 @@ serverOptions = InitOptionsOR , standardCommand "initdb" initDb "Initialise the Database (Note: This command only works if the database \ \is empty and can't be used for migrations or if the database already \ \contains the schema." + , standardCommand "checkdb" (pure CheckDb) "Compare the internal beam schema against the database schema" , standardCommand "user" userCommand "Interactively add new users" - , standardCommand "org" orgCommand "Operations on orgs" + , standardCommand "org" orgCommand "Operations on orgs" , standardCommand "populate" populateDb "Populate the database with dummy test data" , standardCommand "bootstrap" bootstrap "Bootstrap a user into the database." ] @@ -431,6 +457,11 @@ runServer = RunServer <$> (RunServerOptions <> short 'a' <> help "OAuth audience claim to match against user tokens." 
) + <*> switch + ( + long "migrate" + <> help "Perform database migrations before starting the service" + ) ) parsedServerOptions :: Parser ServerOptionsOR @@ -465,7 +496,6 @@ parsedServerOptions = ServerOptionsOR initDb :: Parser ExecMode initDb = pure InitDb - userCommand :: Parser ExecMode userCommand = UserAction <$> userCommands diff --git a/projects/or_scs/src/Mirza/SupplyChain/API.hs b/projects/or_scs/src/Mirza/SupplyChain/API.hs index f0121486..ac465675 100644 --- a/projects/or_scs/src/Mirza/SupplyChain/API.hs +++ b/projects/or_scs/src/Mirza/SupplyChain/API.hs @@ -22,8 +22,11 @@ import Mirza.SupplyChain.Types as ST import Mirza.SupplyChain.Handlers.UXUtils (PrettyEventResponse (..)) import qualified Data.GS1.Event as Ev +import Data.GS1.Parser.Parser ( parseBS ) import Data.GS1.EventId as EvId +import Data.ByteString.Lazy ( ByteString ) +import Network.HTTP.Media ( (//) ) import Servant import Servant.Swagger.UI @@ -39,6 +42,22 @@ serverAPI :: Proxy ServerAPI serverAPI = Proxy +data XML + +class FromXML a where + fromXML :: ByteString -> Either String a + +instance Accept XML where + contentType _ = "text" // "xml" + +instance FromXML a => MimeUnrender XML a where + mimeUnrender _ = fromXML + +instance FromXML Ev.Event where + fromXML bs = case parseBS bs of + [] -> Left "No events in document" + (x:_) -> either (Left . show) Right x + type ServerAPI = -- Health "healthz" :> Get '[JSON] HealthResponse @@ -56,7 +75,7 @@ type ServerAPI = :> Capture "eventId" EventId :> Get '[JSON] EventInfo -- Event Registration - :<|> "event" :> ReqBody '[JSON] Ev.Event + :<|> "event" :> ReqBody '[JSON, XML] Ev.Event :> Post '[JSON] (EventInfo, Schema.EventId) -- UI :<|> "prototype" :> "list" :> "events" diff --git a/projects/or_scs/src/Mirza/SupplyChain/Database/Migrate.hs b/projects/or_scs/src/Mirza/SupplyChain/Database/Migrate.hs index da896dec..b125d870 100644 --- a/projects/or_scs/src/Mirza/SupplyChain/Database/Migrate.hs +++ b/projects/or_scs/src/Mirza/SupplyChain/Database/Migrate.hs @@ -9,7 +9,9 @@ module Mirza.SupplyChain.Database.Migrate where import Mirza.SupplyChain.Database.Schema.V0001 (migration) +import Mirza.SupplyChain.Database.Schema.SQL.V0001 ( m_0001 ) +import Mirza.Common.Database (runMigrationSimple) import Mirza.Common.Types import Mirza.Common.Utils import Mirza.SupplyChain.Database.Schema (supplyChainDb) @@ -20,15 +22,13 @@ import Control.Monad (void) import System.Exit (exitFailure) import System.IO (hPutStrLn, stderr) -import Data.ByteString.Char8 (ByteString) import Database.Beam () import Database.Beam.Backend (runNoReturn) import Database.Beam.Migrate.Types (executeMigration) import Database.Beam.Postgres (Connection, Pg, runBeamPostgres, runBeamPostgresDebug) -import Database.PostgreSQL.Simple (SqlError, - connectPostgreSQL) +import Database.PostgreSQL.Simple (SqlError) @@ -56,12 +56,8 @@ tryCreateSchema runSilently conn = E.catch (createSchema runSilently conn) handl hPutStrLn stderr $ "Migration failed with error: " <> show err exitFailure -migrate :: (Member context '[HasLogging, HasDB]) - => context -> ByteString -> IO () -migrate ctx connStr = do - conn <- connectPostgreSQL connStr - r <- runMigrationWithTriggers conn ctx - case r of - Left (err :: SqlError) -> print $ "Table could not be created. 
Error: " <> show err - Right _succ -> print $ "Successfully created table with conn: " <> show connStr - +migrate :: ( Member context '[HasLogging, HasDB] ) => context -> IO (Either SqlError ()) +migrate ctx = runMigrationSimple ctx migrations + where + migrations = [ m_0001 + ] diff --git a/projects/or_scs/src/Mirza/SupplyChain/Database/Schema/SQL/V0001.hs b/projects/or_scs/src/Mirza/SupplyChain/Database/Schema/SQL/V0001.hs new file mode 100644 index 00000000..36543e85 --- /dev/null +++ b/projects/or_scs/src/Mirza/SupplyChain/Database/Schema/SQL/V0001.hs @@ -0,0 +1,197 @@ +{-# LANGUAGE QuasiQuotes #-} + +module Mirza.SupplyChain.Database.Schema.SQL.V0001 where + +import Mirza.Common.Database + +import Data.String ( fromString ) +import Database.PostgreSQL.Simple +import Database.PostgreSQL.Simple.SqlQQ (sql) + + +-- SQL + +m_0001 :: Migration +m_0001 conn = do + [(Only x)] <- query_ conn "SELECT current_database();" + _ <- execute_ conn $ "ALTER DATABASE " <> fromString x <>" SET client_min_messages TO WARNING"; + + + _ <- execute_ conn [sql| +CREATE OR REPLACE FUNCTION public.sync_lastmod() RETURNS trigger + LANGUAGE plpgsql + AS $$ BEGIN NEW.last_update := NOW() AT TIME ZONE 'UTC'; RETURN NEW; END; $$; +|] + + _ <- execute_ conn [sql| +CREATE TABLE public.orgs ( + last_update timestamp without time zone DEFAULT now(), + org_gs1_company_prefix text PRIMARY KEY, + org_name character varying(120) NOT NULL +); +|] + + _ <- execute_ conn [sql| +CREATE TABLE public.locations ( + last_update timestamp without time zone DEFAULT now(), + location_id text PRIMARY KEY, + location_org_id text REFERENCES orgs(org_gs1_company_prefix) ON DELETE CASCADE, + location_function character varying(120) NOT NULL, + location_site_name character varying(120) NOT NULL, + location_address character varying(120) NOT NULL, + location_lat double precision, + location_long double precision +); +|] + + _ <- execute_ conn [sql| +CREATE TABLE public.events ( + last_update timestamp without time zone DEFAULT now(), + event_id uuid PRIMARY KEY, + event_foreign_event_id uuid, + event_json json NOT NULL, + event_to_sign bytea NOT NULL UNIQUE +); +|] + _ <- execute_ conn [sql| +CREATE TABLE public.labels ( + last_update timestamp without time zone DEFAULT now(), + label_id uuid PRIMARY KEY, + label_gs1_company_prefix text NOT NULL, + label_item_reference text, + label_serial_number text, + label_state character varying(120), + label_lot text, + label_sgtin_filter_value text, + label_asset_type text, + label_quantity_amount double precision, + label_quantity_uom text, + label_urn text NOT NULL UNIQUE +); +|] + _ <- execute_ conn [sql| +CREATE TABLE public.label_events ( + last_update timestamp without time zone DEFAULT now(), + label_event_id uuid PRIMARY KEY, + label_event_label_id uuid REFERENCES labels(label_id) ON DELETE CASCADE, + label_event_event_id uuid REFERENCES events(event_id) ON DELETE CASCADE, + label_event_label_type text +); +|] + _ <- execute_ conn [sql| +CREATE TABLE public.whats ( + last_update timestamp without time zone DEFAULT now(), + what_id uuid PRIMARY KEY, + what_event_type text, + what_action text, + what_parent uuid REFERENCES labels(label_id) ON DELETE CASCADE, + what_org_transaction_id uuid, + what_transformation_id uuid, + what_event_id uuid REFERENCES events(event_id) ON DELETE CASCADE +); +|] + _ <- execute_ conn [sql| +CREATE TABLE public.what_labels ( + last_update timestamp without time zone DEFAULT now(), + what_label_id uuid PRIMARY KEY, + what_label_what_id uuid REFERENCES 
whats(what_id) ON DELETE CASCADE, + what_label_label_id uuid REFERENCES labels(label_id) ON DELETE CASCADE, + what_label_label_type text +); +|] + _ <- execute_ conn [sql| +CREATE TABLE public.org_transactions ( + last_update timestamp without time zone DEFAULT now(), + org_transaction_id uuid PRIMARY KEY, + org_transaction_type_id character varying(120), + org_transaction_id_urn character varying(120), + org_transaction_event_id uuid REFERENCES events(event_id) ON DELETE CASCADE +); +|] + _ <- execute_ conn [sql| +CREATE TABLE public.transformations ( + last_update timestamp without time zone DEFAULT now(), + transformation_id uuid PRIMARY KEY, + transformation_description character varying(120) NOT NULL, + transformation_org_id text REFERENCES orgs(org_gs1_company_prefix) ON DELETE CASCADE +); +|] + _ <- execute_ conn [sql| +CREATE TABLE public.whens ( + last_update timestamp without time zone DEFAULT now(), + when_id uuid PRIMARY KEY, + when_event_time timestamp without time zone NOT NULL, + when_record_time timestamp without time zone, + when_time_zone character varying(10) NOT NULL, + when_event_id uuid REFERENCES events(event_id) ON DELETE CASCADE +); +|] + _ <- execute_ conn [sql| +CREATE TABLE public.wheres ( + last_update timestamp without time zone DEFAULT now(), + where_id uuid PRIMARY KEY, + where_gs1_company_prefix text, + where_source_dest_type text, + where_gs1_location_id text, + where_location_field text NOT NULL, + where_sgln_ext text, + where_geo text, + where_event_id uuid REFERENCES events(event_id) ON DELETE CASCADE +); +|] + _ <- execute_ conn [sql| +CREATE TABLE public.whys ( + last_update timestamp without time zone DEFAULT now(), + why_id uuid PRIMARY KEY, + why_org_step text, + why_disposition text, + why_event_id uuid REFERENCES events(event_id) ON DELETE CASCADE +); +|] + _ <- execute_ conn [sql| +CREATE TABLE public.blockchain ( + last_update timestamp without time zone DEFAULT now(), + blockchain_id uuid PRIMARY KEY, + blockchain_event_id uuid NOT NULL REFERENCES events(event_id) ON DELETE CASCADE, + blockchain_hash bytea NOT NULL, + blockchain_address text NOT NULL, + blockchain_foreign_id integer NOT NULL +); +|] + _ <- execute_ conn [sql| +CREATE TABLE public.hashes ( + last_update timestamp without time zone DEFAULT now(), + hashes_id uuid PRIMARY KEY, + hashes_event_id uuid NOT NULL REFERENCES events(event_id) ON DELETE CASCADE, + hashes_hash bytea NOT NULL, + hashes_is_signed boolean NOT NULL, + hashes_key_id uuid NOT NULL +); +|] + _ <- execute_ conn [sql| +CREATE TABLE public.signatures ( + last_update timestamp without time zone DEFAULT now(), + signature_id uuid PRIMARY KEY, + signature_event_id uuid NOT NULL REFERENCES events(event_id) ON DELETE CASCADE, + signature_key_id uuid NOT NULL, + signature_signature json NOT NULL, + signature_timestamp timestamp without time zone NOT NULL +); +|] + + createTriggerFunction conn + createTrigger conn "orgs" + createTrigger conn "labels" + createTrigger conn "transformations" + createTrigger conn "events" + createTrigger conn "org_transactions" + createTrigger conn "whats" + createTrigger conn "what_labels" + createTrigger conn "locations" + createTrigger conn "whys" + createTrigger conn "wheres" + createTrigger conn "whens" + createTrigger conn "label_events" + createTrigger conn "signatures" + createTrigger conn "hashes" + createTrigger conn "blockchain" diff --git a/projects/or_scs/src/Mirza/SupplyChain/Database/Schema/V0001.hs b/projects/or_scs/src/Mirza/SupplyChain/Database/Schema/V0001.hs index 
569cd628..9005ecb3 100644 --- a/projects/or_scs/src/Mirza/SupplyChain/Database/Schema/V0001.hs +++ b/projects/or_scs/src/Mirza/SupplyChain/Database/Schema/V0001.hs @@ -44,9 +44,9 @@ import Database.Beam.Migrate.Types (CheckedDatabaseSettings, import Database.Beam.Postgres (PgJSON, Postgres, bytea, json, text) import Database.Beam.Postgres.Migrate (uuid) - import Crypto.JOSE (CompactJWS, JWSHeader) + -------------------------------------------------------------------------------- -- Constants and Utils -------------------------------------------------------------------------------- diff --git a/projects/or_scs/src/Mirza/SupplyChain/Main.hs b/projects/or_scs/src/Mirza/SupplyChain/Main.hs index 553216ef..23e531be 100644 --- a/projects/or_scs/src/Mirza/SupplyChain/Main.hs +++ b/projects/or_scs/src/Mirza/SupplyChain/Main.hs @@ -32,11 +32,12 @@ import Options.Applicative import Control.Lens +import Control.Monad (when) import Control.Exception (finally) import Data.Maybe (fromMaybe) import Katip as K -import System.Exit (exitFailure) +import System.Exit (die, exitFailure) import System.IO (IOMode (AppendMode), hPutStrLn, openFile, stderr, stdout) @@ -109,23 +110,32 @@ main = runProgram =<< execParser opts runProgram :: ServerOptionsSCS -> IO () runProgram so@ServerOptionsSCS{initDB = True, dbPopulateInfo =Just _, orServiceInfo =Just _} = do ctx <- initSCSContext so - migrate ctx $ connectionStr so - runDbPopulate so -runProgram so@ServerOptionsSCS{initDB =False, dbPopulateInfo =Just _, orServiceInfo =Just _} = runDbPopulate so -runProgram so@ServerOptionsSCS{initDB = False, scsServiceInfo=(scsHst, scsPort), orServiceInfo =Just _} = do + either (die . show) (const $ runDbPopulate so) =<< migrate ctx + +runProgram so@ServerOptionsSCS{initDB = False, dbPopulateInfo = Just _, orServiceInfo = Just _} = runDbPopulate so + +runProgram so@ServerOptionsSCS{initDB, dbPopulateInfo = Nothing, orServiceInfo = Just _, scsServiceInfo=(scsHst, scsPort)} = do ctx <- initSCSContext so + when initDB $ do + putStr "Migrating database... " + either (die . show) pure =<< migrate ctx + putStrLn "Done." app <- initApplication so ctx putStrLn $ "http://" <> scsHst <> ":" <> show scsPort <> "/swagger-ui/" Warp.run (fromIntegral scsPort) app `finally` closeScribes (ctx ^. ST.scsKatipLogEnv) + +runProgram so@ServerOptionsSCS{initDB = True, dbPopulateInfo = Nothing, orServiceInfo = Nothing} = do + ctx <- initSCSContext so + either (die . 
show) pure =<< migrate ctx + runProgram ServerOptionsSCS{initDB = False, orServiceInfo = Nothing} = do hPutStrLn stderr "Required unless initialising the database: --orhost ARG --orport ARG" exitFailure + runProgram ServerOptionsSCS{initDB = True, dbPopulateInfo = Just _, orServiceInfo =Nothing} = do hPutStrLn stderr "Required for populating the database: --orhost ARG --orport ARG" exitFailure -runProgram so@ServerOptionsSCS{initDB = True, dbPopulateInfo = Nothing} = do - ctx <- initSCSContext so - migrate ctx $ connectionStr so + runDbPopulate :: ServerOptionsSCS -> IO () runDbPopulate so = do diff --git a/projects/or_scs/stack.yaml b/projects/or_scs/stack.yaml index aa6a0ae6..baa62b63 100644 --- a/projects/or_scs/stack.yaml +++ b/projects/or_scs/stack.yaml @@ -13,7 +13,7 @@ extra-deps: - git: https://github.com/sajidanower23/hs-jose.git commit: d7aca4d13b27235af47d522b5d093e567750628e - git: https://github.com/data61/GS1Combinators.git - commit: 7f0f0c9c2ec6d91378ac56ba63de033fcc313326 # Head on 2019-06-14 + commit: ddb92e443055e7719fa46afa5913cf2276cfd71e # Head on 2019-08-13 - servant-flatten-0.2 - hoist-error-0.2.1.0 diff --git a/projects/or_scs/stack.yaml.lock b/projects/or_scs/stack.yaml.lock index 605bb621..5a0137f9 100644 --- a/projects/or_scs/stack.yaml.lock +++ b/projects/or_scs/stack.yaml.lock @@ -68,18 +68,18 @@ packages: commit: d7aca4d13b27235af47d522b5d093e567750628e - completed: cabal-file: - size: 2908 - sha256: cd111c999f253f8985a8b223cc3e493b97cd82cc2fbb03779afe54590ce29738 + size: 2942 + sha256: 8895b8bd234fe57d281f6a4db32c56850b9f0640fd07cb9753a09b8fbe1ce36e name: GS1Combinators version: 0.1.0.0 git: https://github.com/data61/GS1Combinators.git pantry-tree: size: 8991 - sha256: 5c079cf09bfdce60cb82ac2454a30631b3565067a202dd5ea0351a95cb2edec7 - commit: 7f0f0c9c2ec6d91378ac56ba63de033fcc313326 + sha256: 3f045313a67962e750526f46a42ca7190feac345fa6eb1384c17655e532564c8 + commit: ddb92e443055e7719fa46afa5913cf2276cfd71e original: git: https://github.com/data61/GS1Combinators.git - commit: 7f0f0c9c2ec6d91378ac56ba63de033fcc313326 + commit: ddb92e443055e7719fa46afa5913cf2276cfd71e - completed: hackage: servant-flatten-0.2@sha256:276896f7c5cdec5b8f8493f6205fded0cc602d050b58fdb09a6d7c85c3bb0837,1234 pantry-tree: diff --git a/projects/or_scs/test/Mirza/SupplyChain/Spec.hs b/projects/or_scs/test/Mirza/SupplyChain/Spec.hs index 5b6a89b1..feccfd13 100644 --- a/projects/or_scs/test/Mirza/SupplyChain/Spec.hs +++ b/projects/or_scs/test/Mirza/SupplyChain/Spec.hs @@ -1,8 +1,10 @@ +{-# LANGUAGE ScopedTypeVariables #-} module Main where +import Mirza.Common.Database ( dropTablesSimple ) + import Mirza.SupplyChain.Database.Migrate -import Mirza.SupplyChain.Database.Schema import Mirza.SupplyChain.Main hiding (main) import Mirza.SupplyChain.Types as ST @@ -23,8 +25,7 @@ import Control.Exception (bracket) import Control.Monad.Except (runExceptT) import Database.Beam.Postgres -import Data.Pool (Pool, destroyAllResources, - withResource) +import Data.Pool (Pool, destroyAllResources) import qualified Data.Pool as Pool import Katip (Severity (DebugS)) @@ -43,12 +44,26 @@ defaultPool = Pool.createPool (connectPostgreSQL connectionString) close openConnection :: IO SCSContext openConnection = do tempFile <- emptySystemTempFile "supplyChainServerTests.log" - connpool <- defaultPool - _ <- withResource connpool $ dropTables supplyChainDb -- drop tables before so if already exist no problems... 
means tables get overwritten though - withResource connpool (tryCreateSchema True) + --connpool <- defaultPool + --_ <- withResource connpool $ dropTables supplyChainDb -- drop tables before so if already exist no problems... means tables get overwritten though + --withResource connpool (tryCreateSchema True) let connectionString = getDatabaseConnectionString testDbConnectionStringSCS - initSCSContext (ServerOptionsSCS Dev False Nothing connectionString ("localhost", 8000) DebugS - (Just ("127.0.0.1", 8200)) (Just tempFile)) + ctx <- initSCSContext ( ServerOptionsSCS Dev + False + Nothing + connectionString + ("localhost", 8000) + DebugS + (Just ("127.0.0.1", 8200)) + (Just tempFile) + ) + + errorOnRight =<< dropTablesSimple ctx + errorOnRight =<< migrate ctx + pure ctx + + where + errorOnRight = either (error . show) pure closeConnection :: SCSContext -> IO () closeConnection = destroyAllResources . ST._scsDbConnPool diff --git a/projects/web/deploy/etc/nginx/conf.d/default.conf b/projects/web/deploy/etc/nginx/conf.d/default.conf index 80cf8998..3c3a202a 100644 --- a/projects/web/deploy/etc/nginx/conf.d/default.conf +++ b/projects/web/deploy/etc/nginx/conf.d/default.conf @@ -44,6 +44,7 @@ server { proxy_set_header X-Forwarded-Proto $scheme; } } + server { listen 80; server_name sci041.mirza.d61.io; @@ -57,6 +58,31 @@ server { } } +server { + listen 80; + server_name sci042.mirza.d61.io; + + location / { + proxy_pass http://sci042edapi:8080; + proxy_set_header Host $host; + proxy_set_header X-Real-IP $remote_addr; + proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for; + proxy_set_header X-Forwarded-Proto $scheme; + } +} + +server { + listen 80; + server_name sci054.mirza.d61.io; + + location / { + proxy_pass http://sci054edapi:8080; + proxy_set_header Host $host; + proxy_set_header X-Real-IP $remote_addr; + proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for; + proxy_set_header X-Forwarded-Proto $scheme; + } +} server { listen 80 default_server;
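
Note on the new XML content type (Mirza.SupplyChain.API): the XML / FromXML instances added above let the existing POST /event route accept text/xml EPCIS documents alongside JSON, decoded with GS1Combinators' parseBS. Below is a minimal, illustrative sketch of exercising that decoder in isolation; it assumes XML is exported from Mirza.SupplyChain.API and that a sample EPCIS file exists at the hypothetical path shown, so it is not part of the patch itself.

{-# LANGUAGE TypeApplications #-}

module CheckXmlDecode where

import qualified Data.ByteString.Lazy  as BSL
import           Data.Proxy            (Proxy (..))
import           Servant.API           (mimeUnrender)

import qualified Data.GS1.Event        as Ev
import           Mirza.SupplyChain.API (XML)   -- assumes XML is exported

-- Decode the first GS1 event from an EPCIS XML document, following the same
-- code path the server takes when a request arrives with Content-Type: text/xml.
main :: IO ()
main = do
  xmlDoc <- BSL.readFile "test/test-xml/ObjectEvent.xml"  -- hypothetical sample file
  case mimeUnrender (Proxy @XML) xmlDoc :: Either String Ev.Event of
    Left err -> putStrLn ("XML rejected: " <> err)
    Right ev -> print ev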
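
Note on the plain-SQL migration scheme: runMigrationSimple (Mirza.Common.Database) keeps a version table, logs the current position ("Database schema @ n/m"), runs only the migrations beyond the recorded version, and records a row per applied step. A later schema change would therefore be a new module appended to the list in Mirza.SupplyChain.Database.Migrate. The sketch below is hypothetical — the module name, index and statement are only illustrative of the pattern established by Mirza.SupplyChain.Database.Schema.SQL.V0001.

{-# LANGUAGE QuasiQuotes #-}

module Mirza.SupplyChain.Database.Schema.SQL.V0002 where

import           Mirza.Common.Database            (Migration)

import           Control.Monad                    (void)
import           Database.PostgreSQL.Simple       (execute_)
import           Database.PostgreSQL.Simple.SqlQQ (sql)

-- Illustrative only: a second migration step, applied automatically the next
-- time the service starts with migrations enabled, because the version table
-- will still be at 1.
m_0002 :: Migration
m_0002 conn = void $ execute_ conn [sql|
  CREATE INDEX events_foreign_event_id_idx
    ON public.events (event_foreign_event_id);
|]

It would then be appended in Mirza.SupplyChain.Database.Migrate:

migrate ctx = runMigrationSimple ctx migrations
  where
    migrations = [ m_0001
                 , m_0002
                 ]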