From a8b3326651ea5cd227c8eb961df2748b2d305bf3 Mon Sep 17 00:00:00 2001 From: Noah Moss <32746338+noahmoss@users.noreply.github.com> Date: Wed, 29 Nov 2023 16:35:31 -0500 Subject: [PATCH 1/9] Change default log level for `publish-event!` to debug (#36213) --- src/metabase/events.clj | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/metabase/events.clj b/src/metabase/events.clj index 1698bbca9a9cd..3d768ef171626 100644 --- a/src/metabase/events.clj +++ b/src/metabase/events.clj @@ -123,7 +123,7 @@ {:name "publish-event!.logging" :attributes {}} (let [{:keys [object]} event] - (log/infof "Publishing %s event (name and id):\n\n%s" + (log/debugf "Publishing %s event (name and id):\n\n%s" (u/colorize :yellow (pr-str topic)) (u/pprint-to-str (let [model (mi/model object)] (cond-> (select-keys object [:name :id]) From 532a833a1fbdf7182c5b441b8a319d6b63a147d0 Mon Sep 17 00:00:00 2001 From: Cal Herries <39073188+calherries@users.noreply.github.com> Date: Thu, 30 Nov 2023 10:05:23 +0200 Subject: [PATCH 2/9] Move functions for creating and updating cards from api to models namespace (#36123) --- src/metabase/api/card.clj | 395 +++---------------------- src/metabase/api/collection.clj | 13 +- src/metabase/api/dashboard.clj | 8 +- src/metabase/api/public.clj | 3 +- src/metabase/email/messages.clj | 4 +- src/metabase/models/action.clj | 6 + src/metabase/models/card.clj | 307 ++++++++++++++++++- src/metabase/models/dashboard_card.clj | 16 +- src/metabase/models/pulse.clj | 3 +- src/metabase/pulse/util.clj | 11 +- test/metabase/api/card_test.clj | 20 -- test/metabase/driver_test.clj | 2 - test/metabase/models/card_test.clj | 20 ++ 13 files changed, 404 insertions(+), 404 deletions(-) diff --git a/src/metabase/api/card.clj b/src/metabase/api/card.clj index 2b35e4b3f036e..d4149b1b0bc58 100644 --- a/src/metabase/api/card.clj +++ b/src/metabase/api/card.clj @@ -4,12 +4,9 @@ [cheshire.core :as json] [clj-bom.core :as bom] [clojure.core.async :as a] - [clojure.data :as data] [clojure.data.csv :as csv] [clojure.string :as str] - [clojure.walk :as walk] [compojure.core :refer [DELETE GET POST PUT]] - [malli.core :as mc] [medley.core :as m] [metabase.analytics.snowplow :as snowplow] [metabase.api.common :as api] @@ -19,34 +16,29 @@ [metabase.driver :as driver] [metabase.driver.sync :as driver.s] [metabase.driver.util :as driver.u] - [metabase.email.messages :as messages] [metabase.events :as events] [metabase.lib.types.isa :as lib.types.isa] [metabase.mbql.normalize :as mbql.normalize] [metabase.mbql.util :as mbql.u] [metabase.models - :refer [Card CardBookmark Collection Database PersistedInfo Pulse Table]] + :refer [Card CardBookmark Collection Database PersistedInfo Table]] [metabase.models.card :as card] [metabase.models.collection :as collection] [metabase.models.collection.root :as collection.root] [metabase.models.humanization :as humanization] [metabase.models.interface :as mi] - [metabase.models.moderation-review :as moderation-review] [metabase.models.params :as params] [metabase.models.params.custom-values :as custom-values] [metabase.models.permissions :as perms] [metabase.models.persisted-info :as persisted-info] - [metabase.models.pulse :as pulse] [metabase.models.query :as query] [metabase.models.query.permissions :as query-perms] [metabase.models.revision.last-edit :as last-edit] [metabase.models.timeline :as timeline] [metabase.public-settings :as public-settings] [metabase.public-settings.premium-features :as premium-features] - [metabase.query-processor.async :as qp.async] 
[metabase.query-processor.card :as qp.card] [metabase.query-processor.pivot :as qp.pivot] - [metabase.query-processor.util :as qp.util] [metabase.related :as related] [metabase.server.middleware.offset-paging :as mw.offset-paging] [metabase.sync :as sync] @@ -61,10 +53,8 @@ [metabase.util.log :as log] [metabase.util.malli :as mu] [metabase.util.malli.schema :as ms] - [schema.core :as s] [toucan2.core :as t2]) (:import - (clojure.core.async.impl.channels ManyToManyChannel) (java.io File))) (set! *warn-on-reflection* true) @@ -158,6 +148,22 @@ card))) cards)))) +(defn hydrate-for-frontend + "Adds additional information to a `Card` selected with toucan that is needed by the frontend. This should be the same information + returned by all API endpoints where the card entity is cached (i.e. GET, PUT, POST) since the frontend replaces the Card + it currently has with returned one -- See #4283" + [card] + (-> card + (t2/hydrate :creator + :dashboard_count + :can_write + :average_query_time + :last_query_start + [:collection :is_personal] + [:moderation_reviews :moderator_details]) + (cond-> ; card + (:dataset card) (t2/hydrate :persisted)))) + (api/defendpoint GET "/:id" "Get `Card` with ID." [id ignore_view] @@ -166,18 +172,12 @@ (let [raw-card (t2/select-one Card :id id) card (-> raw-card api/read-check - (t2/hydrate :creator - :dashboard_count - :parameter_usage_count - :can_write - :average_query_time - :last_query_start - [:collection :is_personal] - [:moderation_reviews :moderator_details]) - collection.root/hydrate-root-collection - (cond-> ;; card - (:dataset raw-card) (t2/hydrate :persisted)) - (last-edit/with-last-edit-info :card))] + hydrate-for-frontend + ;; Cal 2023-11-27: why is parameter_usage_count not hydrated for other endpoints? Maybe it should be + (t2/hydrate :parameter_usage_count) + ;; Cal 2023-11-27: why is last-edit-info hydrated differently for GET vs PUT and POST + (last-edit/with-last-edit-info :card) + collection.root/hydrate-root-collection)] (u/prog1 card (when-not ignore_view (events/publish-event! :event/card-read {:object <> :user-id api/*current-user-id*}))))) @@ -370,59 +370,6 @@ ;;; -------------------------------------------------- Saving Cards -------------------------------------------------- -(s/defn ^:private result-metadata-async :- ManyToManyChannel - "Return a channel of metadata for the passed in `query`. Takes the `original-query` so it can determine if existing - `metadata` might still be valid. Takes `dataset?` since existing metadata might need to be \"blended\" into the - fresh metadata to preserve metadata edits from the dataset. - - Note this condition is possible for new cards and edits to cards. New cards can be created from existing cards by - copying, and they could be datasets, have edited metadata that needs to be blended into a fresh run. - - This is also complicated because everything is optional, so we cannot assume the client will provide metadata and - might need to save a metadata edit, or might need to use db-saved metadata on a modified dataset." - [{:keys [original-query query metadata original-metadata dataset?]}] - (let [valid-metadata? (and metadata (mc/validate qr/ResultsMetadata metadata))] - (cond - (or - ;; query didn't change, preserve existing metadata - (and (= (mbql.normalize/normalize original-query) - (mbql.normalize/normalize query)) - valid-metadata?) - ;; only sent valid metadata in the edit. Metadata might be the same, might be different. We save in either case - (and (nil? query) - valid-metadata?) 
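;; A minimal REPL-style sketch of how the shared `hydrate-for-frontend` helper introduced above is
;; meant to be composed, mirroring the GET endpoint rather than adding new behavior; the card id 1
;; is a hypothetical example value.
(comment
  (-> (t2/select-one Card :id 1)
      api/read-check
      hydrate-for-frontend
      (last-edit/with-last-edit-info :card)))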
- - ;; copying card and reusing existing metadata - (and (nil? original-query) - query - valid-metadata?)) - (do - (log/debug (trs "Reusing provided metadata")) - (a/to-chan! [metadata])) - - ;; frontend always sends query. But sometimes programatic don't (cypress, API usage). Returning an empty channel - ;; means the metadata won't be updated at all. - (nil? query) - (do - (log/debug (trs "No query provided so not querying for metadata")) - (doto (a/chan) a/close!)) - - ;; datasets need to incorporate the metadata either passed in or already in the db. Query has changed so we - ;; re-run and blend the saved into the new metadata - (and dataset? (or valid-metadata? (seq original-metadata))) - (do - (log/debug (trs "Querying for metadata and blending model metadata")) - (a/go (let [metadata' (if valid-metadata? - (map mbql.normalize/normalize-source-metadata metadata) - original-metadata) - fresh (a/ms 15)) - -(defn- schedule-metadata-saving - "Save metadata when (and if) it is ready. Takes a chan that will eventually return metadata. Waits up - to [[metadata-async-timeout-ms]] for the metadata, and then saves it if the query of the card has not changed." - [result-metadata-chan card] - (a/go - (let [timeoutc (a/timeout metadata-async-timeout-ms) - [metadata port] (a/alts! [result-metadata-chan timeoutc]) - id (:id card)] - (cond (= port timeoutc) - (do (a/close! result-metadata-chan) - (log/info (trs "Metadata not ready in {0} minutes, abandoning" - (long (/ metadata-async-timeout-ms 1000 60))))) - - (not (seq metadata)) - (log/info (trs "Not updating metadata asynchronously for card {0} because no metadata" - id)) - :else - (future - (let [current-query (t2/select-one-fn :dataset_query Card :id id)] - (if (= (:dataset_query card) current-query) - (do (t2/update! Card id {:result_metadata metadata}) - (log/info (trs "Metadata updated asynchronously for card {0}" id))) - (log/info (trs "Not updating metadata asynchronously for card {0} because query has changed" - id))))))))) - -(defn create-card! - "Create a new Card. Metadata will be fetched off thread. If the metadata takes longer than [[metadata-sync-wait-ms]] - the card will be saved without metadata and it will be saved to the card in the future when it is ready. - - Dispatches the `:card-create` event unless `delay-event?` is true. Useful for when many cards are created in a - transaction and work in the `:card-create` event cannot proceed because the cards would not be visible outside of - the transaction yet. If you pass true here it is important to call the event after the cards are successfully - created." - ([card creator] (create-card! card creator false)) - ([{:keys [dataset_query result_metadata dataset parameters parameter_mappings], :as card-data} creator delay-event?] - ;; `zipmap` instead of `select-keys` because we want to get `nil` values for keys that aren't present. Required by - ;; `api/maybe-reconcile-collection-position!` - (let [data-keys [:dataset_query :description :display :name :visualization_settings - :parameters :parameter_mappings :collection_id :collection_position :cache_ttl] - card-data (assoc (zipmap data-keys (map card-data data-keys)) - :creator_id (:id creator) - :dataset (boolean (:dataset card-data)) - :parameters (or parameters []) - :parameter_mappings (or parameter_mappings [])) - result-metadata-chan (result-metadata-async {:query dataset_query - :metadata result_metadata - :dataset? dataset}) - metadata-timeout (a/timeout metadata-sync-wait-ms) - [metadata port] (a/alts!! 
[result-metadata-chan metadata-timeout]) - timed-out? (= port metadata-timeout) - card (t2/with-transaction [_conn] - ;; Adding a new card at `collection_position` could cause other cards in this - ;; collection to change position, check that and fix it if needed - (api/maybe-reconcile-collection-position! card-data) - (first (t2/insert-returning-instances! Card (cond-> card-data - (and metadata (not timed-out?)) - (assoc :result_metadata metadata)))))] - (when-not delay-event? - (events/publish-event! :event/card-create {:object card :user-id api/*current-user-id*})) - (when timed-out? - (log/info (trs "Metadata not available soon enough. Saving new card and asynchronously updating metadata"))) - ;; include same information returned by GET /api/card/:id since frontend replaces the Card it currently has with - ;; returned one -- See #4283 - (u/prog1 (-> card - (t2/hydrate :creator - :dashboard_count - :can_write - :average_query_time - :last_query_start - [:collection :is_personal] - [:moderation_reviews :moderator_details]) - (assoc :last-edit-info (last-edit/edit-information-for-user creator))) - (when timed-out? - (schedule-metadata-saving result-metadata-chan <>)))))) +;;; ------------------------------------------------- Creating Cards ------------------------------------------------- (api/defendpoint POST "/" "Create a new `Card`." @@ -548,7 +412,9 @@ saved later when it is ready." (check-data-permissions-for-query dataset_query) ;; check that we have permissions for the collection we're trying to save this card to, if applicable (collection/check-write-perms-for-collection collection_id) - (create-card! body @api/*current-user*)) + (-> (card/create-card! body @api/*current-user*) + hydrate-for-frontend + (assoc :last-edit-info (last-edit/edit-information-for-user @api/*current-user*)))) (api/defendpoint POST "/:id/copy" "Copy a `Card`, with the new name 'Copy of _name_'" @@ -557,8 +423,9 @@ saved later when it is ready." (let [orig-card (api/read-check Card id) new-name (str (trs "Copy of ") (:name orig-card)) new-card (assoc orig-card :name new-name)] - (create-card! new-card @api/*current-user*))) - + (-> (card/create-card! new-card @api/*current-user*) + hydrate-for-frontend + (assoc :last-edit-info (last-edit/edit-information-for-user @api/*current-user*))))) ;;; ------------------------------------------------- Updating Cards ------------------------------------------------- @@ -578,173 +445,6 @@ saved later when it is ready." (validation/check-embedding-enabled) (api/check-superuser))) -(defn- card-archived? [old-card new-card] - (and (not (:archived old-card)) - (:archived new-card))) - -(defn- line-area-bar? [display] - (contains? #{:line :area :bar} display)) - -(defn- progress? [display] - (= :progress display)) - -(defn- allows-rows-alert? [display] - (not (contains? #{:line :bar :area :progress} display))) - -(defn- display-change-broke-alert? - "Alerts no longer make sense when the kind of question being alerted on significantly changes. Setting up an alert - when a time series query reaches 10 is no longer valid if the question switches from a line graph to a table. This - function goes through various scenarios that render an alert no longer valid" - [{old-display :display} {new-display :display}] - (when-not (= old-display new-display) - (or - ;; Did the alert switch from a table type to a line/bar/area/progress graph type? - (and (allows-rows-alert? old-display) - (or (line-area-bar? new-display) - (progress? 
new-display))) - ;; Switching from a line/bar/area to another type that is not those three invalidates the alert - (and (line-area-bar? old-display) - (not (line-area-bar? new-display))) - ;; Switching from a progress graph to anything else invalidates the alert - (and (progress? old-display) - (not (progress? new-display)))))) - -(defn- goal-missing? - "If we had a goal before, and now it's gone, the alert is no longer valid" - [old-card new-card] - (and - (get-in old-card [:visualization_settings :graph.goal_value]) - (not (get-in new-card [:visualization_settings :graph.goal_value])))) - -(defn- multiple-breakouts? - "If there are multiple breakouts and a goal, we don't know which breakout to compare to the goal, so it invalidates - the alert" - [{:keys [display] :as new-card}] - (and (get-in new-card [:visualization_settings :graph.goal_value]) - (or (line-area-bar? display) - (progress? display)) - (< 1 (count (get-in new-card [:dataset_query :query :breakout]))))) - -(defn- delete-alert-and-notify! - "Removes all of the alerts and notifies all of the email recipients of the alerts change via `NOTIFY-FN!`" - [notify-fn! alerts] - (t2/delete! Pulse :id [:in (map :id alerts)]) - (doseq [{:keys [channels] :as alert} alerts - :let [email-channel (m/find-first #(= :email (:channel_type %)) channels)]] - (doseq [recipient (:recipients email-channel)] - (notify-fn! alert recipient @api/*current-user*)))) - -(defn delete-alert-and-notify-archived! - "Removes all alerts and will email each recipient letting them know" - [alerts] - (delete-alert-and-notify! messages/send-alert-stopped-because-archived-email! alerts)) - -(defn- delete-alert-and-notify-changed! [alerts] - (delete-alert-and-notify! messages/send-alert-stopped-because-changed-email! alerts)) - -(defn- delete-alerts-if-needed! [old-card {card-id :id :as new-card}] - ;; If there are alerts, we need to check to ensure the card change doesn't invalidate the alert - (when-let [alerts (seq (pulse/retrieve-alerts-for-cards {:card-ids [card-id]}))] - (cond - - (card-archived? old-card new-card) - (delete-alert-and-notify-archived! alerts) - - (or (display-change-broke-alert? old-card new-card) - (goal-missing? old-card new-card) - (multiple-breakouts? new-card)) - (delete-alert-and-notify-changed! alerts) - - ;; The change doesn't invalidate the alert, do nothing - :else - nil))) - -(defn- card-is-verified? - "Return true if card is verified, false otherwise. Assumes that moderation reviews are ordered so that the most recent - is the first. This is the case from the hydration function for moderation_reviews." - [card] - (-> card :moderation_reviews first :status #{"verified"} boolean)) - -(defn- changed? - "Return whether there were any changes in the objects at the keys for `consider`. - - returns false because changes to collection_id are ignored: - (changed? #{:description} - {:collection_id 1 :description \"foo\"} - {:collection_id 2 :description \"foo\"}) - - returns true: - (changed? #{:description} - {:collection_id 1 :description \"foo\"} - {:collection_id 2 :description \"diff\"})" - [consider card-before updates] - ;; have to ignore keyword vs strings over api. `{:type :query}` vs `{:type "query"}` - (let [prepare (fn prepare [card] (walk/prewalk (fn [x] (if (keyword? 
x) - (name x) - x)) - card)) - before (prepare (select-keys card-before consider)) - after (prepare (select-keys updates consider)) - [_ changes-in-after] (data/diff before after)] - (boolean (seq changes-in-after)))) - - - -(def card-compare-keys - "When comparing a card to possibly unverify, only consider these keys as changing something 'important' about the - query." - #{:table_id - :database_id - :query_type ;; these first three may not even be changeable - :dataset_query}) - -(defn- update-card! - "Update a Card. Metadata is fetched asynchronously. If it is ready before [[metadata-sync-wait-ms]] elapses it will be - included, otherwise the metadata will be saved to the database asynchronously." - [{:keys [id] :as card-before-update} card-updates] - ;; don't block our precious core.async thread, run the actual DB updates on a separate thread - (t2/with-transaction [_conn] - (api/maybe-reconcile-collection-position! card-before-update card-updates) - - (when (and (card-is-verified? card-before-update) - (changed? card-compare-keys card-before-update card-updates)) - ;; this is an enterprise feature but we don't care if enterprise is enabled here. If there is a review we need - ;; to remove it regardless if enterprise edition is present at the moment. - (moderation-review/create-review! {:moderated_item_id id - :moderated_item_type "card" - :moderator_id api/*current-user-id* - :status nil - :text (tru "Unverified due to edit")})) - ;; ok, now save the Card - (t2/update! Card id - ;; `collection_id` and `description` can be `nil` (in order to unset them). Other values should only be - ;; modified if they're passed in as non-nil - (u/select-keys-when card-updates - :present #{:collection_id :collection_position :description :cache_ttl :dataset} - :non-nil #{:dataset_query :display :name :visualization_settings :archived :enable_embedding - :parameters :parameter_mappings :embedding_params :result_metadata :collection_preview}))) - ;; Fetch the updated Card from the DB - - (let [card (t2/select-one Card :id id)] - (delete-alerts-if-needed! card-before-update card) - ;; skip publishing the event if it's just a change in its collection position - (when-not (= #{:collection_position} - (set (keys card-updates))) - (events/publish-event! :event/card-update {:object card :user-id api/*current-user-id*})) - ;; include same information returned by GET /api/card/:id since frontend replaces the Card it currently - ;; has with returned one -- See #4142 - (-> card - (t2/hydrate :creator - :dashboard_count - :can_write - :average_query_time - :last_query_start - [:collection :is_personal] - [:moderation_reviews :moderator_details]) - (cond-> ;; card - (:dataset card) (t2/hydrate :persisted)) - (assoc :last-edit-info (last-edit/edit-information-for-user @api/*current-user*))))) - (api/defendpoint PUT "/:id" "Update a `Card`." [id :as {{:keys [dataset_query description display name visualization_settings archived collection_id @@ -775,26 +475,30 @@ saved later when it is ready." check-allowed-to-change-embedding]] (f card-before-update card-updates)) ;; make sure we have the correct `result_metadata` - (let [result-metadata-chan (result-metadata-async {:original-query (:dataset_query card-before-update) - :query dataset_query - :metadata result_metadata - :original-metadata (:result_metadata card-before-update) - :dataset? (if (some? 
dataset) - dataset - (:dataset card-before-update))}) + (let [result-metadata-chan (card/result-metadata-async {:original-query (:dataset_query card-before-update) + :query dataset_query + :metadata result_metadata + :original-metadata (:result_metadata card-before-update) + :dataset? (if (some? dataset) + dataset + (:dataset card-before-update))}) card-updates (merge card-updates (when dataset {:display :table})) - metadata-timeout (a/timeout metadata-sync-wait-ms) + metadata-timeout (a/timeout card/metadata-sync-wait-ms) [fresh-metadata port] (a/alts!! [result-metadata-chan metadata-timeout]) timed-out? (= port metadata-timeout) card-updates (cond-> card-updates (not timed-out?) (assoc :result_metadata fresh-metadata))] - (u/prog1 (update-card! card-before-update card-updates) + (u/prog1 (-> (card/update-card! {:card-before-update card-before-update + :card-updates card-updates + :actor @api/*current-user*}) + hydrate-for-frontend + (assoc :last-edit-info (last-edit/edit-information-for-user @api/*current-user*))) (when timed-out? (log/info (trs "Metadata not available soon enough. Saving card {0} and asynchronously updating metadata" id)) - (schedule-metadata-saving result-metadata-chan <>)))))) + (card/schedule-metadata-saving result-metadata-chan <>)))))) ;;; ------------------------------------------------- Deleting Cards ------------------------------------------------- @@ -1179,17 +883,16 @@ saved later when it is ready." table-name (str schema-name "." table-name)) stats (upload/load-from-csv! driver db-id schema+table-name csv-file) - ;; Syncs are needed immediately to create the Table and its Fields; the scan is settings-dependent and can be async + ;; Sync immediately to create the Table and its Fields; the scan is settings-dependent and can be async table (sync-tables/create-or-reactivate-table! database {:name table-name :schema (not-empty schema-name)}) - table-id (u/the-id table) - _set_is_upload (t2/update! Table table-id {:is_upload true}) + _set_is_upload (t2/update! Table (u/the-id table) {:is_upload true}) _sync (scan-and-sync-table! database table) - card (create-card! + card (card/create-card! {:collection_id collection-id, :dataset true :database_id db-id :dataset_query {:database db-id - :query {:source-table table-id} + :query {:source-table (u/the-id table)} :type :query} :display :table :name (humanization/name->human-readable-name filename-prefix) @@ -1204,7 +907,7 @@ saved later when it is ready." :upload-seconds upload-seconds} stats)) (.delete csv-file) - card) + (hydrate-for-frontend card)) (catch Throwable e (snowplow/track-event! ::snowplow/csv-upload-failed api/*current-user-id* diff --git a/src/metabase/api/collection.clj b/src/metabase/api/collection.clj index e3f0798adb6ed..926f7da169062 100644 --- a/src/metabase/api/collection.clj +++ b/src/metabase/api/collection.clj @@ -11,14 +11,13 @@ [malli.core :as mc] [malli.transform :as mtx] [medley.core :as m] - [metabase.api.card :as api.card] [metabase.api.common :as api] [metabase.db :as mdb] [metabase.db.query :as mdb.query] [metabase.driver.common.parameters :as params] [metabase.driver.common.parameters.parse :as params.parse] [metabase.mbql.normalize :as mbql.normalize] - [metabase.models.card :refer [Card]] + [metabase.models.card :as card :refer [Card]] [metabase.models.collection :as collection :refer [Collection]] [metabase.models.collection.graph :as graph] [metabase.models.collection.root :as collection.root] @@ -874,15 +873,15 @@ (perms/set-has-full-permissions-for-set? 
@api/*current-user-permissions-set* (collection/perms-for-archiving collection-before-update))))) -(defn- maybe-send-archived-notificaitons! +(defn- maybe-send-archived-notifications! "When a collection is archived, all of it's cards are also marked as archived, but this is down in the model layer which will not cause the archive notification code to fire. This will delete the relevant alerts and notify the users just as if they had be archived individually via the card API." - [collection-before-update collection-updates] + [& {:keys [collection-before-update collection-updates actor]}] (when (api/column-will-change? :archived collection-before-update collection-updates) (when-let [alerts (seq (pulse/retrieve-alerts-for-cards {:card-ids (t2/select-pks-set Card :collection_id (u/the-id collection-before-update))}))] - (api.card/delete-alert-and-notify-archived! alerts)))) + (card/delete-alert-and-notify-archived! {:alerts alerts :actor actor})))) (api/defendpoint PUT "/:id" "Modify an existing Collection, including archiving or unarchiving it, or moving it." @@ -914,7 +913,9 @@ ;; if we're trying to *move* the Collection (instead or as well) go ahead and do that (move-collection-if-needed! collection-before-update collection-updates) ;; if we *did* end up archiving this Collection, we most post a few notifications - (maybe-send-archived-notificaitons! collection-before-update collection-updates)) + (maybe-send-archived-notifications! {:collection-before-update collection-before-update + :collection-updates collection-updates + :actor @api/*current-user*})) ;; finally, return the updated object (collection-detail (t2/select-one Collection :id id))) diff --git a/src/metabase/api/dashboard.clj b/src/metabase/api/dashboard.clj index bc9a2a56dc478..e1349e3210d67 100644 --- a/src/metabase/api/dashboard.clj +++ b/src/metabase/api/dashboard.clj @@ -7,7 +7,6 @@ [medley.core :as m] [metabase.actions.execution :as actions.execution] [metabase.analytics.snowplow :as snowplow] - [metabase.api.card :as api.card] [metabase.api.common :as api] [metabase.api.common.validation :as validation] [metabase.api.dataset :as api.dataset] @@ -16,7 +15,8 @@ [metabase.mbql.normalize :as mbql.normalize] [metabase.mbql.schema :as mbql.s] [metabase.mbql.util :as mbql.u] - [metabase.models.card :refer [Card]] + [metabase.models.action :as action] + [metabase.models.card :as card :refer [Card]] [metabase.models.collection :as collection] [metabase.models.collection.root :as collection.root] [metabase.models.dashboard :as dashboard :refer [Dashboard]] @@ -251,7 +251,7 @@ [:copied id] (if (:dataset card) card - (api.card/create-card! + (card/create-card! (cond-> (assoc card :collection_id dest-coll-id) same-collection? 
(update :name #(str % " - " (tru "Duplicate")))) @@ -984,7 +984,7 @@ parameters ms/JSONString} (api/read-check :model/Dashboard dashboard-id) (actions.execution/fetch-values - (api/check-404 (dashboard-card/dashcard->action dashcard-id)) + (api/check-404 (action/dashcard->action dashcard-id)) (json/parse-string parameters))) (api/defendpoint POST "/:dashboard-id/dashcard/:dashcard-id/execute" diff --git a/src/metabase/api/public.clj b/src/metabase/api/public.clj index a45811d1a6226..f267d48a823ed 100644 --- a/src/metabase/api/public.clj +++ b/src/metabase/api/public.clj @@ -20,7 +20,6 @@ [metabase.models.action :as action] [metabase.models.card :as card :refer [Card]] [metabase.models.dashboard :refer [Dashboard]] - [metabase.models.dashboard-card :as dashboard-card] [metabase.models.dimension :refer [Dimension]] [metabase.models.field :refer [Field]] [metabase.models.interface :as mi] @@ -310,7 +309,7 @@ (validation/check-public-sharing-enabled) (api/check-404 (t2/select-one-pk Dashboard :public_uuid uuid :archived false)) (actions.execution/fetch-values - (api/check-404 (dashboard-card/dashcard->action dashcard-id)) + (api/check-404 (action/dashcard->action dashcard-id)) (json/parse-string parameters))) (def ^:private dashcard-execution-throttle (throttle/make-throttler :dashcard-id :attempts-threshold 5000)) diff --git a/src/metabase/email/messages.clj b/src/metabase/email/messages.clj index 25b67cbeea471..abcfa2fe8c656 100644 --- a/src/metabase/email/messages.clj +++ b/src/metabase/email/messages.clj @@ -15,7 +15,6 @@ [metabase.driver.util :as driver.u] [metabase.email :as email] [metabase.models.collection :as collection] - [metabase.models.dashboard :as dashboard] [metabase.models.permissions :as perms] [metabase.models.user :refer [User]] [metabase.public-settings :as public-settings] @@ -322,7 +321,8 @@ :titleUrl (params/dashboard-url dashboard-id (params/parameters pulse dashboard)) :dashboardDescription (:description dashboard) ;; There are legacy pulses that exist without being tied to a dashboard - :dashboardHasTabs (when dashboard-id (dashboard/has-tabs? dashboard-id)) + :dashboardHasTabs (when dashboard-id + (boolean (seq (t2/hydrate dashboard :tabs)))) :creator (-> pulse :creator :common_name) :sectionStyle (style/style (style/section-style)) :notificationText (if (nil? non-user-email) diff --git a/src/metabase/models/action.clj b/src/metabase/models/action.clj index c6b0728aaa0df..23568281adaf9 100644 --- a/src/metabase/models/action.clj +++ b/src/metabase/models/action.clj @@ -341,6 +341,12 @@ (for [dashcard dashcards] (m/assoc-some dashcard :action (get actions-by-id (:action_id dashcard)))))) +(defn dashcard->action + "Get the action associated with a dashcard if exists, return `nil` otherwise." + [dashcard-or-dashcard-id] + (some->> (t2/select-one-fn :action_id :model/DashboardCard :id (u/the-id dashcard-or-dashcard-id)) + (select-action :id))) + ;;; ------------------------------------------------ Serialization --------------------------------------------------- (defmethod serdes/extract-query "Action" [_model _opts] diff --git a/src/metabase/models/card.clj b/src/metabase/models/card.clj index dc2dde2b85f0a..3ddae7477d38f 100644 --- a/src/metabase/models/card.clj +++ b/src/metabase/models/card.clj @@ -2,21 +2,30 @@ "Underlying DB model for what is now most commonly referred to as a 'Question' in most user-facing situations. Card is a historical name, but is the same thing; both terms are used interchangeably in the backend codebase." 
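;; A short sketch of the `dashcard->action` helper that now lives in `metabase.models.action`, as
;; called by the dashboard and public execute endpoints above; the dashcard id 42 is a hypothetical
;; example value.
(comment
  (when-let [action (metabase.models.action/dashcard->action 42)]
    (:name action)))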
(:require + [clojure.core.async :as a] + [clojure.data :as data] [clojure.set :as set] [clojure.string :as str] + [clojure.walk :as walk] + [malli.core :as mc] [medley.core :as m] + [metabase.api.common :as api] [metabase.config :as config] [metabase.db.query :as mdb.query] + [metabase.email.messages :as messages] + [metabase.events :as events] [metabase.mbql.normalize :as mbql.normalize] [metabase.models.audit-log :as audit-log] [metabase.models.collection :as collection] [metabase.models.field-values :as field-values] [metabase.models.interface :as mi] + [metabase.models.moderation-review :as moderation-review] [metabase.models.parameter-card :as parameter-card :refer [ParameterCard]] [metabase.models.params :as params] [metabase.models.permissions :as perms] + [metabase.models.pulse :as pulse] [metabase.models.query :as query] [metabase.models.revision :as revision] [metabase.models.serialization :as serdes] @@ -26,14 +35,20 @@ [metabase.public-settings.premium-features :as premium-features :refer [defenterprise]] + [metabase.query-processor.async :as qp.async] [metabase.query-processor.util :as qp.util] [metabase.server.middleware.session :as mw.session] + [metabase.shared.util.i18n :refer [trs]] + [metabase.sync.analyze.query-results :as qr] [metabase.util :as u] [metabase.util.i18n :refer [tru]] [metabase.util.log :as log] [methodical.core :as methodical] + [schema.core :as s] [toucan2.core :as t2] - [toucan2.tools.hydrate :as t2.hydrate])) + [toucan2.tools.hydrate :as t2.hydrate]) + (:import + (clojure.core.async.impl.channels ManyToManyChannel))) (set! *warn-on-reflection* true) @@ -473,6 +488,296 @@ [_card] [:name (serdes/hydrated-hash :collection) :created_at]) +;;; ----------------------------------------------- Creating Cards ---------------------------------------------------- + +(s/defn result-metadata-async :- ManyToManyChannel + "Return a channel of metadata for the passed in `query`. Takes the `original-query` so it can determine if existing + `metadata` might still be valid. Takes `dataset?` since existing metadata might need to be \"blended\" into the + fresh metadata to preserve metadata edits from the dataset. + + Note this condition is possible for new cards and edits to cards. New cards can be created from existing cards by + copying, and they could be datasets, have edited metadata that needs to be blended into a fresh run. + + This is also complicated because everything is optional, so we cannot assume the client will provide metadata and + might need to save a metadata edit, or might need to use db-saved metadata on a modified dataset." + [{:keys [original-query query metadata original-metadata dataset?]}] + (let [valid-metadata? (and metadata (mc/validate qr/ResultsMetadata metadata))] + (cond + (or + ;; query didn't change, preserve existing metadata + (and (= (mbql.normalize/normalize original-query) + (mbql.normalize/normalize query)) + valid-metadata?) + ;; only sent valid metadata in the edit. Metadata might be the same, might be different. We save in either case + (and (nil? query) + valid-metadata?) + + ;; copying card and reusing existing metadata + (and (nil? original-query) + query + valid-metadata?)) + (do + (log/debug (trs "Reusing provided metadata")) + (a/to-chan! [metadata])) + + ;; frontend always sends query. But sometimes programatic don't (cypress, API usage). Returning an empty channel + ;; means the metadata won't be updated at all. + (nil? 
query) + (do + (log/debug (trs "No query provided so not querying for metadata")) + (doto (a/chan) a/close!)) + + ;; datasets need to incorporate the metadata either passed in or already in the db. Query has changed so we + ;; re-run and blend the saved into the new metadata + (and dataset? (or valid-metadata? (seq original-metadata))) + (do + (log/debug (trs "Querying for metadata and blending model metadata")) + (a/go (let [metadata' (if valid-metadata? + (map mbql.normalize/normalize-source-metadata metadata) + original-metadata) + fresh (a/ms 15)) + +(defn schedule-metadata-saving + "Save metadata when (and if) it is ready. Takes a chan that will eventually return metadata. Waits up + to [[metadata-async-timeout-ms]] for the metadata, and then saves it if the query of the card has not changed." + [result-metadata-chan card] + (a/go + (let [timeoutc (a/timeout metadata-async-timeout-ms) + [metadata port] (a/alts! [result-metadata-chan timeoutc]) + id (:id card)] + (cond (= port timeoutc) + (do (a/close! result-metadata-chan) + (log/info (trs "Metadata not ready in {0} minutes, abandoning" + (long (/ metadata-async-timeout-ms 1000 60))))) + + (not (seq metadata)) + (log/info (trs "Not updating metadata asynchronously for card {0} because no metadata" + id)) + :else + (future + (let [current-query (t2/select-one-fn :dataset_query Card :id id)] + (if (= (:dataset_query card) current-query) + (do (t2/update! Card id {:result_metadata metadata}) + (log/info (trs "Metadata updated asynchronously for card {0}" id))) + (log/info (trs "Not updating metadata asynchronously for card {0} because query has changed" + id))))))))) + +(defn create-card! + "Create a new Card. Metadata will be fetched off thread. If the metadata takes longer than [[metadata-sync-wait-ms]] + the card will be saved without metadata and it will be saved to the card in the future when it is ready. + + Dispatches the `:card-create` event unless `delay-event?` is true. Useful for when many cards are created in a + transaction and work in the `:card-create` event cannot proceed because the cards would not be visible outside of + the transaction yet. If you pass true here it is important to call the event after the cards are successfully + created." + ([card creator] (create-card! card creator false)) + ([{:keys [dataset_query result_metadata dataset parameters parameter_mappings], :as card-data} creator delay-event?] + ;; `zipmap` instead of `select-keys` because we want to get `nil` values for keys that aren't present. Required by + ;; `api/maybe-reconcile-collection-position!` + (let [data-keys [:dataset_query :description :display :name :visualization_settings + :parameters :parameter_mappings :collection_id :collection_position :cache_ttl] + card-data (assoc (zipmap data-keys (map card-data data-keys)) + :creator_id (:id creator) + :dataset (boolean (:dataset card-data)) + :parameters (or parameters []) + :parameter_mappings (or parameter_mappings [])) + result-metadata-chan (result-metadata-async {:query dataset_query + :metadata result_metadata + :dataset? dataset}) + metadata-timeout (a/timeout metadata-sync-wait-ms) + [metadata port] (a/alts!! [result-metadata-chan metadata-timeout]) + timed-out? (= port metadata-timeout) + card (t2/with-transaction [_conn] + ;; Adding a new card at `collection_position` could cause other cards in this + ;; collection to change position, check that and fix it if needed + (api/maybe-reconcile-collection-position! card-data) + (first (t2/insert-returning-instances! 
Card (cond-> card-data + (and metadata (not timed-out?)) + (assoc :result_metadata metadata)))))] + (when-not delay-event? + (events/publish-event! :event/card-create {:object card :user-id (:id creator)})) + (when timed-out? + (log/info (trs "Metadata not available soon enough. Saving new card and asynchronously updating metadata"))) + ;; include same information returned by GET /api/card/:id since frontend replaces the Card it currently has with + ;; returned one -- See #4283 + (u/prog1 card + (when timed-out? + (schedule-metadata-saving result-metadata-chan <>)))))) + +;;; ------------------------------------------------- Updating Cards ------------------------------------------------- + +(defn- card-archived? [old-card new-card] + (and (not (:archived old-card)) + (:archived new-card))) + +(defn- line-area-bar? [display] + (contains? #{:line :area :bar} display)) + +(defn- progress? [display] + (= :progress display)) + +(defn- allows-rows-alert? [display] + (not (contains? #{:line :bar :area :progress} display))) + +(defn- display-change-broke-alert? + "Alerts no longer make sense when the kind of question being alerted on significantly changes. Setting up an alert + when a time series query reaches 10 is no longer valid if the question switches from a line graph to a table. This + function goes through various scenarios that render an alert no longer valid" + [{old-display :display} {new-display :display}] + (when-not (= old-display new-display) + (or + ;; Did the alert switch from a table type to a line/bar/area/progress graph type? + (and (allows-rows-alert? old-display) + (or (line-area-bar? new-display) + (progress? new-display))) + ;; Switching from a line/bar/area to another type that is not those three invalidates the alert + (and (line-area-bar? old-display) + (not (line-area-bar? new-display))) + ;; Switching from a progress graph to anything else invalidates the alert + (and (progress? old-display) + (not (progress? new-display)))))) + +(defn- goal-missing? + "If we had a goal before, and now it's gone, the alert is no longer valid" + [old-card new-card] + (and + (get-in old-card [:visualization_settings :graph.goal_value]) + (not (get-in new-card [:visualization_settings :graph.goal_value])))) + +(defn- multiple-breakouts? + "If there are multiple breakouts and a goal, we don't know which breakout to compare to the goal, so it invalidates + the alert" + [{:keys [display] :as new-card}] + (and (get-in new-card [:visualization_settings :graph.goal_value]) + (or (line-area-bar? display) + (progress? display)) + (< 1 (count (get-in new-card [:dataset_query :query :breakout]))))) + +(defn- delete-alert-and-notify! + "Removes all of the alerts and notifies all of the email recipients of the alerts change via `NOTIFY-FN!`" + [& {:keys [notify-fn! alerts actor]}] + (t2/delete! :model/Pulse :id [:in (map :id alerts)]) + (doseq [{:keys [channels] :as alert} alerts + :let [email-channel (m/find-first #(= :email (:channel_type %)) channels)]] + (doseq [recipient (:recipients email-channel)] + (notify-fn! alert recipient actor)))) + +(defn delete-alert-and-notify-archived! + "Removes all alerts and will email each recipient letting them know" + [& {:keys [alerts actor]}] + (delete-alert-and-notify! {:notify-fn! messages/send-alert-stopped-because-archived-email! + :alerts alerts + :actor actor})) + +(defn- delete-alert-and-notify-changed! [& {:keys [alerts actor]}] + (delete-alert-and-notify! {:notify-fn! messages/send-alert-stopped-because-changed-email! 
+ :alerts alerts + :actor actor})) + +(defn- delete-alerts-if-needed! [& {:keys [old-card new-card actor]}] + ;; If there are alerts, we need to check to ensure the card change doesn't invalidate the alert + (when-let [alerts (seq (pulse/retrieve-alerts-for-cards {:card-ids [(:id new-card)]}))] + (cond + + (card-archived? old-card new-card) + (delete-alert-and-notify-archived! :alerts alerts, :actor actor) + + (or (display-change-broke-alert? old-card new-card) + (goal-missing? old-card new-card) + (multiple-breakouts? new-card)) + (delete-alert-and-notify-changed! :alerts alerts, :actor actor) + + ;; The change doesn't invalidate the alert, do nothing + :else + nil))) + +(defn- card-is-verified? + "Return true if card is verified, false otherwise. Assumes that moderation reviews are ordered so that the most recent + is the first. This is the case from the hydration function for moderation_reviews." + [card] + (-> card :moderation_reviews first :status #{"verified"} boolean)) + +(defn- changed? + "Return whether there were any changes in the objects at the keys for `consider`. + + returns false because changes to collection_id are ignored: + (changed? #{:description} + {:collection_id 1 :description \"foo\"} + {:collection_id 2 :description \"foo\"}) + + returns true: + (changed? #{:description} + {:collection_id 1 :description \"foo\"} + {:collection_id 2 :description \"diff\"})" + [consider card-before updates] + ;; have to ignore keyword vs strings over api. `{:type :query}` vs `{:type "query"}` + (let [prepare (fn prepare [card] (walk/prewalk (fn [x] (if (keyword? x) + (name x) + x)) + card)) + before (prepare (select-keys card-before consider)) + after (prepare (select-keys updates consider)) + [_ changes-in-after] (data/diff before after)] + (boolean (seq changes-in-after)))) + +(def ^:private card-compare-keys + "When comparing a card to possibly unverify, only consider these keys as changing something 'important' about the + query." + #{:table_id + :database_id + :query_type ;; these first three may not even be changeable + :dataset_query}) + +(defn update-card! + "Update a Card. Metadata is fetched asynchronously. If it is ready before [[metadata-sync-wait-ms]] elapses it will be + included, otherwise the metadata will be saved to the database asynchronously." + [{:keys [card-before-update card-updates actor]}] + ;; don't block our precious core.async thread, run the actual DB updates on a separate thread + (t2/with-transaction [_conn] + (api/maybe-reconcile-collection-position! card-before-update card-updates) + + (when (and (card-is-verified? card-before-update) + (changed? card-compare-keys card-before-update card-updates)) + ;; this is an enterprise feature but we don't care if enterprise is enabled here. If there is a review we need + ;; to remove it regardless if enterprise edition is present at the moment. + (moderation-review/create-review! {:moderated_item_id (:id card-before-update) + :moderated_item_type "card" + :moderator_id (:id actor) + :status nil + :text (tru "Unverified due to edit")})) + ;; ok, now save the Card + (t2/update! Card (:id card-before-update) + ;; `collection_id` and `description` can be `nil` (in order to unset them). 
Other values should only be + ;; modified if they're passed in as non-nil + (u/select-keys-when card-updates + :present #{:collection_id :collection_position :description :cache_ttl :dataset} + :non-nil #{:dataset_query :display :name :visualization_settings :archived :enable_embedding + :parameters :parameter_mappings :embedding_params :result_metadata :collection_preview}))) + ;; Fetch the updated Card from the DB + (let [card (t2/select-one Card :id (:id card-before-update))] + (delete-alerts-if-needed! :old-card card-before-update, :new-card card, :actor actor) + ;; skip publishing the event if it's just a change in its collection position + (when-not (= #{:collection_position} + (set (keys card-updates))) + (events/publish-event! :event/card-update {:object card :user-id api/*current-user-id*})) + card)) + ;;; ------------------------------------------------- Serialization -------------------------------------------------- (defmethod serdes/extract-query "Card" [_ opts] diff --git a/src/metabase/models/dashboard_card.clj b/src/metabase/models/dashboard_card.clj index 56d1fa15cc4bb..18523eb562eb0 100644 --- a/src/metabase/models/dashboard_card.clj +++ b/src/metabase/models/dashboard_card.clj @@ -4,8 +4,6 @@ [medley.core :as m] [metabase.db :as mdb] [metabase.db.query :as mdb.query] - [metabase.models.action :as action] - [metabase.models.card :refer [Card]] [metabase.models.dashboard-card-series :refer [DashboardCardSeries]] [metabase.models.interface :as mi] [metabase.models.pulse-card :refer [PulseCard]] @@ -48,7 +46,7 @@ (defmethod mi/perms-objects-set :model/DashboardCard [dashcard read-or-write] (let [card (or (:card dashcard) - (t2/select-one [Card :dataset_query] :id (u/the-id (:card_id dashcard)))) + (t2/select-one [:model/Card :dataset_query] :id (u/the-id (:card_id dashcard)))) series (or (:series dashcard) (series dashcard))] (apply set/union (mi/perms-objects-set card read-or-write) (for [series-card series] @@ -108,12 +106,6 @@ ;;; ---------------------------------------------------- CRUD FNS ---------------------------------------------------- -(defn dashcard->action - "Get the action associated with a dashcard if exists, return `nil` otherwise." - [dashcard-or-dashcard-id] - (some->> (t2/select-one-fn :action_id :model/DashboardCard :id (u/the-id dashcard-or-dashcard-id)) - (action/select-action :id))) - (mu/defn retrieve-dashboard-card "Fetch a single DashboardCard by its ID value." [id :- ms/PositiveInt] @@ -380,9 +372,9 @@ [dashcard] (-> dashcard (dissoc :serdes/meta) - (update :card_id serdes/*import-fk* 'Card) - (update :action_id serdes/*import-fk* 'Action) - (update :dashboard_id serdes/*import-fk* 'Dashboard) + (update :card_id serdes/*import-fk* :model/Card) + (update :action_id serdes/*import-fk* :model/Action) + (update :dashboard_id serdes/*import-fk* :model/Dashboard) (update :dashboard_tab_id serdes/*import-fk* :model/DashboardTab) (update :created_at #(if (string? 
%) (u.date/parse %) %)) (update :parameter_mappings serdes/import-parameter-mappings) diff --git a/src/metabase/models/pulse.clj b/src/metabase/models/pulse.clj index 29688675192a2..d01fcc709e6b9 100644 --- a/src/metabase/models/pulse.clj +++ b/src/metabase/models/pulse.clj @@ -23,7 +23,6 @@ [medley.core :as m] [metabase.api.common :as api] [metabase.events :as events] - [metabase.models.card :refer [Card]] [metabase.models.collection :as collection] [metabase.models.interface :as mi] [metabase.models.permissions :as perms] @@ -225,7 +224,7 @@ (mu/defn ^:private cards* :- [:sequential HybridPulseCard] [notification-or-id] (t2/select - Card + :model/Card {:select [:c.id :c.name :c.description :c.collection_id :c.display :pc.include_csv :pc.include_xls :pc.dashboard_card_id :dc.dashboard_id [nil :parameter_mappings]] ;; :dc.parameter_mappings - how do you select this? :from [[:pulse :p]] diff --git a/src/metabase/pulse/util.clj b/src/metabase/pulse/util.clj index 18c926a7ae321..cbce6483254bb 100644 --- a/src/metabase/pulse/util.clj +++ b/src/metabase/pulse/util.clj @@ -1,10 +1,7 @@ (ns metabase.pulse.util "Utils for pulses." (:require - [metabase.models.card :refer [Card]] - [metabase.models.dashboard-card - :as dashboard-card - :refer [DashboardCard]] + [metabase.models.dashboard-card :as dashboard-card] [metabase.query-processor :as qp] [metabase.query-processor.middleware.permissions :as qp.perms] [metabase.server.middleware.session :as mw.session] @@ -21,7 +18,7 @@ {:pre [(integer? pulse-creator-id)]} (let [card-id (u/the-id card-or-id)] (try - (when-let [{query :dataset_query, :as card} (t2/select-one Card :id card-id, :archived false)] + (when-let [{query :dataset_query, :as card} (t2/select-one :model/Card :id card-id, :archived false)] (let [query (assoc query :async? false) process-query (fn [] (binding [qp.perms/*card-id* card-id] @@ -49,8 +46,8 @@ [card-or-id dashcard-or-id] (let [card-id (u/the-id card-or-id) dashcard-id (u/the-id dashcard-or-id) - card (t2/select-one Card :id card-id, :archived false) - dashcard (t2/select-one DashboardCard :id dashcard-id) + card (t2/select-one :model/Card :id card-id, :archived false) + dashcard (t2/select-one :model/DashboardCard :id dashcard-id) multi-cards (dashboard-card/dashcard->multi-cards dashcard)] (for [multi-card multi-cards] (execute-card {:creator_id (:creator_id card)} (:id multi-card))))) diff --git a/test/metabase/api/card_test.clj b/test/metabase/api/card_test.clj index 24111c31748d8..6717692d0e7e1 100644 --- a/test/metabase/api/card_test.clj +++ b/test/metabase/api/card_test.clj @@ -1990,26 +1990,6 @@ :collections (collection-names cards-or-card-ids))) -(deftest changed?-test - (letfn [(changed? [before after] - (#'api.card/changed? api.card/card-compare-keys before after))] - (testing "Ignores keyword/string" - (is (false? (changed? {:dataset_query {:type :query}} {:dataset_query {:type "query"}})))) - (testing "Ignores properties not in `api.card/card-compare-keys" - (is (false? (changed? {:collection_id 1 - :collection_position 0} - {:collection_id 2 - :collection_position 1})))) - (testing "Sees changes" - (is (true? (changed? {:dataset_query {:type :query}} - {:dataset_query {:type :query - :query {}}}))) - (testing "But only when they are different in the after, not just omitted" - (is (false? (changed? {:dataset_query {} :collection_id 1} - {:collection_id 1}))) - (is (true? (changed? 
{:dataset_query {} :collection_id 1} - {:dataset_query nil :collection_id 1}))))))) - (deftest update-verified-card-test (tools.macro/macrolet [(with-card [verified & body] diff --git a/test/metabase/driver_test.clj b/test/metabase/driver_test.clj index e6386cf771c8d..3caea41014ebb 100644 --- a/test/metabase/driver_test.clj +++ b/test/metabase/driver_test.clj @@ -84,8 +84,6 @@ (mt/test-drivers (->> (mt/normal-drivers) ;; athena is a special case because connections aren't made with a single database, ;; but to an S3 bucket that may contain many databases - ;; TODO: other drivers are still failing with this test. For these drivers there's a good chance there's a bug in - ;; test.data. code, and not with the driver itself. (remove #{:athena :oracle :vertica :presto-jdbc})) (let [database-name (mt/random-name) dbdef (basic-db-definition database-name)] diff --git a/test/metabase/models/card_test.clj b/test/metabase/models/card_test.clj index e6d8ec8f3c1cb..2c7a857208cd6 100644 --- a/test/metabase/models/card_test.clj +++ b/test/metabase/models/card_test.clj @@ -788,3 +788,23 @@ ;; we store version of metabase which created the card (is (= config/mb-version-string (t2/select-one-fn :metabase_version :model/Card :id (:id card))))))) + +(deftest changed?-test + (letfn [(changed? [before after] + (#'card/changed? @#'card/card-compare-keys before after))] + (testing "Ignores keyword/string" + (is (false? (changed? {:dataset_query {:type :query}} {:dataset_query {:type "query"}})))) + (testing "Ignores properties not in `api.card/card-compare-keys" + (is (false? (changed? {:collection_id 1 + :collection_position 0} + {:collection_id 2 + :collection_position 1})))) + (testing "Sees changes" + (is (true? (changed? {:dataset_query {:type :query}} + {:dataset_query {:type :query + :query {}}}))) + (testing "But only when they are different in the after, not just omitted" + (is (false? (changed? {:dataset_query {} :collection_id 1} + {:collection_id 1}))) + (is (true? (changed? {:dataset_query {} :collection_id 1} + {:dataset_query nil :collection_id 1}))))))) From dc7c10735b0e845e786bf5dc0407048588d2d8de Mon Sep 17 00:00:00 2001 From: Fernando Brito Date: Thu, 30 Nov 2023 10:10:36 +0100 Subject: [PATCH 3/9] Fix query remarks on Snowflake (#29888) * Fix query remarks on Snowflake * Move docstrings out of defmethod * Add test to Snowflake query remarks * Improve test on Snowflake query remark * Remove unnecessary comment --- docs/developers-guide/driver-changelog.md | 3 ++ .../src/metabase/driver/snowflake.clj | 26 +++++++++++ .../test/metabase/driver/snowflake_test.clj | 44 +++++++++++++++++++ src/metabase/driver/sql_jdbc/execute.clj | 15 ++++++- 4 files changed, 87 insertions(+), 1 deletion(-) diff --git a/docs/developers-guide/driver-changelog.md b/docs/developers-guide/driver-changelog.md index dc97e0d7613a1..2f32543df70c7 100644 --- a/docs/developers-guide/driver-changelog.md +++ b/docs/developers-guide/driver-changelog.md @@ -75,6 +75,9 @@ title: Driver interface changelog will be used to call the corresponding `metabase.driver.sql.query-processor/date` implementation to convert the `field`. Returns `nil` if the conversion is not necessary for this `field` and `param-type` combination. +- The multimethod `metabase.driver.sql-jdbc.execute/inject-remark` has been added. It allows JDBC-based drivers to + override the default behavior of how SQL query remarks are added to queries (prepending them as a comment). + ## Metabase 0.47.0 - A new driver feature has been added: `:schemas`. 
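To make the new hook concrete: given the default implementation and the Snowflake override added in this patch, the same remark ends up in different positions. This is a sketch; the real remark text for Snowflake is the JSON blob produced by its `query->remark` override, abbreviated here as `<remark>`.

;; default sql-jdbc behavior: the remark is prepended as a leading comment
(inject-remark :postgres "SELECT 1" "<remark>")   ;=> "-- <remark>\nSELECT 1"

;; Snowflake strips leading comments, so its override appends the remark instead
(inject-remark :snowflake "SELECT 1" "<remark>")  ;=> "SELECT 1\n\n-- <remark>"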
This feature signals whether the database organizes tables in diff --git a/modules/drivers/snowflake/src/metabase/driver/snowflake.clj b/modules/drivers/snowflake/src/metabase/driver/snowflake.clj index 44ffc889ef61b..dc82c14df93f4 100644 --- a/modules/drivers/snowflake/src/metabase/driver/snowflake.clj +++ b/modules/drivers/snowflake/src/metabase/driver/snowflake.clj @@ -1,6 +1,8 @@ (ns metabase.driver.snowflake "Snowflake Driver." (:require + [buddy.core.codecs :as codecs] + [cheshire.core :as json] [clojure.java.jdbc :as jdbc] [clojure.set :as set] [clojure.string :as str] @@ -24,9 +26,11 @@ [metabase.driver.sync :as driver.s] [metabase.lib.metadata :as lib.metadata] [metabase.models.secret :as secret] + [metabase.public-settings :as public-settings] [metabase.query-processor.error-type :as qp.error-type] [metabase.query-processor.store :as qp.store] [metabase.query-processor.timezone :as qp.timezone] + [metabase.query-processor.util :as qp.util] [metabase.query-processor.util.add-alias-info :as add] [metabase.util :as u] [metabase.util.date-2 :as u.date] @@ -587,6 +591,28 @@ [driver ps i t] (sql-jdbc.execute/set-parameter driver ps i (t/sql-timestamp (t/with-zone-same-instant t (t/zone-id "UTC"))))) +;;; --------------------------------------------------- Query remarks --------------------------------------------------- + +; Snowflake strips comments prepended to the SQL statement (default remark injection behavior). We should append the +; remark instead. +(defmethod sql-jdbc.execute/inject-remark :snowflake + [_ sql remark] + (str sql "\n\n-- " remark)) + +(defmethod qp.util/query->remark :snowflake + [_ {{:keys [context executed-by card-id pulse-id dashboard-id query-hash]} :info, + query-type :type, + database-id :database}] + (json/generate-string {:client "Metabase" + :context context + :queryType query-type + :userId executed-by + :pulseId pulse-id + :cardId card-id + :dashboardId dashboard-id + :databaseId database-id + :queryHash (when (bytes? query-hash) (codecs/bytes->hex query-hash)) + :serverId (public-settings/site-uuid)})) ;;; ------------------------------------------------- User Impersonation -------------------------------------------------- diff --git a/modules/drivers/snowflake/test/metabase/driver/snowflake_test.clj b/modules/drivers/snowflake/test/metabase/driver/snowflake_test.clj index cb14930f00769..48608d3780cb7 100644 --- a/modules/drivers/snowflake/test/metabase/driver/snowflake_test.clj +++ b/modules/drivers/snowflake/test/metabase/driver/snowflake_test.clj @@ -1,5 +1,6 @@ (ns metabase.driver.snowflake-test (:require + [clojure.data.json :as json] [clojure.java.jdbc :as jdbc] [clojure.set :as set] [clojure.string :as str] @@ -7,8 +8,10 @@ [metabase.driver :as driver] [metabase.driver.sql :as driver.sql] [metabase.driver.sql-jdbc.connection :as sql-jdbc.conn] + [metabase.driver.sql-jdbc.execute :as sql-jdbc.execute] [metabase.models :refer [Table]] [metabase.models.database :refer [Database]] + [metabase.public-settings :as public-settings] [metabase.query-processor :as qp] [metabase.sync :as sync] [metabase.sync.util :as sync-util] @@ -26,6 +29,17 @@ (set! *warn-on-reflection* true) +(defn- query->native [query] + (let [check-sql-fn (fn [_ _ sql & _] + (throw (ex-info "done" {::native-query sql})))] + (with-redefs [sql-jdbc.execute/prepared-statement check-sql-fn + sql-jdbc.execute/execute-statement! 
check-sql-fn] + (try + (qp/process-query query) + (is false "no statement created") + (catch Exception e + (-> e u/all-ex-data ::native-query)))))) + (use-fixtures :each (fn [thunk] ;; 1. If sync fails when loading a test dataset, don't swallow the error; throw an Exception so we ;; can debug it. This is much less confusing when trying to fix broken tests. @@ -298,3 +312,33 @@ ;; Special characters (is (= "USE ROLE \"Role.123\";" (driver.sql/set-role-statement :snowflake "Role.123"))) (is (= "USE ROLE \"$role\";" (driver.sql/set-role-statement :snowflake "$role"))))) + +(deftest remark-test + (testing "Queries should have a remark formatted as JSON appended to them with additional metadata" + (mt/test-driver :snowflake + (let [expected-map {"pulseId" nil + "serverId" (public-settings/site-uuid) + "client" "Metabase" + "queryHash" "cb83d4f6eedc250edb0f2c16f8d9a21e5d42f322ccece1494c8ef3d634581fe2" + "queryType" "query" + "cardId" 1234 + "dashboardId" 5678 + "context" "ad-hoc" + "userId" 1000 + "databaseId" (mt/id)} + result-query (driver/prettify-native-form :snowflake + (query->native + (assoc + (mt/mbql-query users {:limit 2000}) + :parameters [{:type "id" + :target [:dimension [:field (mt/id :users :id) nil]] + :value ["1" "2" "3"]}] + :info {:executed-by 1000 + :card-id 1234 + :dashboard-id 5678 + :context :ad-hoc + :query-hash (byte-array [-53 -125 -44 -10 -18 -36 37 14 -37 15 44 22 -8 -39 -94 30 + 93 66 -13 34 -52 -20 -31 73 76 -114 -13 -42 52 88 31 -30])}))) + result-comment (second (re-find #"-- (\{.*\})" result-query)) + result-map (json/read-str result-comment)] + (is (= expected-map result-map)))))) diff --git a/src/metabase/driver/sql_jdbc/execute.clj b/src/metabase/driver/sql_jdbc/execute.clj index 1185ceb53486a..8e78a00117619 100644 --- a/src/metabase/driver/sql_jdbc/execute.clj +++ b/src/metabase/driver/sql_jdbc/execute.clj @@ -662,13 +662,26 @@ (let [row-thunk (row-thunk driver rs rsmeta)] (qp.reducible/reducible-rows row-thunk canceled-chan))) +(defmulti inject-remark + "Injects the remark into the SQL query text." + {:added "0.48.0", :arglists '([driver sql remark])} + driver/dispatch-on-initialized-driver + :hierarchy #'driver/hierarchy) + +; Combines the original SQL query with query remarks. Most databases using sql-jdbc based drivers support prepending the +; remark to the SQL statement, so we have it as a default. However, some drivers do not support it, so we allow it to +; be overriden. +(defmethod inject-remark :default + [_ sql remark] + (str "-- " remark "\n" sql)) + (defn execute-reducible-query "Default impl of `execute-reducible-query` for sql-jdbc drivers." {:added "0.35.0", :arglists '([driver query context respond] [driver sql params max-rows context respond])} ([driver {{sql :query, params :params} :native, :as outer-query} context respond] {:pre [(string? 
sql) (seq sql)]} (let [remark (qp.util/query->remark driver outer-query) - sql (str "-- " remark "\n" sql) + sql (inject-remark driver sql remark) max-rows (limit/determine-query-max-rows outer-query)] (execute-reducible-query driver sql params max-rows context respond))) From 24bed2505912816b4e87eb2509aaf255ae503de7 Mon Sep 17 00:00:00 2001 From: Nemanja Glumac <31325167+nemanjaglumac@users.noreply.github.com> Date: Thu, 30 Nov 2023 11:50:38 +0100 Subject: [PATCH 4/9] Fix E2E replay stray job dependency (#36219) * Fix stray job dependency * Run every two hours --- .github/workflows/e2e-tests-replay.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/e2e-tests-replay.yml b/.github/workflows/e2e-tests-replay.yml index bc0cd06eb6491..230c516d2398d 100644 --- a/.github/workflows/e2e-tests-replay.yml +++ b/.github/workflows/e2e-tests-replay.yml @@ -3,7 +3,7 @@ name: E2E Tests using Replay.io browser on: # We'll record runs using Replay.io and their browser on a schedule as an experiment schedule: - - cron: '0 */3 * * *' + - cron: '0 */2 * * *' jobs: e2e-matrix-builder: @@ -19,7 +19,7 @@ jobs: e2e-tests: - needs: [test-run-id, e2e-matrix-builder] + needs: e2e-matrix-builder runs-on: ${{ matrix.runner }} timeout-minutes: 90 name: e2e-tests-${{ matrix.name }}-${{ matrix.edition }} From 676fcd8c9816d9ed6e4516ef5975bb649824b256 Mon Sep 17 00:00:00 2001 From: Ngoc Khuat Date: Thu, 30 Nov 2023 19:36:50 +0700 Subject: [PATCH 5/9] Install clojure before running clj-kondo (#36222) --- .github/workflows/backend.yml | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/.github/workflows/backend.yml b/.github/workflows/backend.yml index 3dc59bcf32009..cb94d78d38f2a 100644 --- a/.github/workflows/backend.yml +++ b/.github/workflows/backend.yml @@ -95,6 +95,10 @@ jobs: DOWNLOAD_URL: https://github.com/clj-kondo/clj-kondo/releases/download steps: - uses: actions/checkout@v3 + - name: Prepare back-end environment + uses: ./.github/actions/prepare-backend + with: + m2-cache-key: 'kondo' - name: Install clj-kondo run: | curl -OL ${DOWNLOAD_URL}/v${CLJ_KONDO_VERSION}/clj-kondo-${CLJ_KONDO_VERSION}-linux-static-amd64.zip From 9015166747476c56f191ea84572c715290fb1e0d Mon Sep 17 00:00:00 2001 From: Cal Herries <39073188+calherries@users.noreply.github.com> Date: Thu, 30 Nov 2023 15:02:58 +0200 Subject: [PATCH 6/9] Extend "Add check for connection status before sync" to presto-jdbc, oracle, and vertica (#36119) --- .../metabase/driver/bigquery_cloud_sdk.clj | 38 ++++++------- .../metabase/test/data/bigquery_cloud_sdk.clj | 7 ++- .../src/metabase/driver/presto_jdbc.clj | 8 +++ test/metabase/driver_test.clj | 55 ++++++++++++++++--- 4 files changed, 79 insertions(+), 29 deletions(-) diff --git a/modules/drivers/bigquery-cloud-sdk/src/metabase/driver/bigquery_cloud_sdk.clj b/modules/drivers/bigquery-cloud-sdk/src/metabase/driver/bigquery_cloud_sdk.clj index cb084f22dfacd..f09e7c68c24b0 100644 --- a/modules/drivers/bigquery-cloud-sdk/src/metabase/driver/bigquery_cloud_sdk.clj +++ b/modules/drivers/bigquery-cloud-sdk/src/metabase/driver/bigquery_cloud_sdk.clj @@ -52,9 +52,9 @@ '("https://www.googleapis.com/auth/bigquery" "https://www.googleapis.com/auth/drive")) -(defn- database->client - ^BigQuery [database] - (let [creds (bigquery.common/database-details->service-account-credential (:details database)) +(defn- database-details->client + ^BigQuery [details] + (let [creds (bigquery.common/database-details->service-account-credential details) bq-bldr (doto (BigQueryOptions/newBuilder) 
(.setCredentials (.createScoped creds bigquery-scopes)))] (.. bq-bldr build getService))) @@ -65,25 +65,23 @@ (defn- list-tables "Fetch all tables (new pages are loaded automatically by the API)." - (^Iterable [database] - (list-tables database false)) - (^Iterable [{{:keys [project-id dataset-filters-type dataset-filters-patterns]} :details, :as database} validate-dataset?] - (list-tables (database->client database) - (or project-id (bigquery.common/database-details->credential-project-id (:details database))) - dataset-filters-type - dataset-filters-patterns - (boolean validate-dataset?))) - (^Iterable [^BigQuery client ^String project-id ^String filter-type ^String filter-patterns ^Boolean validate-dataset?] - (let [datasets (.listDatasets client project-id (u/varargs BigQuery$DatasetListOption)) - inclusion-patterns (when (= "inclusion" filter-type) filter-patterns) - exclusion-patterns (when (= "exclusion" filter-type) filter-patterns) + (^Iterable [database-details] + (list-tables database-details {:validate-dataset? false})) + (^Iterable [{:keys [project-id dataset-filters-type dataset-filters-patterns] :as details} {:keys [validate-dataset?]}] + (let [client (database-details->client details) + project-id (or project-id (bigquery.common/database-details->credential-project-id details)) + datasets (.listDatasets client project-id (u/varargs BigQuery$DatasetListOption)) + inclusion-patterns (when (= "inclusion" dataset-filters-type) dataset-filters-patterns) + exclusion-patterns (when (= "exclusion" dataset-filters-type) dataset-filters-patterns) dataset-iter (for [^Dataset dataset (.iterateAll datasets) :let [^DatasetId dataset-id (.. dataset getDatasetId)] :when (driver.s/include-schema? inclusion-patterns exclusion-patterns (.getDataset dataset-id))] dataset-id)] - (when (and (not= filter-type "all") validate-dataset? (zero? (count dataset-iter))) + (when (and (not= dataset-filters-type "all") + validate-dataset? + (zero? (count dataset-iter))) (throw (ex-info (tru "Looks like we cannot find any matching datasets.") {::driver/can-connect-message? true}))) (apply concat (for [^DatasetId dataset-id dataset-iter] @@ -94,7 +92,7 @@ (defmethod driver/describe-database :bigquery-cloud-sdk [_ database] - (let [tables (list-tables database)] + (let [tables (list-tables (:details database))] {:tables (set (for [^Table table tables :let [^TableId table-id (.getTableId table) ^String dataset-id (.getDataset table-id)]] @@ -103,7 +101,7 @@ (defmethod driver/can-connect? :bigquery-cloud-sdk [_ details-map] ;; check whether we can connect by seeing whether listing tables succeeds - (try (some? (list-tables {:details details-map} ::validate-dataset)) + (try (some? (list-tables details-map {:validate-dataset? true})) (catch Exception e (when (::driver/can-connect-message? (ex-data e)) (throw e)) @@ -115,7 +113,7 @@ (mu/defn ^:private get-table :- (ms/InstanceOfClass Table) (^Table [{{:keys [project-id]} :details, :as database} dataset-id table-id] - (get-table (database->client database) project-id dataset-id table-id)) + (get-table (database-details->client (:details database)) project-id dataset-id table-id)) (^Table [^BigQuery client :- (ms/InstanceOfClass BigQuery) project-id :- [:maybe ::lib.schema.common/non-blank-string] @@ -297,7 +295,7 @@ (defn- execute-bigquery-on-db ^TableResult [database sql parameters cancel-chan cancel-requested?] 
(execute-bigquery - (database->client database) + (database-details->client (:details database)) sql parameters cancel-chan diff --git a/modules/drivers/bigquery-cloud-sdk/test/metabase/test/data/bigquery_cloud_sdk.clj b/modules/drivers/bigquery-cloud-sdk/test/metabase/test/data/bigquery_cloud_sdk.clj index 815913cd7befc..7ed465ac733ff 100644 --- a/modules/drivers/bigquery-cloud-sdk/test/metabase/test/data/bigquery_cloud_sdk.clj +++ b/modules/drivers/bigquery-cloud-sdk/test/metabase/test/data/bigquery_cloud_sdk.clj @@ -69,7 +69,7 @@ (defn- bigquery "Get an instance of a `Bigquery` client." ^BigQuery [] - (#'bigquery/database->client {:details (test-db-details)})) + (#'bigquery/database-details->client (test-db-details))) (defn project-id "BigQuery project ID that we're using for tests, either from the env var `MB_BIGQUERY_TEST_PROJECT_ID`, or if that is @@ -82,7 +82,10 @@ (defmethod tx/dbdef->connection-details :bigquery-cloud-sdk [_driver _context {:keys [database-name]}] - (assoc (test-db-details) :dataset-id (test-dataset-id database-name) :include-user-id-and-hash true)) + (assoc (test-db-details) + :dataset-filters-type "inclusion" + :dataset-filters-patterns (test-dataset-id database-name) + :include-user-id-and-hash true)) ;;; -------------------------------------------------- Loading Data -------------------------------------------------- diff --git a/modules/drivers/presto-jdbc/src/metabase/driver/presto_jdbc.clj b/modules/drivers/presto-jdbc/src/metabase/driver/presto_jdbc.clj index 5312e69fa54cf..4bc561f556d23 100644 --- a/modules/drivers/presto-jdbc/src/metabase/driver/presto_jdbc.clj +++ b/modules/drivers/presto-jdbc/src/metabase/driver/presto_jdbc.clj @@ -612,6 +612,14 @@ [_driver _database _table] nil) +(defmethod driver/can-connect? :presto-jdbc + [driver {:keys [catalog], :as details}] + (and ((get-method driver/can-connect? :sql-jdbc) driver details) + (sql-jdbc.conn/with-connection-spec-for-testing-connection [spec [driver details]] + ;; jdbc/query is used to see if we throw, we want to ignore the results + (jdbc/query spec (format "SHOW SCHEMAS FROM %s" catalog)) + true))) + ;;; +----------------------------------------------------------------------------------------------------------------+ ;;; | sql-jdbc implementations | ;;; +----------------------------------------------------------------------------------------------------------------+ diff --git a/test/metabase/driver_test.clj b/test/metabase/driver_test.clj index 3caea41014ebb..bb3b1bea6a186 100644 --- a/test/metabase/driver_test.clj +++ b/test/metabase/driver_test.clj @@ -3,6 +3,7 @@ [cheshire.core :as json] [clojure.test :refer :all] [metabase.driver :as driver] + [metabase.driver.h2 :as h2] [metabase.driver.impl :as driver.impl] [metabase.plugins.classloader :as classloader] [metabase.task.sync-databases :as task.sync-databases] @@ -79,12 +80,47 @@ :field-definitions [{:field-name "foo", :base-type :type/Text}] :rows [["bar"]]}]})) +(deftest can-connect-with-destroy-db-test + (testing "driver/can-connect? should fail or throw after destroying a database" + (mt/test-drivers (->> (mt/normal-drivers) + ;; athena is a special case because connections aren't made with a single database, + ;; but to an S3 bucket that may contain many databases + (remove #{:athena})) + (let [database-name (mt/random-name) + dbdef (basic-db-definition database-name)] + (mt/dataset dbdef + (let [db (mt/db) + details (tx/dbdef->connection-details driver/*driver* :db dbdef)] + (testing "can-connect? 
should return true before deleting the database"
+              (is (true? (binding [h2/*allow-testing-h2-connections* true]
+                           (driver/can-connect? driver/*driver* details)))))
+            ;; release db resources like connection pools so we don't have to wait to finish syncing before destroying the db
+            (driver/notify-database-updated driver/*driver* db)
+            (testing "after deleting a database, can-connect? should return false or throw an exception"
+              (let [;; in the case of some cloud databases, the test database is never created, and can't or shouldn't be destroyed.
+                    ;; so fake it by changing the database details
+                    details (case driver/*driver*
+                              (:redshift :snowflake :vertica) (assoc details :db (mt/random-name))
+                              :oracle      (assoc details :service-name (mt/random-name))
+                              :presto-jdbc (assoc details :catalog (mt/random-name))
+                              ;; otherwise destroy the db and use the original details
+                              (do
+                                (tx/destroy-db! driver/*driver* dbdef)
+                                details))]
+                (is (false? (try
+                              (binding [h2/*allow-testing-h2-connections* true]
+                                (driver/can-connect? driver/*driver* details))
+                              (catch Exception _
+                                false))))))
+            ;; clean up the database
+            (t2/delete! :model/Database (u/the-id db))))))))
+
 (deftest check-can-connect-before-sync-test
   (testing "Database sync should short-circuit and fail if the database at the connection has been deleted (metabase#7526)"
     (mt/test-drivers (->> (mt/normal-drivers)
                           ;; athena is a special case because connections aren't made with a single database,
                           ;; but to an S3 bucket that may contain many databases
-                          (remove #{:athena :oracle :vertica :presto-jdbc}))
+                          (remove #{:athena}))
       (let [database-name (mt/random-name)
             dbdef         (basic-db-definition database-name)]
         (mt/dataset dbdef
@@ -95,7 +131,7 @@
                          (fn [[log-level throwable message]]
                            (and (= log-level :warn)
                                 (instance? clojure.lang.ExceptionInfo throwable)
-                                (re-matches #"^Cannot sync Database (.+): (.+)" message)))
+                                (re-matches #"^Cannot sync Database ([\s\S]+): ([\s\S]+)" message)))
                          (mt/with-log-messages-for-level :warn
                            (#'task.sync-databases/sync-and-analyze-database*! (u/the-id db))))))]
             (testing "sense checks before deleting the database"
@@ -107,11 +143,16 @@
             ;; release db resources like connection pools so we don't have to wait to finish syncing before destroying the db
             (driver/notify-database-updated driver/*driver* db)
             ;; destroy the db
-            (if (contains? #{:redshift :snowflake} driver/*driver*)
-              ;; in the case of some cloud databases, the test database is never created, and shouldn't be destroyed.
-              ;; so fake it by redefining the database name on the dbdef
-              (t2/update! :model/Database (u/the-id db)
-                          {:details (assoc (:details (mt/db)) :db "fake-db-name-that-definitely-wont-be-used")})
+            (if (contains? #{:redshift :snowflake :vertica :presto-jdbc :oracle} driver/*driver*)
+              ;; in the case of some cloud databases, the test database is never created, and can't or shouldn't be destroyed.
+              ;; so fake it by changing the database details
+              (let [details     (:details (mt/db))
+                    new-details (case driver/*driver*
+                                  (:redshift :snowflake :vertica) (assoc details :db (mt/random-name))
+                                  :oracle      (assoc details :service-name (mt/random-name))
+                                  :presto-jdbc (assoc details :catalog (mt/random-name)))]
+                (t2/update! :model/Database (u/the-id db) {:details new-details}))
+              ;; otherwise destroy the db and use the original details
               (tx/destroy-db! driver/*driver* dbdef))
             (testing "after deleting a database, sync should fail"
               (testing "1: sync-and-analyze-database! 
should log a warning and fail early"

From 4d3a6bd4080db3f84ddd570d4911f287465b480f Mon Sep 17 00:00:00 2001
From: Ngoc Khuat
Date: Thu, 30 Nov 2023 21:23:54 +0700
Subject: [PATCH 7/9] Alert/Dashboard subscription: render question title as <a> tag (#36220)

---
 src/metabase/pulse/render.clj       | 12 ++++++++----
 test/metabase/pulse/render_test.clj | 10 ++++++++++
 2 files changed, 18 insertions(+), 4 deletions(-)

diff --git a/src/metabase/pulse/render.clj b/src/metabase/pulse/render.clj
index baecdd1b303d3..176535c3b1687 100644
--- a/src/metabase/pulse/render.clj
+++ b/src/metabase/pulse/render.clj
@@ -26,6 +26,10 @@
   "Should the rendered pulse include a card description? (default: `false`)"
   false)
 
+(defn- card-href
+  [card]
+  (h (urls/card-url (:id card))))
+
 (s/defn ^:private make-title-if-needed :- (s/maybe common/RenderedPulseCard)
   [render-type card dashcard]
   (when *include-title*
@@ -42,7 +46,10 @@
             [:tr
              [:td {:style (style/style {:padding :0
                                         :margin  :0})}
-              [:span {:style (style/style (style/header-style))}
+              [:a {:style  (style/style (style/header-style))
+                   :href   (card-href card)
+                   :target "_blank"
+                   :rel    "noopener noreferrer"}
                (h card-name)]]
              [:td {:style (style/style {:text-align :right})}
               (when *include-buttons*
@@ -138,9 +145,6 @@
        (log/error e (trs "Pulse card render error"))
        (body/render :render-error nil nil nil nil nil))))))
 
-(defn- card-href
-  [card]
-  (h (urls/card-url (:id card))))
 
 (s/defn render-pulse-card :- common/RenderedPulseCard
   "Render a single `card` for a `Pulse` to Hiccup HTML. `result` is the QP results. Returns a map with keys
diff --git a/test/metabase/pulse/render_test.clj b/test/metabase/pulse/render_test.clj
index 9e2f2d6ed6d8e..985bc15c1624e 100644
--- a/test/metabase/pulse/render_test.clj
+++ b/test/metabase/pulse/render_test.clj
@@ -198,3 +198,13 @@
            (is (= expected
                   (->> (get-in table [3 1])
                        (map #(peek (get (vec (get % 2)) tax-col))))))))))))
+
+(deftest title-should-be-an-a-tag-test
+  (testing "the title of the card should be an <a> tag so you can click on title using old outlook clients (#12901)"
+    (mt/with-temp [Card card {:name          "A Card"
+                              :dataset_query (mt/mbql-query venues {:limit 1})}]
+      (mt/with-temp-env-var-value [mb-site-url "https://mb.com"]
+        (let [rendered-card-content (:content (binding [render/*include-title* true]
+                                                (render/render-pulse-card :inline (pulse/defaulted-timezone card) card nil (qp/process-query (:dataset_query card)))))]
+          (is (some? 
(mbql.u/match-one rendered-card-content + [:a (_ :guard #(= (format "https://mb.com/question/%d" (:id card)) (:href %))) "A Card"])))))))) From 5bc1ee389cf18971eafdfa54c124b57ced227dd2 Mon Sep 17 00:00:00 2001 From: Kamil Mielnik Date: Thu, 30 Nov 2023 21:40:33 +0700 Subject: [PATCH 8/9] Rename "Aggregatable" to "Aggregable" (#36165) * Rename Aggregatable to Aggregable in frontend * Rename Aggregatable to Aggregable in backend --- frontend/src/metabase-lib/types.ts | 2 +- .../AggregationPicker/AggregationPicker.tsx | 2 +- .../AggregationPicker.unit.spec.tsx | 2 +- .../notebook/steps/AggregateStep/AggregateStep.tsx | 8 ++++---- .../AddAggregationButton/AddAggregationButton.tsx | 2 +- .../AggregationItem/AggregationItem.tsx | 2 +- .../sidebars/SummarizeSidebar/SummarizeSidebar.tsx | 4 ++-- src/metabase/lib/aggregation.cljc | 14 +++++++------- 8 files changed, 18 insertions(+), 18 deletions(-) diff --git a/frontend/src/metabase-lib/types.ts b/frontend/src/metabase-lib/types.ts index 1baf89815b804..0091c695afd89 100644 --- a/frontend/src/metabase-lib/types.ts +++ b/frontend/src/metabase-lib/types.ts @@ -25,7 +25,7 @@ export type MetricMetadata = unknown & { _opaque: typeof MetricMetadata }; declare const AggregationClause: unique symbol; export type AggregationClause = unknown & { _opaque: typeof AggregationClause }; -export type Aggregatable = AggregationClause | MetricMetadata; +export type Aggregable = AggregationClause | MetricMetadata; declare const AggregationOperator: unique symbol; export type AggregationOperator = unknown & { diff --git a/frontend/src/metabase/common/components/AggregationPicker/AggregationPicker.tsx b/frontend/src/metabase/common/components/AggregationPicker/AggregationPicker.tsx index 3a1ea42526bef..5c5d9fb722e37 100644 --- a/frontend/src/metabase/common/components/AggregationPicker/AggregationPicker.tsx +++ b/frontend/src/metabase/common/components/AggregationPicker/AggregationPicker.tsx @@ -39,7 +39,7 @@ interface AggregationPickerProps { legacyQuery: StructuredQuery; legacyClause?: LegacyAggregation; maxHeight?: number; - onSelect: (operator: Lib.Aggregatable) => void; + onSelect: (operator: Lib.Aggregable) => void; onSelectLegacy: (operator: LegacyAggregationClause) => void; onClose?: () => void; } diff --git a/frontend/src/metabase/common/components/AggregationPicker/AggregationPicker.unit.spec.tsx b/frontend/src/metabase/common/components/AggregationPicker/AggregationPicker.unit.spec.tsx index 210515bd6acea..ab216c28789a6 100644 --- a/frontend/src/metabase/common/components/AggregationPicker/AggregationPicker.unit.spec.tsx +++ b/frontend/src/metabase/common/components/AggregationPicker/AggregationPicker.unit.spec.tsx @@ -143,7 +143,7 @@ function setup({ const onSelect = jest.fn(); const onSelectLegacy = jest.fn(); - function handleSelect(clause: Lib.Aggregatable) { + function handleSelect(clause: Lib.Aggregable) { const nextQuery = Lib.aggregate(query, 0, clause); const aggregations = Lib.aggregations(nextQuery, 0); const recentAggregation = aggregations[aggregations.length - 1]; diff --git a/frontend/src/metabase/query_builder/components/notebook/steps/AggregateStep/AggregateStep.tsx b/frontend/src/metabase/query_builder/components/notebook/steps/AggregateStep/AggregateStep.tsx index 78aa7dd11b66e..ff1619c81d451 100644 --- a/frontend/src/metabase/query_builder/components/notebook/steps/AggregateStep/AggregateStep.tsx +++ b/frontend/src/metabase/query_builder/components/notebook/steps/AggregateStep/AggregateStep.tsx @@ -33,14 +33,14 @@ export function 
AggregateStep({ return Lib.aggregations(topLevelQuery, stageIndex); }, [topLevelQuery, stageIndex]); - const handleAddAggregation = (aggregation: Lib.Aggregatable) => { + const handleAddAggregation = (aggregation: Lib.Aggregable) => { const nextQuery = Lib.aggregate(topLevelQuery, stageIndex, aggregation); updateQuery(nextQuery); }; const handleUpdateAggregation = ( currentClause: Lib.AggregationClause, - nextClause: Lib.Aggregatable, + nextClause: Lib.Aggregable, ) => { const nextQuery = Lib.replaceClause( topLevelQuery, @@ -92,9 +92,9 @@ interface AggregationPopoverProps { clause?: Lib.AggregationClause; onUpdateAggregation: ( currentClause: Lib.AggregationClause, - nextClause: Lib.Aggregatable, + nextClause: Lib.Aggregable, ) => void; - onAddAggregation: (aggregation: Lib.Aggregatable) => void; + onAddAggregation: (aggregation: Lib.Aggregable) => void; legacyQuery: StructuredQuery; clauseIndex?: number; diff --git a/frontend/src/metabase/query_builder/components/view/sidebars/SummarizeSidebar/AddAggregationButton/AddAggregationButton.tsx b/frontend/src/metabase/query_builder/components/view/sidebars/SummarizeSidebar/AddAggregationButton/AddAggregationButton.tsx index 7389563ad4ee5..a094d75db78e2 100644 --- a/frontend/src/metabase/query_builder/components/view/sidebars/SummarizeSidebar/AddAggregationButton/AddAggregationButton.tsx +++ b/frontend/src/metabase/query_builder/components/view/sidebars/SummarizeSidebar/AddAggregationButton/AddAggregationButton.tsx @@ -11,7 +11,7 @@ const STAGE_INDEX = -1; interface AddAggregationButtonProps { query: Lib.Query; legacyQuery: StructuredQuery; - onAddAggregation: (aggregation: Lib.Aggregatable) => void; + onAddAggregation: (aggregation: Lib.Aggregable) => void; onLegacyQueryChange: (nextLegacyQuery: StructuredQuery) => void; } diff --git a/frontend/src/metabase/query_builder/components/view/sidebars/SummarizeSidebar/AggregationItem/AggregationItem.tsx b/frontend/src/metabase/query_builder/components/view/sidebars/SummarizeSidebar/AggregationItem/AggregationItem.tsx index b51818f5bb59d..e850d87455340 100644 --- a/frontend/src/metabase/query_builder/components/view/sidebars/SummarizeSidebar/AggregationItem/AggregationItem.tsx +++ b/frontend/src/metabase/query_builder/components/view/sidebars/SummarizeSidebar/AggregationItem/AggregationItem.tsx @@ -11,7 +11,7 @@ interface AggregationItemProps { aggregation: Lib.AggregationClause; aggregationIndex: number; legacyQuery: StructuredQuery; - onUpdate: (nextAggregation: Lib.Aggregatable) => void; + onUpdate: (nextAggregation: Lib.Aggregable) => void; onRemove: () => void; onLegacyQueryChange: (nextLegacyQuery: StructuredQuery) => void; } diff --git a/frontend/src/metabase/query_builder/components/view/sidebars/SummarizeSidebar/SummarizeSidebar.tsx b/frontend/src/metabase/query_builder/components/view/sidebars/SummarizeSidebar/SummarizeSidebar.tsx index f0698ac1c4a62..18ea45b820ba9 100644 --- a/frontend/src/metabase/query_builder/components/view/sidebars/SummarizeSidebar/SummarizeSidebar.tsx +++ b/frontend/src/metabase/query_builder/components/view/sidebars/SummarizeSidebar/SummarizeSidebar.tsx @@ -60,7 +60,7 @@ export function SummarizeSidebar({ ); const handleAddAggregation = useCallback( - (aggregation: Lib.Aggregatable) => { + (aggregation: Lib.Aggregable) => { const nextQuery = Lib.aggregate(query, STAGE_INDEX, aggregation); onQueryChange(nextQuery); }, @@ -68,7 +68,7 @@ export function SummarizeSidebar({ ); const handleUpdateAggregation = useCallback( - (aggregation: Lib.AggregationClause, 
nextAggregation: Lib.Aggregatable) => { + (aggregation: Lib.AggregationClause, nextAggregation: Lib.Aggregable) => { const nextQuery = Lib.replaceClause( query, STAGE_INDEX, diff --git a/src/metabase/lib/aggregation.cljc b/src/metabase/lib/aggregation.cljc index 41d50ea051bdb..70353f84076ee 100644 --- a/src/metabase/lib/aggregation.cljc +++ b/src/metabase/lib/aggregation.cljc @@ -224,7 +224,7 @@ [aggregation-clause] aggregation-clause) -(def ^:private Aggregatable +(def ^:private Aggregable "Schema for something you can pass to [[aggregate]] to add to a query as an aggregation." [:or ::lib.schema.aggregation/aggregation @@ -233,16 +233,16 @@ (mu/defn aggregate :- ::lib.schema/query "Adds an aggregation to query." - ([query aggregatable] - (aggregate query -1 aggregatable)) + ([query aggregable] + (aggregate query -1 aggregable)) ([query :- ::lib.schema/query stage-number :- :int - aggregatable :- Aggregatable] + aggregable :- Aggregable] ;; if this is a Metric metadata, convert it to `:metric` MBQL clause before adding. - (if (= (lib.dispatch/dispatch-value aggregatable) :metadata/metric) - (recur query stage-number (lib.ref/ref aggregatable)) - (lib.util/add-summary-clause query stage-number :aggregation aggregatable)))) + (if (= (lib.dispatch/dispatch-value aggregable) :metadata/metric) + (recur query stage-number (lib.ref/ref aggregable)) + (lib.util/add-summary-clause query stage-number :aggregation aggregable)))) (mu/defn aggregations :- [:maybe [:sequential ::lib.schema.aggregation/aggregation]] "Get the aggregations in a given stage of a query." From 9cd2bdbfbd519c842911adc9b13e839d0eec87c7 Mon Sep 17 00:00:00 2001 From: shaun Date: Thu, 30 Nov 2023 09:55:10 -0600 Subject: [PATCH 9/9] Disable autofocus on SettingsSAMLForm (#36150) * Disable autofocus on SettingsSAMLForm * Update index.js --- .../auth/components/SettingsSAMLForm/SettingsSAMLForm.jsx | 1 - enterprise/frontend/src/metabase-enterprise/auth/index.js | 1 - 2 files changed, 2 deletions(-) diff --git a/enterprise/frontend/src/metabase-enterprise/auth/components/SettingsSAMLForm/SettingsSAMLForm.jsx b/enterprise/frontend/src/metabase-enterprise/auth/components/SettingsSAMLForm/SettingsSAMLForm.jsx index a74f0b15a71a7..e75d151284639 100644 --- a/enterprise/frontend/src/metabase-enterprise/auth/components/SettingsSAMLForm/SettingsSAMLForm.jsx +++ b/enterprise/frontend/src/metabase-enterprise/auth/components/SettingsSAMLForm/SettingsSAMLForm.jsx @@ -131,7 +131,6 @@ const SettingsSAMLForm = ({ elements = [], settingValues = {}, onSubmit }) => { label={t`SAML Identity Provider URL`} placeholder="https://your-org-name.yourIDP.com" required - autoFocus /> ({ placeholder: "https://example.com/app/my_saml_app/abc123/sso/saml", type: "string", required: true, - autoFocus: true, }, { key: "saml-identity-provider-issuer",