Merge branch 'metabase:master' into master
zaycev authored Nov 30, 2023
2 parents 1abdb92 + 9cd2bdb commit c96cfe2
Showing 35 changed files with 613 additions and 461 deletions.
4 changes: 4 additions & 0 deletions .github/workflows/backend.yml
@@ -95,6 +95,10 @@ jobs:
       DOWNLOAD_URL: https://github.com/clj-kondo/clj-kondo/releases/download
     steps:
     - uses: actions/checkout@v3
+    - name: Prepare back-end environment
+      uses: ./.github/actions/prepare-backend
+      with:
+        m2-cache-key: 'kondo'
     - name: Install clj-kondo
       run: |
         curl -OL ${DOWNLOAD_URL}/v${CLJ_KONDO_VERSION}/clj-kondo-${CLJ_KONDO_VERSION}-linux-static-amd64.zip
4 changes: 2 additions & 2 deletions .github/workflows/e2e-tests-replay.yml
@@ -3,7 +3,7 @@ name: E2E Tests using Replay.io browser
 on:
   # We'll record runs using Replay.io and their browser on a schedule as an experiment
   schedule:
-    - cron: '0 */3 * * *'
+    - cron: '0 */2 * * *'
 
 jobs:
   e2e-matrix-builder:
@@ -19,7 +19,7 @@ jobs:
 
 
   e2e-tests:
-    needs: [test-run-id, e2e-matrix-builder]
+    needs: e2e-matrix-builder
     runs-on: ${{ matrix.runner }}
     timeout-minutes: 90
     name: e2e-tests-${{ matrix.name }}-${{ matrix.edition }}
3 changes: 3 additions & 0 deletions docs/developers-guide/driver-changelog.md
@@ -75,6 +75,9 @@ title: Driver interface changelog
   will be used to call the corresponding `metabase.driver.sql.query-processor/date` implementation to convert the `field`.
   Returns `nil` if the conversion is not necessary for this `field` and `param-type` combination.
 
+- The multimethod `metabase.driver.sql-jdbc.execute/inject-remark` has been added. It allows JDBC-based drivers to
+  override the default behavior of how SQL query remarks are added to queries (prepending them as a comment).
+
 ## Metabase 0.47.0
 
 - A new driver feature has been added: `:schemas`. This feature signals whether the database organizes tables in
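A note on the new multimethod above: drivers opt in by providing their own method implementation. A minimal sketch, assuming a hypothetical :my-driver keyword — the three-argument signature matches the Snowflake implementation added later in this commit, but the appending strategy here is only illustrative:

(ns my.driver
  (:require [metabase.driver.sql-jdbc.execute :as sql-jdbc.execute]))

;; Append the remark as a trailing line comment instead of prepending it,
;; for databases that strip leading comments from incoming SQL.
(defmethod sql-jdbc.execute/inject-remark :my-driver
  [_driver sql remark]
  (str sql "\n\n-- " remark))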
@@ -131,7 +131,6 @@ const SettingsSAMLForm = ({ elements = [], settingValues = {}, onSubmit }) => {
         label={t`SAML Identity Provider URL`}
         placeholder="https://your-org-name.yourIDP.com"
         required
-        autoFocus
       />
       <FormTextarea
         {...fields["saml-identity-provider-certificate"]}
1 change: 0 additions & 1 deletion enterprise/frontend/src/metabase-enterprise/auth/index.js
@@ -90,7 +90,6 @@ PLUGIN_ADMIN_SETTINGS_UPDATES.push(sections => ({
         placeholder: "https://example.com/app/my_saml_app/abc123/sso/saml",
         type: "string",
         required: true,
-        autoFocus: true,
       },
       {
         key: "saml-identity-provider-issuer",
2 changes: 1 addition & 1 deletion frontend/src/metabase-lib/types.ts
@@ -25,7 +25,7 @@ export type MetricMetadata = unknown & { _opaque: typeof MetricMetadata };
 declare const AggregationClause: unique symbol;
 export type AggregationClause = unknown & { _opaque: typeof AggregationClause };
 
-export type Aggregatable = AggregationClause | MetricMetadata;
+export type Aggregable = AggregationClause | MetricMetadata;
 
 declare const AggregationOperator: unique symbol;
 export type AggregationOperator = unknown & {
@@ -39,7 +39,7 @@ interface AggregationPickerProps {
   legacyQuery: StructuredQuery;
   legacyClause?: LegacyAggregation;
   maxHeight?: number;
-  onSelect: (operator: Lib.Aggregatable) => void;
+  onSelect: (operator: Lib.Aggregable) => void;
   onSelectLegacy: (operator: LegacyAggregationClause) => void;
   onClose?: () => void;
 }
@@ -143,7 +143,7 @@ function setup({
   const onSelect = jest.fn();
   const onSelectLegacy = jest.fn();
 
-  function handleSelect(clause: Lib.Aggregatable) {
+  function handleSelect(clause: Lib.Aggregable) {
     const nextQuery = Lib.aggregate(query, 0, clause);
     const aggregations = Lib.aggregations(nextQuery, 0);
     const recentAggregation = aggregations[aggregations.length - 1];
@@ -33,14 +33,14 @@ export function AggregateStep({
     return Lib.aggregations(topLevelQuery, stageIndex);
   }, [topLevelQuery, stageIndex]);
 
-  const handleAddAggregation = (aggregation: Lib.Aggregatable) => {
+  const handleAddAggregation = (aggregation: Lib.Aggregable) => {
     const nextQuery = Lib.aggregate(topLevelQuery, stageIndex, aggregation);
     updateQuery(nextQuery);
   };
 
   const handleUpdateAggregation = (
     currentClause: Lib.AggregationClause,
-    nextClause: Lib.Aggregatable,
+    nextClause: Lib.Aggregable,
   ) => {
     const nextQuery = Lib.replaceClause(
       topLevelQuery,
@@ -92,9 +92,9 @@ interface AggregationPopoverProps {
   clause?: Lib.AggregationClause;
   onUpdateAggregation: (
     currentClause: Lib.AggregationClause,
-    nextClause: Lib.Aggregatable,
+    nextClause: Lib.Aggregable,
   ) => void;
-  onAddAggregation: (aggregation: Lib.Aggregatable) => void;
+  onAddAggregation: (aggregation: Lib.Aggregable) => void;
 
   legacyQuery: StructuredQuery;
   clauseIndex?: number;
@@ -11,7 +11,7 @@ const STAGE_INDEX = -1;
 interface AddAggregationButtonProps {
   query: Lib.Query;
   legacyQuery: StructuredQuery;
-  onAddAggregation: (aggregation: Lib.Aggregatable) => void;
+  onAddAggregation: (aggregation: Lib.Aggregable) => void;
   onLegacyQueryChange: (nextLegacyQuery: StructuredQuery) => void;
 }
 
@@ -11,7 +11,7 @@ interface AggregationItemProps {
   aggregation: Lib.AggregationClause;
   aggregationIndex: number;
   legacyQuery: StructuredQuery;
-  onUpdate: (nextAggregation: Lib.Aggregatable) => void;
+  onUpdate: (nextAggregation: Lib.Aggregable) => void;
   onRemove: () => void;
   onLegacyQueryChange: (nextLegacyQuery: StructuredQuery) => void;
 }
@@ -60,15 +60,15 @@ export function SummarizeSidebar({
   );
 
   const handleAddAggregation = useCallback(
-    (aggregation: Lib.Aggregatable) => {
+    (aggregation: Lib.Aggregable) => {
      const nextQuery = Lib.aggregate(query, STAGE_INDEX, aggregation);
      onQueryChange(nextQuery);
     },
     [query, onQueryChange],
   );
 
   const handleUpdateAggregation = useCallback(
-    (aggregation: Lib.AggregationClause, nextAggregation: Lib.Aggregatable) => {
+    (aggregation: Lib.AggregationClause, nextAggregation: Lib.Aggregable) => {
      const nextQuery = Lib.replaceClause(
        query,
        STAGE_INDEX,
@@ -52,9 +52,9 @@
   '("https://www.googleapis.com/auth/bigquery"
     "https://www.googleapis.com/auth/drive"))
 
-(defn- database->client
-  ^BigQuery [database]
-  (let [creds   (bigquery.common/database-details->service-account-credential (:details database))
+(defn- database-details->client
+  ^BigQuery [details]
+  (let [creds   (bigquery.common/database-details->service-account-credential details)
         bq-bldr (doto (BigQueryOptions/newBuilder)
                   (.setCredentials (.createScoped creds bigquery-scopes)))]
     (.. bq-bldr build getService)))
@@ -65,25 +65,23 @@
 
 (defn- list-tables
   "Fetch all tables (new pages are loaded automatically by the API)."
-  (^Iterable [database]
-   (list-tables database false))
-  (^Iterable [{{:keys [project-id dataset-filters-type dataset-filters-patterns]} :details, :as database} validate-dataset?]
-   (list-tables (database->client database)
-                (or project-id (bigquery.common/database-details->credential-project-id (:details database)))
-                dataset-filters-type
-                dataset-filters-patterns
-                (boolean validate-dataset?)))
-  (^Iterable [^BigQuery client ^String project-id ^String filter-type ^String filter-patterns ^Boolean validate-dataset?]
-   (let [datasets           (.listDatasets client project-id (u/varargs BigQuery$DatasetListOption))
-         inclusion-patterns (when (= "inclusion" filter-type) filter-patterns)
-         exclusion-patterns (when (= "exclusion" filter-type) filter-patterns)
+  (^Iterable [database-details]
+   (list-tables database-details {:validate-dataset? false}))
+  (^Iterable [{:keys [project-id dataset-filters-type dataset-filters-patterns] :as details} {:keys [validate-dataset?]}]
+   (let [client             (database-details->client details)
+         project-id         (or project-id (bigquery.common/database-details->credential-project-id details))
+         datasets           (.listDatasets client project-id (u/varargs BigQuery$DatasetListOption))
+         inclusion-patterns (when (= "inclusion" dataset-filters-type) dataset-filters-patterns)
+         exclusion-patterns (when (= "exclusion" dataset-filters-type) dataset-filters-patterns)
         dataset-iter       (for [^Dataset dataset (.iterateAll datasets)
                                  :let  [^DatasetId dataset-id (.. dataset getDatasetId)]
                                  :when (driver.s/include-schema? inclusion-patterns
                                                                  exclusion-patterns
                                                                  (.getDataset dataset-id))]
                              dataset-id)]
-     (when (and (not= filter-type "all") validate-dataset? (zero? (count dataset-iter)))
+     (when (and (not= dataset-filters-type "all")
+                validate-dataset?
+                (zero? (count dataset-iter)))
       (throw (ex-info (tru "Looks like we cannot find any matching datasets.")
                       {::driver/can-connect-message? true})))
     (apply concat (for [^DatasetId dataset-id dataset-iter]
@@ -94,7 +92,7 @@
 
 (defmethod driver/describe-database :bigquery-cloud-sdk
   [_ database]
-  (let [tables (list-tables database)]
+  (let [tables (list-tables (:details database))]
     {:tables (set (for [^Table table tables
                         :let [^TableId table-id   (.getTableId table)
                               ^String  dataset-id (.getDataset table-id)]]
@@ -103,7 +101,7 @@
 (defmethod driver/can-connect? :bigquery-cloud-sdk
   [_ details-map]
   ;; check whether we can connect by seeing whether listing tables succeeds
-  (try (some? (list-tables {:details details-map} ::validate-dataset))
+  (try (some? (list-tables details-map {:validate-dataset? true}))
        (catch Exception e
          (when (::driver/can-connect-message? (ex-data e))
            (throw e))
@@ -115,7 +113,7 @@
 
 (mu/defn ^:private get-table :- (ms/InstanceOfClass Table)
   (^Table [{{:keys [project-id]} :details, :as database} dataset-id table-id]
-   (get-table (database->client database) project-id dataset-id table-id))
+   (get-table (database-details->client (:details database)) project-id dataset-id table-id))
 
   (^Table [^BigQuery client :- (ms/InstanceOfClass BigQuery)
            project-id       :- [:maybe ::lib.schema.common/non-blank-string]
@@ -297,7 +295,7 @@
 (defn- execute-bigquery-on-db
   ^TableResult [database sql parameters cancel-chan cancel-requested?]
   (execute-bigquery
-   (database->client database)
+   (database-details->client (:details database))
   sql
   parameters
  cancel-chan
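A note on the BigQuery refactor above: list-tables and database-details->client now take the plain :details map rather than the whole database object, with an options map controlling dataset validation. A hedged usage sketch — the detail values below are invented for illustration:

(comment
  ;; sync path: list tables without insisting that any dataset matched
  (list-tables {:project-id               "my-gcp-project"
                :dataset-filters-type     "inclusion"
                :dataset-filters-patterns "analytics*"})

  ;; connection-test path: raise a user-facing error when nothing matches
  (list-tables {:project-id               "my-gcp-project"
                :dataset-filters-type     "inclusion"
                :dataset-filters-patterns "analytics*"}
               {:validate-dataset? true}))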
@@ -69,7 +69,7 @@
 (defn- bigquery
   "Get an instance of a `Bigquery` client."
   ^BigQuery []
-  (#'bigquery/database->client {:details (test-db-details)}))
+  (#'bigquery/database-details->client (test-db-details)))
 
 (defn project-id
   "BigQuery project ID that we're using for tests, either from the env var `MB_BIGQUERY_TEST_PROJECT_ID`, or if that is
@@ -82,7 +82,10 @@
 
 (defmethod tx/dbdef->connection-details :bigquery-cloud-sdk
   [_driver _context {:keys [database-name]}]
-  (assoc (test-db-details) :dataset-id (test-dataset-id database-name) :include-user-id-and-hash true))
+  (assoc (test-db-details)
+         :dataset-filters-type "inclusion"
+         :dataset-filters-patterns (test-dataset-id database-name)
+         :include-user-id-and-hash true))
 
 
 ;;; -------------------------------------------------- Loading Data --------------------------------------------------
@@ -612,6 +612,14 @@
   [_driver _database _table]
   nil)
 
+(defmethod driver/can-connect? :presto-jdbc
+  [driver {:keys [catalog], :as details}]
+  (and ((get-method driver/can-connect? :sql-jdbc) driver details)
+       (sql-jdbc.conn/with-connection-spec-for-testing-connection [spec [driver details]]
+         ;; jdbc/query is used to see whether the statement throws; the results are ignored
+         (jdbc/query spec (format "SHOW SCHEMAS FROM %s" catalog))
+         true)))
+
 ;;; +----------------------------------------------------------------------------------------------------------------+
 ;;; |                                           sql-jdbc implementations                                              |
 ;;; +----------------------------------------------------------------------------------------------------------------+
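The Presto change above layers a catalog-specific probe on top of the inherited generic check by calling the parent method explicitly with get-method. A minimal sketch of that pattern, assuming a hypothetical :my-driver keyword and placeholder probe function:

(defmethod driver/can-connect? :my-driver
  [driver details]
  (and ;; run the generic :sql-jdbc connectivity check first
       ((get-method driver/can-connect? :sql-jdbc) driver details)
       ;; then a driver-specific probe; some-extra-check is a placeholder
       (some-extra-check details)))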
26 changes: 26 additions & 0 deletions modules/drivers/snowflake/src/metabase/driver/snowflake.clj
@@ -1,6 +1,8 @@
 (ns metabase.driver.snowflake
   "Snowflake Driver."
   (:require
+   [buddy.core.codecs :as codecs]
+   [cheshire.core :as json]
    [clojure.java.jdbc :as jdbc]
    [clojure.set :as set]
    [clojure.string :as str]
@@ -24,9 +26,11 @@
    [metabase.driver.sync :as driver.s]
    [metabase.lib.metadata :as lib.metadata]
    [metabase.models.secret :as secret]
+   [metabase.public-settings :as public-settings]
    [metabase.query-processor.error-type :as qp.error-type]
    [metabase.query-processor.store :as qp.store]
    [metabase.query-processor.timezone :as qp.timezone]
+   [metabase.query-processor.util :as qp.util]
    [metabase.query-processor.util.add-alias-info :as add]
    [metabase.util :as u]
    [metabase.util.date-2 :as u.date]
@@ -587,6 +591,28 @@
   [driver ps i t]
   (sql-jdbc.execute/set-parameter driver ps i (t/sql-timestamp (t/with-zone-same-instant t (t/zone-id "UTC")))))
 
+;;; --------------------------------------------------- Query remarks ---------------------------------------------------
+
+; Snowflake strips comments prepended to the SQL statement (default remark injection behavior). We should append the
+; remark instead.
+(defmethod sql-jdbc.execute/inject-remark :snowflake
+  [_ sql remark]
+  (str sql "\n\n-- " remark))
+
+(defmethod qp.util/query->remark :snowflake
+  [_ {{:keys [context executed-by card-id pulse-id dashboard-id query-hash]} :info,
+      query-type :type,
+      database-id :database}]
+  (json/generate-string {:client      "Metabase"
+                         :context     context
+                         :queryType   query-type
+                         :userId      executed-by
+                         :pulseId     pulse-id
+                         :cardId      card-id
+                         :dashboardId dashboard-id
+                         :databaseId  database-id
+                         :queryHash   (when (bytes? query-hash) (codecs/bytes->hex query-hash))
+                         :serverId    (public-settings/site-uuid)}))
+
 ;;; ------------------------------------------------- User Impersonation --------------------------------------------------
 
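Taken together, the two Snowflake methods above emit the query metadata as a trailing JSON comment instead of a leading one. An illustrative call — the SQL string and the abbreviated remark are invented for the example:

(comment
  (sql-jdbc.execute/inject-remark :snowflake
                                  "SELECT 1"
                                  "{\"client\":\"Metabase\",\"queryType\":\"query\"}")
  ;; => "SELECT 1\n\n-- {\"client\":\"Metabase\",\"queryType\":\"query\"}"
  )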
44 changes: 44 additions & 0 deletions modules/drivers/snowflake/test/metabase/driver/snowflake_test.clj
@@ -1,14 +1,17 @@
 (ns metabase.driver.snowflake-test
   (:require
+   [clojure.data.json :as json]
    [clojure.java.jdbc :as jdbc]
    [clojure.set :as set]
    [clojure.string :as str]
    [clojure.test :refer :all]
    [metabase.driver :as driver]
    [metabase.driver.sql :as driver.sql]
    [metabase.driver.sql-jdbc.connection :as sql-jdbc.conn]
+   [metabase.driver.sql-jdbc.execute :as sql-jdbc.execute]
    [metabase.models :refer [Table]]
    [metabase.models.database :refer [Database]]
+   [metabase.public-settings :as public-settings]
    [metabase.query-processor :as qp]
    [metabase.sync :as sync]
    [metabase.sync.util :as sync-util]
@@ -26,6 +29,17 @@
 
 (set! *warn-on-reflection* true)
 
+(defn- query->native [query]
+  (let [check-sql-fn (fn [_ _ sql & _]
+                       (throw (ex-info "done" {::native-query sql})))]
+    (with-redefs [sql-jdbc.execute/prepared-statement check-sql-fn
+                  sql-jdbc.execute/execute-statement! check-sql-fn]
+      (try
+        (qp/process-query query)
+        (is false "no statement created")
+        (catch Exception e
+          (-> e u/all-ex-data ::native-query))))))
+
 (use-fixtures :each (fn [thunk]
                       ;; 1. If sync fails when loading a test dataset, don't swallow the error; throw an Exception so we
                       ;; can debug it. This is much less confusing when trying to fix broken tests.
@@ -298,3 +312,33 @@
     ;; Special characters
     (is (= "USE ROLE \"Role.123\";" (driver.sql/set-role-statement :snowflake "Role.123")))
     (is (= "USE ROLE \"$role\";" (driver.sql/set-role-statement :snowflake "$role")))))
+
+(deftest remark-test
+  (testing "Queries should have a remark formatted as JSON appended to them with additional metadata"
+    (mt/test-driver :snowflake
+      (let [expected-map {"pulseId"     nil
+                          "serverId"    (public-settings/site-uuid)
+                          "client"      "Metabase"
+                          "queryHash"   "cb83d4f6eedc250edb0f2c16f8d9a21e5d42f322ccece1494c8ef3d634581fe2"
+                          "queryType"   "query"
+                          "cardId"      1234
+                          "dashboardId" 5678
+                          "context"     "ad-hoc"
+                          "userId"      1000
+                          "databaseId"  (mt/id)}
+            result-query (driver/prettify-native-form :snowflake
+                          (query->native
+                           (assoc
+                            (mt/mbql-query users {:limit 2000})
+                            :parameters [{:type   "id"
+                                          :target [:dimension [:field (mt/id :users :id) nil]]
+                                          :value  ["1" "2" "3"]}]
+                            :info {:executed-by 1000
+                                   :card-id 1234
+                                   :dashboard-id 5678
+                                   :context :ad-hoc
+                                   :query-hash (byte-array [-53 -125 -44 -10 -18 -36 37 14 -37 15 44 22 -8 -39 -94 30
+                                                            93 66 -13 34 -52 -20 -31 73 76 -114 -13 -42 52 88 31 -30])})))
+            result-comment (second (re-find #"-- (\{.*\})" result-query))
+            result-map (json/read-str result-comment)]
+        (is (= expected-map result-map))))))