+      <div className="input-title">
+        <span className="input-label">Selector</span>
+        {editMode ? (
+          <Select
+            value={alertOptions.selector}
+            onChange={(selector) => onChange({ selector })}
+            optionLabelProp="label"
+            dropdownMatchSelectWidth={false}
+            style={{ width: 80 }}
+          >
+            <Select.Option value="first" label="first">
+              first
+            </Select.Option>
+            <Select.Option value="min" label="min">
+              min
+            </Select.Option>
+            <Select.Option value="max" label="max">
+              max
+            </Select.Option>
+          </Select>
+        ) : (
+          <DisabledInput minWidth={70}>{alertOptions.selector}</DisabledInput>
+        )}
+      </div>
       <div className="input-title">
         <span className="input-label">Value column</span>
         {editMode ? (
           <Select
             value={alertOptions.column}
-            onChange={column => onChange({ column })}
+            onChange={(column) => onChange({ column })}
             dropdownMatchSelectWidth={false}
-            style={{ minWidth: 100 }}>
-            {columnNames.map(name => (
+            style={{ minWidth: 100 }}
+          >
+            {columnNames.map((name) => (
               <Select.Option key={name}>{name}</Select.Option>
             ))}
@@ -83,10 +134,11 @@ export default function Criteria({ columnNames, resultValues, alertOptions, onCh
       {editMode ? (
         <Select
           value={alertOptions.op}
-          onChange={op => onChange({ op })}
+          onChange={(op) => onChange({ op })}
           optionLabelProp="label"
           dropdownMatchSelectWidth={false}
-          style={{ width: 55 }}>
+          style={{ width: 55 }}
+        >
           <Select.Option value=">" label={CONDITIONS[">"]}>
             {CONDITIONS[">"]} greater than
@@ -125,7 +177,7 @@ export default function Criteria({ columnNames, resultValues, alertOptions, onCh
id="threshold-criterion"
style={{ width: 90 }}
value={alertOptions.value}
- onChange={e => onChange({ value: e.target.value })}
+ onChange={(e) => onChange({ value: e.target.value })}
/>
) : (
{alertOptions.value}
diff --git a/client/app/pages/alert/components/MenuButton.jsx b/client/app/pages/alert/components/MenuButton.jsx
index ce1a0b839f..1aeb64bda5 100644
--- a/client/app/pages/alert/components/MenuButton.jsx
+++ b/client/app/pages/alert/components/MenuButton.jsx
@@ -11,7 +11,7 @@ import LoadingOutlinedIcon from "@ant-design/icons/LoadingOutlined";
import EllipsisOutlinedIcon from "@ant-design/icons/EllipsisOutlined";
import PlainButton from "@/components/PlainButton";
-export default function MenuButton({ doDelete, canEdit, mute, unmute, muted }) {
+export default function MenuButton({ doDelete, canEdit, mute, unmute, evaluate, muted }) {
const [loading, setLoading] = useState(false);
const execute = useCallback(action => {
@@ -55,6 +55,9 @@ export default function MenuButton({ doDelete, canEdit, mute, unmute, muted }) {
Delete
+      <Menu.Item>
+        <PlainButton onClick={() => execute(evaluate)}>Evaluate</PlainButton>
+      </Menu.Item>
}>
@@ -69,6 +72,7 @@ MenuButton.propTypes = {
canEdit: PropTypes.bool.isRequired,
mute: PropTypes.func.isRequired,
unmute: PropTypes.func.isRequired,
+ evaluate: PropTypes.func.isRequired,
muted: PropTypes.bool,
};
diff --git a/client/app/pages/dashboards/components/DashboardHeader.jsx b/client/app/pages/dashboards/components/DashboardHeader.jsx
index 9c8bf82655..b8b27a3920 100644
--- a/client/app/pages/dashboards/components/DashboardHeader.jsx
+++ b/client/app/pages/dashboards/components/DashboardHeader.jsx
@@ -15,6 +15,7 @@ import { DashboardTagsControl } from "@/components/tags-control/TagsControl";
import getTags from "@/services/getTags";
import { clientConfig } from "@/services/auth";
import { policy } from "@/services/policy";
+import recordEvent from "@/services/recordEvent";
import { durationHumanize } from "@/lib/utils";
import { DashboardStatusEnum } from "../hooks/useDashboard";
@@ -118,6 +119,8 @@ function DashboardMoreOptionsButton({ dashboardConfiguration }) {
managePermissions,
gridDisabled,
isDashboardOwnerOrAdmin,
+ isDuplicating,
+ duplicateDashboard,
} = dashboardConfiguration;
const archive = () => {
@@ -141,6 +144,14 @@ function DashboardMoreOptionsButton({ dashboardConfiguration }) {
setEditingLayout(true)}>Edit
+        {!isDuplicating && dashboard.canEdit() && (
+          <Menu.Item>
+            <PlainButton onClick={duplicateDashboard}>
+              Fork <span className="sr-only">(opens in a new tab)</span>
+            </PlainButton>
+          </Menu.Item>
+        )}
{clientConfig.showPermissionsControl && isDashboardOwnerOrAdmin && (
Manage Permissions
@@ -175,6 +186,7 @@ function DashboardControl({ dashboardConfiguration, headerExtra }) {
fullscreen,
toggleFullscreen,
showShareDashboardDialog,
+ updateDashboard,
} = dashboardConfiguration;
const showPublishButton = dashboard.is_draft;
const showRefreshButton = true;
@@ -182,8 +194,14 @@ function DashboardControl({ dashboardConfiguration, headerExtra }) {
const canShareDashboard = canEditDashboard && !dashboard.is_draft;
const showShareButton = !clientConfig.disablePublicUrls && (dashboard.publicAccessEnabled || canShareDashboard);
const showMoreOptionsButton = canEditDashboard;
+
+ const unarchiveDashboard = () => {
+ recordEvent("unarchive", "dashboard", dashboard.id);
+ updateDashboard({ is_archived: false }, false);
+ };
return (
+      {dashboard.can_edit && dashboard.is_archived && (
+        <Button onClick={unarchiveDashboard}>Unarchive</Button>
+      )}
{!dashboard.is_archived && (
{showPublishButton && (
diff --git a/client/app/pages/dashboards/components/ShareDashboardDialog.jsx b/client/app/pages/dashboards/components/ShareDashboardDialog.jsx
index 838e9e30ea..5473a0b76b 100644
--- a/client/app/pages/dashboards/components/ShareDashboardDialog.jsx
+++ b/client/app/pages/dashboards/components/ShareDashboardDialog.jsx
@@ -94,12 +94,12 @@ class ShareDashboardDialog extends React.Component {
};
render() {
- const { dialog, dashboard } = this.props;
-
+ const { dialog, dashboard, hasOnlySafeQueries } = this.props;
+ const headerContent = this.constructor.headerContent;
return (
-
+
)}
+
{
const aclUrl = `api/dashboards/${dashboard.id}/acl`;
PermissionsEditorDialog.showModal({
@@ -243,6 +246,8 @@ function useDashboard(dashboardData) {
showAddTextboxDialog,
showAddWidgetDialog,
managePermissions,
+ isDuplicating,
+ duplicateDashboard,
};
}
diff --git a/client/app/pages/dashboards/hooks/useDuplicateDashboard.js b/client/app/pages/dashboards/hooks/useDuplicateDashboard.js
new file mode 100644
index 0000000000..80d68af211
--- /dev/null
+++ b/client/app/pages/dashboards/hooks/useDuplicateDashboard.js
@@ -0,0 +1,40 @@
+import { noop, extend, pick } from "lodash";
+import { useCallback, useState } from "react";
+import url from "url";
+import qs from "query-string";
+import { Dashboard } from "@/services/dashboard";
+
+function keepCurrentUrlParams(targetUrl) {
+ const currentUrlParams = qs.parse(window.location.search);
+ targetUrl = url.parse(targetUrl);
+ const targetUrlParams = qs.parse(targetUrl.search);
+ return url.format(
+ extend(pick(targetUrl, ["protocol", "auth", "host", "pathname"]), {
+ search: qs.stringify(extend(currentUrlParams, targetUrlParams)),
+ })
+ );
+}
+
+export default function useDuplicateDashboard(dashboard) {
+ const [isDuplicating, setIsDuplicating] = useState(false);
+
+ const duplicateDashboard = useCallback(() => {
+    // Give the tab a unique name per call so each duplication opens a fresh tab instead of reusing one
+ const tabName = `duplicatedDashboardTab/${Math.random().toString()}`;
+
+    // Open the tab now, while we are still inside the user interaction;
+    // browsers would block a window.open() issued later from the async callback
+ const tab = window.open("", tabName);
+
+ setIsDuplicating(true);
+ Dashboard.fork({ id: dashboard.id })
+ .then(newDashboard => {
+ tab.location = keepCurrentUrlParams(newDashboard.getUrl());
+ })
+ .finally(() => {
+ setIsDuplicating(false);
+ });
+ }, [dashboard.id]);
+
+ return [isDuplicating, isDuplicating ? noop : duplicateDashboard];
+}
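For orientation, a minimal sketch (not part of the patch) of how the [isDuplicating, duplicateDashboard] pair returned above is meant to be consumed from a component; ForkMenuItem is a hypothetical name, the real wiring goes through useDashboard.js and DashboardHeader.jsx:

// Hedged sketch: consuming useDuplicateDashboard; ForkMenuItem is illustrative only.
import React from "react";
import useDuplicateDashboard from "@/pages/dashboards/hooks/useDuplicateDashboard";

function ForkMenuItem({ dashboard }) {
  // isDuplicating is true while the fork request is in flight; during that time
  // the returned callback is a no-op, so repeated clicks cannot open extra tabs.
  const [isDuplicating, duplicateDashboard] = useDuplicateDashboard(dashboard);

  return (
    <button type="button" disabled={isDuplicating} onClick={duplicateDashboard}>
      {isDuplicating ? "Forking…" : "Fork"}
    </button>
  );
}

export default ForkMenuItem;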
diff --git a/client/app/pages/home/Home.jsx b/client/app/pages/home/Home.jsx
index c6f55d646d..98c35d3067 100644
--- a/client/app/pages/home/Home.jsx
+++ b/client/app/pages/home/Home.jsx
@@ -6,7 +6,6 @@ import Link from "@/components/Link";
import routeWithUserSession from "@/components/ApplicationArea/routeWithUserSession";
import EmptyState, { EmptyStateHelpMessage } from "@/components/empty-state/EmptyState";
import DynamicComponent from "@/components/DynamicComponent";
-import BeaconConsent from "@/components/BeaconConsent";
import PlainButton from "@/components/PlainButton";
import { axios } from "@/services/axios";
@@ -89,7 +88,6 @@ export default function Home() {
-      <BeaconConsent />
);
diff --git a/client/app/pages/queries-list/queries-list.css b/client/app/pages/queries-list/queries-list.css
index 619a72c8a8..720da8d1e0 100644
--- a/client/app/pages/queries-list/queries-list.css
+++ b/client/app/pages/queries-list/queries-list.css
@@ -3,10 +3,6 @@
height: 35px;
}
-.page-queries-list .page-header-actions {
- width: 25%; /* same as sidebar */
- max-width: 350px; /* same as sidebar */
-}
/* same rule as for sidebar */
@media (max-width: 990px) {
diff --git a/client/app/pages/queries/QuerySource.jsx b/client/app/pages/queries/QuerySource.jsx
index 774b720963..f492b99324 100644
--- a/client/app/pages/queries/QuerySource.jsx
+++ b/client/app/pages/queries/QuerySource.jsx
@@ -134,11 +134,10 @@ function QuerySource(props) {
// choose data source id for new queries
if (dataSourcesLoaded && queryFlags.isNew) {
const firstDataSourceId = dataSources.length > 0 ? dataSources[0].id : null;
+ const selectedDataSourceId = parseInt(localStorage.getItem("lastSelectedDataSourceId")) || null;
+
handleDataSourceChange(
- chooseDataSourceId(
- [query.data_source_id, localStorage.getItem("lastSelectedDataSourceId"), firstDataSourceId],
- dataSources
- )
+ chooseDataSourceId([query.data_source_id, selectedDataSourceId, firstDataSourceId], dataSources)
);
}
}, [query.data_source_id, queryFlags.isNew, dataSourcesLoaded, dataSources, handleDataSourceChange]);
diff --git a/client/app/pages/queries/QuerySource.less b/client/app/pages/queries/QuerySource.less
index 2c161517c2..c64b888d1d 100644
--- a/client/app/pages/queries/QuerySource.less
+++ b/client/app/pages/queries/QuerySource.less
@@ -37,9 +37,10 @@
&.active {
overflow: visible;
+ max-height: unset !important;
.ant-input {
resize: vertical;
- max-height: 150px - 15px * 2;
+ height: 30vh;
}
}
}
diff --git a/client/app/pages/queries/components/QueryExecutionMetadata.jsx b/client/app/pages/queries/components/QueryExecutionMetadata.jsx
index 58fb384508..fd713c8ccc 100644
--- a/client/app/pages/queries/components/QueryExecutionMetadata.jsx
+++ b/client/app/pages/queries/components/QueryExecutionMetadata.jsx
@@ -9,6 +9,7 @@ import QueryControlDropdown from "@/components/EditVisualizationButton/QueryCont
import EditVisualizationButton from "@/components/EditVisualizationButton";
import useQueryResultData from "@/lib/useQueryResultData";
import { durationHumanize, pluralize, prettySize } from "@/lib/utils";
+import { isUndefined } from "lodash";
import "./QueryExecutionMetadata.less";
@@ -51,7 +52,8 @@ export default function QueryExecutionMetadata({
"Result truncated to " +
queryResultData.rows.length +
" rows. Databricks may truncate query results that are unstably large."
- }>
+ }
+ >
@@ -67,10 +69,9 @@ export default function QueryExecutionMetadata({
)}
{isQueryExecuting &&
Running… }
- {queryResultData.metadata.data_scanned && (
+ {!isUndefined(queryResultData.metadata.data_scanned) && !isQueryExecuting && (
- Data Scanned
- {prettySize(queryResultData.metadata.data_scanned)}
+ Data Scanned {prettySize(queryResultData.metadata.data_scanned)}
)}
diff --git a/client/app/pages/settings/components/GeneralSettings/BeaconConsentSettings.jsx b/client/app/pages/settings/components/GeneralSettings/BeaconConsentSettings.jsx
deleted file mode 100644
index 9dd998f4ff..0000000000
--- a/client/app/pages/settings/components/GeneralSettings/BeaconConsentSettings.jsx
+++ /dev/null
@@ -1,38 +0,0 @@
-import React from "react";
-import Form from "antd/lib/form";
-import Checkbox from "antd/lib/checkbox";
-import Skeleton from "antd/lib/skeleton";
-import HelpTrigger from "@/components/HelpTrigger";
-import DynamicComponent from "@/components/DynamicComponent";
-import { SettingsEditorPropTypes, SettingsEditorDefaultProps } from "../prop-types";
-
-export default function BeaconConsentSettings(props) {
- const { values, onChange, loading } = props;
-
- return (
-
-
- Anonymous Usage Data Sharing
-
-
- }>
- {loading ? (
-
- ) : (
- onChange({ beacon_consent: e.target.checked })}>
- Help Redash improve by automatically sending anonymous usage data
-
- )}
-
-
- );
-}
-
-BeaconConsentSettings.propTypes = SettingsEditorPropTypes;
-
-BeaconConsentSettings.defaultProps = SettingsEditorDefaultProps;
diff --git a/client/app/pages/settings/components/GeneralSettings/index.jsx b/client/app/pages/settings/components/GeneralSettings/index.jsx
index 3186c83a28..6b02cd2a1e 100644
--- a/client/app/pages/settings/components/GeneralSettings/index.jsx
+++ b/client/app/pages/settings/components/GeneralSettings/index.jsx
@@ -4,7 +4,6 @@ import DynamicComponent from "@/components/DynamicComponent";
import FormatSettings from "./FormatSettings";
import PlotlySettings from "./PlotlySettings";
import FeatureFlagsSettings from "./FeatureFlagsSettings";
-import BeaconConsentSettings from "./BeaconConsentSettings";
export default function GeneralSettings(props) {
return (
@@ -14,7 +13,6 @@ export default function GeneralSettings(props) {
-      <BeaconConsentSettings {...props} />
);
}
diff --git a/client/app/services/alert.js b/client/app/services/alert.js
index 2171c27c6a..704d725ce6 100644
--- a/client/app/services/alert.js
+++ b/client/app/services/alert.js
@@ -36,6 +36,7 @@ const Alert = {
delete: data => axios.delete(`api/alerts/${data.id}`),
mute: data => axios.post(`api/alerts/${data.id}/mute`),
unmute: data => axios.delete(`api/alerts/${data.id}/mute`),
+ evaluate: data => axios.post(`api/alerts/${data.id}/eval`),
};
export default Alert;
diff --git a/client/app/services/dashboard.js b/client/app/services/dashboard.js
index 11c8899945..a4d3550ba5 100644
--- a/client/app/services/dashboard.js
+++ b/client/app/services/dashboard.js
@@ -172,6 +172,7 @@ const DashboardService = {
favorites: params => axios.get("api/dashboards/favorites", { params }).then(transformResponse),
favorite: ({ id }) => axios.post(`api/dashboards/${id}/favorite`),
unfavorite: ({ id }) => axios.delete(`api/dashboards/${id}/favorite`),
+ fork: ({ id }) => axios.post(`api/dashboards/${id}/fork`, { id }).then(transformResponse),
};
_.extend(Dashboard, DashboardService);
@@ -265,3 +266,7 @@ Dashboard.prototype.favorite = function favorite() {
Dashboard.prototype.unfavorite = function unfavorite() {
return Dashboard.unfavorite(this);
};
+
+Dashboard.prototype.getUrl = function getUrl() {
+ return urlForDashboard(this);
+};
diff --git a/client/app/services/parameters/DateParameter.js b/client/app/services/parameters/DateParameter.js
index 3d386b384d..18d850aef4 100644
--- a/client/app/services/parameters/DateParameter.js
+++ b/client/app/services/parameters/DateParameter.js
@@ -61,7 +61,7 @@ class DateParameter extends Parameter {
return value;
}
- const normalizedValue = moment(value);
+ const normalizedValue = moment(value, moment.ISO_8601, true);
return normalizedValue.isValid() ? normalizedValue : null;
}
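The two extra arguments switch moment to strict ISO-8601 parsing. A quick illustration of the difference (example values are illustrative, not taken from the patch):

// Hedged sketch: strict ISO-8601 parsing vs. moment's lenient fallback.
import moment from "moment";

moment("2022-01-15", moment.ISO_8601, true).isValid(); // true: well-formed ISO date
moment("01/15/2022", moment.ISO_8601, true).isValid(); // false: rejected, not ISO-8601
moment("01/15/2022").isValid(); // typically true: lenient mode falls back to Date parsing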
diff --git a/client/app/services/parameters/TextPatternParameter.js b/client/app/services/parameters/TextPatternParameter.js
new file mode 100644
index 0000000000..fe84c00ce8
--- /dev/null
+++ b/client/app/services/parameters/TextPatternParameter.js
@@ -0,0 +1,29 @@
+import { toString, isNull } from "lodash";
+import Parameter from "./Parameter";
+
+class TextPatternParameter extends Parameter {
+ constructor(parameter, parentQueryId) {
+ super(parameter, parentQueryId);
+ this.regex = parameter.regex;
+ this.setValue(parameter.value);
+ }
+
+  normalizeValue(value) {
+    if (isNull(value)) {
+      return null;
+    }
+
+    const normalizedValue = toString(value);
+
+    // Accept the value only when it matches the parameter's regex pattern
+    const re = new RegExp(this.regex);
+    return re.test(normalizedValue) ? normalizedValue : null;
+  }
+}
+
+export default TextPatternParameter;
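A short usage sketch (not part of the patch) of the class above via createParameter; the regex shown is a made-up example:

// Hedged sketch: a text-pattern parameter only accepts values matching its regex.
import { createParameter } from "@/services/parameters";

const releaseParam = createParameter({
  name: "release",
  title: "Release",
  type: "text-pattern",
  regex: "^v\\d+\\.\\d+$", // hypothetical pattern, e.g. "v1.2"
});

releaseParam.normalizeValue("v1.2"); // => "v1.2" (matches, value is kept)
releaseParam.normalizeValue("latest"); // => null (rejected, parameter stays unset)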
diff --git a/client/app/services/parameters/index.js b/client/app/services/parameters/index.js
index 9e1b3fffcb..e34eced08f 100644
--- a/client/app/services/parameters/index.js
+++ b/client/app/services/parameters/index.js
@@ -5,6 +5,7 @@ import EnumParameter from "./EnumParameter";
import QueryBasedDropdownParameter from "./QueryBasedDropdownParameter";
import DateParameter from "./DateParameter";
import DateRangeParameter from "./DateRangeParameter";
+import TextPatternParameter from "./TextPatternParameter";
function createParameter(param, parentQueryId) {
switch (param.type) {
@@ -22,6 +23,8 @@ function createParameter(param, parentQueryId) {
case "datetime-range":
case "datetime-range-with-seconds":
return new DateRangeParameter(param, parentQueryId);
+ case "text-pattern":
+ return new TextPatternParameter({ ...param, type: "text-pattern" }, parentQueryId);
default:
return new TextParameter({ ...param, type: "text" }, parentQueryId);
}
@@ -34,6 +37,7 @@ function cloneParameter(param) {
export {
Parameter,
TextParameter,
+ TextPatternParameter,
NumberParameter,
EnumParameter,
QueryBasedDropdownParameter,
diff --git a/client/app/services/parameters/tests/Parameter.test.js b/client/app/services/parameters/tests/Parameter.test.js
index 3ec7ad7b13..4d504e3165 100644
--- a/client/app/services/parameters/tests/Parameter.test.js
+++ b/client/app/services/parameters/tests/Parameter.test.js
@@ -1,6 +1,7 @@
import {
createParameter,
TextParameter,
+ TextPatternParameter,
NumberParameter,
EnumParameter,
QueryBasedDropdownParameter,
@@ -12,6 +13,7 @@ describe("Parameter", () => {
describe("create", () => {
const parameterTypes = [
["text", TextParameter],
+ ["text-pattern", TextPatternParameter],
["number", NumberParameter],
["enum", EnumParameter],
["query", QueryBasedDropdownParameter],
diff --git a/client/app/services/parameters/tests/TextPatternParameter.test.js b/client/app/services/parameters/tests/TextPatternParameter.test.js
new file mode 100644
index 0000000000..699e320d96
--- /dev/null
+++ b/client/app/services/parameters/tests/TextPatternParameter.test.js
@@ -0,0 +1,21 @@
+import { createParameter } from "..";
+
+describe("TextPatternParameter", () => {
+ let param;
+
+ beforeEach(() => {
+ param = createParameter({ name: "param", title: "Param", type: "text-pattern", regex: "a+" });
+ });
+
+ describe("noramlizeValue", () => {
+ test("converts matching strings", () => {
+ const normalizedValue = param.normalizeValue("art");
+ expect(normalizedValue).toBe("art");
+ });
+
+ test("returns null when string does not match pattern", () => {
+ const normalizedValue = param.normalizeValue("brt");
+ expect(normalizedValue).toBeNull();
+ });
+ });
+});
diff --git a/client/app/services/query-result.js b/client/app/services/query-result.js
index 1a5f12f2a1..03d8de61b6 100644
--- a/client/app/services/query-result.js
+++ b/client/app/services/query-result.js
@@ -113,6 +113,10 @@ export function fetchDataFromJob(jobId, interval = 1000) {
});
}
+export function isDateTime(v) {
+ return isString(v) && moment(v, moment.ISO_8601, true).isValid() && /^\d{4}-\d{2}-\d{2}T/.test(v);
+}
+
class QueryResult {
constructor(props) {
this.deferred = defer();
@@ -147,7 +151,7 @@ class QueryResult {
let newType = null;
if (isNumber(v)) {
newType = "float";
- } else if (isString(v) && v.match(/^\d{4}-\d{2}-\d{2}T/)) {
+ } else if (isDateTime(v)) {
row[k] = moment.utc(v);
newType = "datetime";
} else if (isString(v) && v.match(/^\d{4}-\d{2}-\d{2}$/)) {
@@ -318,6 +322,9 @@ class QueryResult {
}
return v;
});
+ if (filter.values.length > 1 && filter.multiple) {
+ filter.current = filter.values.slice();
+ }
});
return filters;
diff --git a/client/app/services/query-result.test.js b/client/app/services/query-result.test.js
new file mode 100644
index 0000000000..d31e9e1ea1
--- /dev/null
+++ b/client/app/services/query-result.test.js
@@ -0,0 +1,17 @@
+import { isDateTime } from "@/services/query-result";
+
+describe("isDateTime", () => {
+ it.each([
+ ["2022-01-01T00:00:00", true],
+ ["2022-01-01T00:00:00+09:00", true],
+ ["2021-01-27T00:00:01.733983944+03:00 stderr F {", false],
+ ["2021-01-27Z00:00:00+09:00", false],
+ ["2021-01-27", false],
+ ["foo bar", false],
+ [2022, false],
+ [null, false],
+ ["", false],
+ ])("isDateTime('%s'). expected '%s'.", (value, expected) => {
+ expect(isDateTime(value)).toBe(expected);
+ });
+});
diff --git a/client/cypress/cypress.js b/client/cypress/cypress.js
index 44f8010e72..320402f18d 100644
--- a/client/cypress/cypress.js
+++ b/client/cypress/cypress.js
@@ -1,6 +1,5 @@
/* eslint-disable import/no-extraneous-dependencies, no-console */
const { find } = require("lodash");
-const atob = require("atob");
const { execSync } = require("child_process");
const { get, post } = require("request").defaults({ jar: true });
const { seedData } = require("./seed-data");
@@ -13,7 +12,7 @@ try {
cypressConfigBaseUrl = cypressConfig.baseUrl;
} catch (e) {}
-const baseUrl = process.env.CYPRESS_baseUrl || cypressConfigBaseUrl || "http://localhost:5000";
+const baseUrl = process.env.CYPRESS_baseUrl || cypressConfigBaseUrl || "http://localhost:5001";
function seedDatabase(seedValues) {
get(baseUrl + "/login", (_, { headers }) => {
@@ -44,42 +43,32 @@ function seedDatabase(seedValues) {
function buildServer() {
console.log("Building the server...");
- execSync("docker-compose -p cypress build", { stdio: "inherit" });
+ execSync("docker compose -p cypress build", { stdio: "inherit" });
}
function startServer() {
console.log("Starting the server...");
- execSync("docker-compose -p cypress up -d", { stdio: "inherit" });
- execSync("docker-compose -p cypress run server create_db", { stdio: "inherit" });
+ execSync("docker compose -p cypress up -d", { stdio: "inherit" });
+ execSync("docker compose -p cypress run server create_db", { stdio: "inherit" });
}
function stopServer() {
console.log("Stopping the server...");
- execSync("docker-compose -p cypress down", { stdio: "inherit" });
+ execSync("docker compose -p cypress down", { stdio: "inherit" });
}
function runCypressCI() {
const {
- PERCY_TOKEN_ENCODED,
- CYPRESS_PROJECT_ID_ENCODED,
- CYPRESS_RECORD_KEY_ENCODED,
- CIRCLE_REPOSITORY_URL,
+ GITHUB_REPOSITORY,
+ CYPRESS_OPTIONS, // eslint-disable-line no-unused-vars
} = process.env;
- if (CIRCLE_REPOSITORY_URL && CIRCLE_REPOSITORY_URL.includes("getredash/redash")) {
- if (PERCY_TOKEN_ENCODED) {
- process.env.PERCY_TOKEN = atob(`${PERCY_TOKEN_ENCODED}`);
- }
- if (CYPRESS_PROJECT_ID_ENCODED) {
- process.env.CYPRESS_PROJECT_ID = atob(`${CYPRESS_PROJECT_ID_ENCODED}`);
- }
- if (CYPRESS_RECORD_KEY_ENCODED) {
- process.env.CYPRESS_RECORD_KEY = atob(`${CYPRESS_RECORD_KEY_ENCODED}`);
- }
+ if (GITHUB_REPOSITORY === "getredash/redash") {
+ process.env.CYPRESS_OPTIONS = "--record";
}
execSync(
- "COMMIT_INFO_MESSAGE=$(git show -s --format=%s) docker-compose run --name cypress cypress ./node_modules/.bin/percy exec -t 300 -- ./node_modules/.bin/cypress run --record",
+ "COMMIT_INFO_MESSAGE=$(git show -s --format=%s) docker compose run --name cypress cypress ./node_modules/.bin/percy exec -t 300 -- ./node_modules/.bin/cypress run $CYPRESS_OPTIONS",
{ stdio: "inherit" }
);
}
@@ -118,6 +107,6 @@ switch (command) {
stopServer();
break;
default:
- console.log("Usage: npm run cypress [build|start|db-seed|open|run|stop]");
+ console.log("Usage: yarn cypress [build|start|db-seed|open|run|stop]");
break;
}
diff --git a/client/cypress/integration/dashboard/dashboard_list.js b/client/cypress/integration/dashboard/dashboard_list.js
new file mode 100644
index 0000000000..3573f68dc4
--- /dev/null
+++ b/client/cypress/integration/dashboard/dashboard_list.js
@@ -0,0 +1,24 @@
+describe("Dashboard list sort", () => {
+ beforeEach(() => {
+ cy.login();
+ });
+
+ it("creates one dashboard", () => {
+ cy.visit("/dashboards");
+ cy.getByTestId("CreateButton").click();
+ cy.getByTestId("CreateDashboardMenuItem").click();
+ cy.getByTestId("CreateDashboardDialog").within(() => {
+ cy.get("input").type("A Foo Bar");
+ cy.getByTestId("DashboardSaveButton").click();
+ });
+ });
+
+ describe("Sorting table does not crash page ", () => {
+ it("sorts", () => {
+ cy.visit("/dashboards");
+ cy.contains("Name").click();
+ cy.wait(1000); // eslint-disable-line cypress/no-unnecessary-waiting
+ cy.getByTestId("ErrorMessage").should("not.exist");
+ });
+ });
+});
diff --git a/client/cypress/integration/dashboard/parameter_spec.js b/client/cypress/integration/dashboard/parameter_spec.js
index dd62e773d4..0d3cc7a621 100644
--- a/client/cypress/integration/dashboard/parameter_spec.js
+++ b/client/cypress/integration/dashboard/parameter_spec.js
@@ -1,5 +1,4 @@
-import { createQueryAndAddWidget, editDashboard } from "../../support/dashboard";
-import { dragParam, expectParamOrder } from "../../support/parameters";
+import { createQueryAndAddWidget } from "../../support/dashboard";
describe("Dashboard Parameters", () => {
const parameters = [
@@ -59,16 +58,6 @@ describe("Dashboard Parameters", () => {
});
};
- const setWidgetParametersToDashboard = parameters => {
- cy.wrap(parameters).each(({ name: paramName }, i) => {
- cy.getByTestId(`EditParamMappingButton-${paramName}`).click();
- cy.getByTestId("NewDashboardParameterOption")
- .filter(":visible")
- .click();
- return saveMappingOptions(i === parameters.length - 1);
- });
- };
-
it("supports widget parameters", function() {
// widget parameter mapping is the default for the API
cy.getByTestId(this.widgetTestId).within(() => {
@@ -86,27 +75,6 @@ describe("Dashboard Parameters", () => {
cy.getByTestId("DashboardParameters").should("not.exist");
});
- it("supports dashboard parameters", function() {
- openMappingOptions(this.widgetTestId);
- setWidgetParametersToDashboard(parameters);
-
- cy.getByTestId(this.widgetTestId).within(() => {
- cy.getByTestId("ParameterName-param1").should("not.exist");
- });
-
- cy.getByTestId("DashboardParameters").within(() => {
- cy.getByTestId("ParameterName-param1")
- .find("input")
- .type("{selectall}DashboardParam");
-
- cy.getByTestId("ParameterApplyButton").click();
- });
-
- cy.getByTestId(this.widgetTestId).within(() => {
- cy.getByTestId("TableVisualization").should("contain", "DashboardParam");
- });
- });
-
it("supports static values for parameters", function() {
openMappingOptions(this.widgetTestId);
cy.getByTestId("EditParamMappingButton-param1").click();
@@ -131,34 +99,4 @@ describe("Dashboard Parameters", () => {
cy.getByTestId("TableVisualization").should("contain", "StaticValue");
});
});
-
- it("reorders parameters", function() {
- // Reorder is only available in edit mode
- editDashboard();
-
- const [param1, param2] = parameters;
-
- cy.getByTestId("ParameterBlock-param1")
- .invoke("width")
- .then(paramWidth => {
- cy.server();
- cy.route("POST", `**/api/dashboards/*`).as("SaveDashboard");
- cy.route("POST", `**/api/widgets/*`).as("SaveWidget");
-
- // Asserts widget param order
- dragParam(param1.name, paramWidth, 1);
- cy.wait("@SaveWidget");
- cy.reload();
- expectParamOrder([param2.title, param1.title]);
-
- // Asserts dashboard param order
- openMappingOptions(this.widgetTestId);
- setWidgetParametersToDashboard(parameters);
- cy.wait("@SaveWidget");
- dragParam(param1.name, paramWidth, 1);
- cy.wait("@SaveDashboard");
- cy.reload();
- expectParamOrder([param2.title, param1.title]);
- });
- });
});
diff --git a/client/cypress/integration/dashboard/textbox_spec.js b/client/cypress/integration/dashboard/textbox_spec.js
index 0c8463587e..669e73e912 100644
--- a/client/cypress/integration/dashboard/textbox_spec.js
+++ b/client/cypress/integration/dashboard/textbox_spec.js
@@ -141,7 +141,7 @@ describe("Textbox", () => {
})
.should($el => {
const { top, left } = $el.offset();
- expect(top).to.eq(162);
+ expect(top).to.be.oneOf([162, 162.015625]);
expect(left).to.eq(282);
expect($el.width()).to.eq(545);
expect($el.height()).to.eq(185);
diff --git a/client/cypress/integration/dashboard/widget_spec.js b/client/cypress/integration/dashboard/widget_spec.js
index 6da0e335cb..2c77fff832 100644
--- a/client/cypress/integration/dashboard/widget_spec.js
+++ b/client/cypress/integration/dashboard/widget_spec.js
@@ -177,7 +177,7 @@ describe("Widget", () => {
cy.visit(this.dashboardUrl);
cy.getByTestId("TableVisualization")
.its("0.offsetHeight")
- .should("eq", 381);
+ .should("be.oneOf", [380, 381]);
cy.percySnapshot("Shows correct height of table visualization");
});
});
diff --git a/client/cypress/integration/data-source/create_data_source_spec.js b/client/cypress/integration/data-source/create_data_source_spec.js
index 0453dd5d62..1549cef6b1 100644
--- a/client/cypress/integration/data-source/create_data_source_spec.js
+++ b/client/cypress/integration/data-source/create_data_source_spec.js
@@ -47,7 +47,7 @@ describe("Create Data Source", () => {
cy.getByTestId("User").type("postgres");
cy.getByTestId("Password").type("postgres");
cy.getByTestId("Database Name").type("postgres{enter}");
- cy.getByTestId("CreateSourceSaveButton").click();
+ cy.getByTestId("CreateSourceSaveButton").click({ force: true });
cy.contains("Saved.");
});
diff --git a/client/cypress/integration/destination/create_destination_spec.js b/client/cypress/integration/destination/create_destination_spec.js
index 4629ec76c0..9e0ff79fce 100644
--- a/client/cypress/integration/destination/create_destination_spec.js
+++ b/client/cypress/integration/destination/create_destination_spec.js
@@ -15,7 +15,7 @@ describe("Create Destination", () => {
cy.getByTestId("PreviewItem")
.then($previewItems => Cypress.$.map($previewItems, item => Cypress.$(item).attr("data-test-type")))
- .then(availableTypes => expect(availableTypes).not.to.contain.members(this.deprecatedTypes));
+ .then(availableTypes => expect(availableTypes).not.to.contain.oneOf(this.deprecatedTypes));
cy.getByTestId("CreateSourceDialog").should("contain", "Email");
cy.wait(1000); // eslint-disable-line cypress/no-unnecessary-waiting
diff --git a/client/cypress/integration/query/filters_spec.js b/client/cypress/integration/query/filters_spec.js
index 219743ac40..50262738db 100644
--- a/client/cypress/integration/query/filters_spec.js
+++ b/client/cypress/integration/query/filters_spec.js
@@ -68,29 +68,45 @@ describe("Query Filters", () => {
}
it("filters rows in a Table Visualization", () => {
- expectSelectedOptionsToHaveMembers(["a"]);
- expectTableToHaveLength(4);
- expectFirstColumnToHaveMembers(["a", "a", "a", "a"]);
+ // Defaults to All Options Selected
+
+ expectSelectedOptionsToHaveMembers(["a", "b", "c"]);
+ expectTableToHaveLength(11);
+ expectFirstColumnToHaveMembers(["a", "a", "a", "a", "b", "b", "b", "c", "c", "c", "c"]);
+
+ // Clear Option
cy.getByTestId("FilterName-stage1::multi-filter")
.find(".ant-select-selector")
.click();
- cy.contains(".ant-select-item-option-content", "b").click();
+ cy.getByTestId("ClearOption").click();
cy.getByTestId("FilterName-stage1::multi-filter").click(); // close dropdown
- expectSelectedOptionsToHaveMembers(["a", "b"]);
- expectTableToHaveLength(7);
- expectFirstColumnToHaveMembers(["a", "a", "a", "a", "b", "b", "b"]);
+ cy.getByTestId("TableVisualization").should("not.exist");
- // Clear Option
+ // Single Option selected
cy.getByTestId("FilterName-stage1::multi-filter")
.find(".ant-select-selector")
.click();
- cy.getByTestId("ClearOption").click();
+ cy.contains(".ant-select-item-option-grouped > .ant-select-item-option-content", "a").click();
cy.getByTestId("FilterName-stage1::multi-filter").click(); // close dropdown
- cy.getByTestId("TableVisualization").should("not.exist");
+ expectSelectedOptionsToHaveMembers(["a"]);
+ expectTableToHaveLength(4);
+ expectFirstColumnToHaveMembers(["a", "a", "a", "a"]);
+
+ // Two Options selected
+
+ cy.getByTestId("FilterName-stage1::multi-filter")
+ .find(".ant-select-selector")
+ .click();
+ cy.contains(".ant-select-item-option-content", "b").click();
+ cy.getByTestId("FilterName-stage1::multi-filter").click(); // close dropdown
+
+ expectSelectedOptionsToHaveMembers(["a", "b"]);
+ expectTableToHaveLength(7);
+ expectFirstColumnToHaveMembers(["a", "a", "a", "a", "b", "b", "b"]);
// Select All Option
diff --git a/client/cypress/integration/query/parameter_spec.js b/client/cypress/integration/query/parameter_spec.js
index 0b4cc9f60c..dd26163591 100644
--- a/client/cypress/integration/query/parameter_spec.js
+++ b/client/cypress/integration/query/parameter_spec.js
@@ -1,16 +1,15 @@
import { dragParam } from "../../support/parameters";
+import dayjs from "dayjs";
function openAndSearchAntdDropdown(testId, paramOption) {
- cy.getByTestId(testId)
- .find(".ant-select-selection-search-input")
- .type(paramOption, { force: true });
+ cy.getByTestId(testId).find(".ant-select-selection-search-input").type(paramOption, { force: true });
}
describe("Parameter", () => {
- const expectDirtyStateChange = edit => {
+ const expectDirtyStateChange = (edit) => {
cy.getByTestId("ParameterName-test-parameter")
.find(".parameter-input")
- .should($el => {
+ .should(($el) => {
assert.isUndefined($el.data("dirty"));
});
@@ -18,7 +17,7 @@ describe("Parameter", () => {
cy.getByTestId("ParameterName-test-parameter")
.find(".parameter-input")
- .should($el => {
+ .should(($el) => {
assert.isTrue($el.data("dirty"));
});
};
@@ -41,9 +40,7 @@ describe("Parameter", () => {
});
it("updates the results after clicking Apply", () => {
- cy.getByTestId("ParameterName-test-parameter")
- .find("input")
- .type("Redash");
+ cy.getByTestId("ParameterName-test-parameter").find("input").type("Redash");
cy.getByTestId("ParameterApplyButton").click();
@@ -52,13 +49,66 @@ describe("Parameter", () => {
it("sets dirty state when edited", () => {
expectDirtyStateChange(() => {
- cy.getByTestId("ParameterName-test-parameter")
- .find("input")
- .type("Redash");
+ cy.getByTestId("ParameterName-test-parameter").find("input").type("Redash");
});
});
});
+ describe("Text Pattern Parameter", () => {
+ beforeEach(() => {
+ const queryData = {
+ name: "Text Pattern Parameter",
+ query: "SELECT '{{test-parameter}}' AS parameter",
+ options: {
+ parameters: [{ name: "test-parameter", title: "Test Parameter", type: "text-pattern", regex: "a.*a" }],
+ },
+ };
+
+ cy.createQuery(queryData, false).then(({ id }) => cy.visit(`/queries/${id}/source`));
+ });
+
+ it("updates the results after clicking Apply", () => {
+ cy.getByTestId("ParameterName-test-parameter").find("input").type("{selectall}arta");
+
+ cy.getByTestId("ParameterApplyButton").click();
+
+ cy.getByTestId("TableVisualization").should("contain", "arta");
+
+ cy.getByTestId("ParameterName-test-parameter").find("input").type("{selectall}arounda");
+
+ cy.getByTestId("ParameterApplyButton").click();
+
+ cy.getByTestId("TableVisualization").should("contain", "arounda");
+ });
+
+ it("throws error message with invalid query request", () => {
+ cy.getByTestId("ParameterName-test-parameter").find("input").type("{selectall}arta");
+
+ cy.getByTestId("ParameterApplyButton").click();
+
+ cy.getByTestId("ParameterName-test-parameter").find("input").type("{selectall}abcab");
+
+ cy.getByTestId("ParameterApplyButton").click();
+
+ cy.getByTestId("QueryExecutionStatus").should("exist");
+ });
+
+ it("sets dirty state when edited", () => {
+ expectDirtyStateChange(() => {
+ cy.getByTestId("ParameterName-test-parameter").find("input").type("{selectall}arta");
+ });
+ });
+
+ it("doesn't let user save invalid regex", () => {
+ cy.get(".fa-cog").click();
+ cy.getByTestId("RegexPatternInput").type("{selectall}[");
+ cy.contains("Invalid Regex Pattern").should("exist");
+ cy.getByTestId("SaveParameterSettings").click();
+ cy.get(".fa-cog").click();
+ cy.getByTestId("RegexPatternInput").should("not.equal", "[");
+ });
+ });
+
describe("Number Parameter", () => {
beforeEach(() => {
const queryData = {
@@ -73,17 +123,13 @@ describe("Parameter", () => {
});
it("updates the results after clicking Apply", () => {
- cy.getByTestId("ParameterName-test-parameter")
- .find("input")
- .type("{selectall}42");
+ cy.getByTestId("ParameterName-test-parameter").find("input").type("{selectall}42");
cy.getByTestId("ParameterApplyButton").click();
cy.getByTestId("TableVisualization").should("contain", 42);
- cy.getByTestId("ParameterName-test-parameter")
- .find("input")
- .type("{selectall}31415");
+ cy.getByTestId("ParameterName-test-parameter").find("input").type("{selectall}31415");
cy.getByTestId("ParameterApplyButton").click();
@@ -92,9 +138,7 @@ describe("Parameter", () => {
it("sets dirty state when edited", () => {
expectDirtyStateChange(() => {
- cy.getByTestId("ParameterName-test-parameter")
- .find("input")
- .type("{selectall}42");
+ cy.getByTestId("ParameterName-test-parameter").find("input").type("{selectall}42");
});
});
});
@@ -118,10 +162,7 @@ describe("Parameter", () => {
openAndSearchAntdDropdown("ParameterName-test-parameter", "value2"); // asserts option filter prop
// only the filtered option should be on the DOM
- cy.get(".ant-select-item-option")
- .should("have.length", 1)
- .and("contain", "value2")
- .click();
+ cy.get(".ant-select-item-option").should("have.length", 1).and("contain", "value2").click();
cy.getByTestId("ParameterApplyButton").click();
// ensure that query is being executed
@@ -139,12 +180,10 @@ describe("Parameter", () => {
SaveParameterSettings
`);
- cy.getByTestId("ParameterName-test-parameter")
- .find(".ant-select-selection-search")
- .click();
+ cy.getByTestId("ParameterName-test-parameter").find(".ant-select-selection-search").click();
// select all unselected options
- cy.get(".ant-select-item-option").each($option => {
+ cy.get(".ant-select-item-option").each(($option) => {
if (!$option.hasClass("ant-select-item-option-selected")) {
cy.wrap($option).click();
}
@@ -159,9 +198,7 @@ describe("Parameter", () => {
it("sets dirty state when edited", () => {
expectDirtyStateChange(() => {
- cy.getByTestId("ParameterName-test-parameter")
- .find(".ant-select")
- .click();
+ cy.getByTestId("ParameterName-test-parameter").find(".ant-select").click();
cy.contains(".ant-select-item-option", "value2").click();
});
@@ -175,7 +212,7 @@ describe("Parameter", () => {
name: "Dropdown Query",
query: "",
};
- cy.createQuery(dropdownQueryData, true).then(dropdownQuery => {
+ cy.createQuery(dropdownQueryData, true).then((dropdownQuery) => {
const queryData = {
name: "Query Based Dropdown Parameter",
query: "SELECT '{{test-parameter}}' AS parameter",
@@ -207,7 +244,7 @@ describe("Parameter", () => {
SELECT 'value2' AS name, 2 AS value UNION ALL
SELECT 'value3' AS name, 3 AS value`,
};
- cy.createQuery(dropdownQueryData, true).then(dropdownQuery => {
+ cy.createQuery(dropdownQueryData, true).then((dropdownQuery) => {
const queryData = {
name: "Query Based Dropdown Parameter",
query: "SELECT '{{test-parameter}}' AS parameter",
@@ -233,10 +270,7 @@ describe("Parameter", () => {
openAndSearchAntdDropdown("ParameterName-test-parameter", "value2"); // asserts option filter prop
// only the filtered option should be on the DOM
- cy.get(".ant-select-item-option")
- .should("have.length", 1)
- .and("contain", "value2")
- .click();
+ cy.get(".ant-select-item-option").should("have.length", 1).and("contain", "value2").click();
cy.getByTestId("ParameterApplyButton").click();
// ensure that query is being executed
@@ -254,12 +288,10 @@ describe("Parameter", () => {
SaveParameterSettings
`);
- cy.getByTestId("ParameterName-test-parameter")
- .find(".ant-select")
- .click();
+ cy.getByTestId("ParameterName-test-parameter").find(".ant-select").click();
// make sure all options are unselected and select all
- cy.get(".ant-select-item-option").each($option => {
+ cy.get(".ant-select-item-option").each(($option) => {
expect($option).not.to.have.class("ant-select-dropdown-menu-item-selected");
cy.wrap($option).click();
});
@@ -273,14 +305,10 @@ describe("Parameter", () => {
});
});
- const selectCalendarDate = date => {
- cy.getByTestId("ParameterName-test-parameter")
- .find("input")
- .click();
+ const selectCalendarDate = (date) => {
+ cy.getByTestId("ParameterName-test-parameter").find("input").click();
- cy.get(".ant-picker-panel")
- .contains(".ant-picker-cell-inner", date)
- .click();
+ cy.get(".ant-picker-panel").contains(".ant-picker-cell-inner", date).click();
};
describe("Date Parameter", () => {
@@ -302,27 +330,25 @@ describe("Parameter", () => {
});
afterEach(() => {
- cy.clock().then(clock => clock.restore());
+ cy.clock().then((clock) => clock.restore());
});
- it("updates the results after selecting a date", function() {
+ it("updates the results after selecting a date", function () {
selectCalendarDate("15");
cy.getByTestId("ParameterApplyButton").click();
- cy.getByTestId("TableVisualization").should("contain", Cypress.moment(this.now).format("15/MM/YY"));
+ cy.getByTestId("TableVisualization").should("contain", dayjs(this.now).format("15/MM/YY"));
});
- it("allows picking a dynamic date", function() {
+ it("allows picking a dynamic date", function () {
cy.getByTestId("DynamicButton").click();
- cy.getByTestId("DynamicButtonMenu")
- .contains("Today/Now")
- .click();
+ cy.getByTestId("DynamicButtonMenu").contains("Today/Now").click();
cy.getByTestId("ParameterApplyButton").click();
- cy.getByTestId("TableVisualization").should("contain", Cypress.moment(this.now).format("DD/MM/YY"));
+ cy.getByTestId("TableVisualization").should("contain", dayjs(this.now).format("DD/MM/YY"));
});
it("sets dirty state when edited", () => {
@@ -349,14 +375,11 @@ describe("Parameter", () => {
});
afterEach(() => {
- cy.clock().then(clock => clock.restore());
+ cy.clock().then((clock) => clock.restore());
});
- it("updates the results after selecting a date and clicking in ok", function() {
- cy.getByTestId("ParameterName-test-parameter")
- .find("input")
- .as("Input")
- .click();
+ it("updates the results after selecting a date and clicking in ok", function () {
+ cy.getByTestId("ParameterName-test-parameter").find("input").as("Input").click();
selectCalendarDate("15");
@@ -364,63 +387,45 @@ describe("Parameter", () => {
cy.getByTestId("ParameterApplyButton").click();
- cy.getByTestId("TableVisualization").should("contain", Cypress.moment(this.now).format("YYYY-MM-15 HH:mm"));
+ cy.getByTestId("TableVisualization").should("contain", dayjs(this.now).format("YYYY-MM-15 HH:mm"));
});
- it("shows the current datetime after clicking in Now", function() {
- cy.getByTestId("ParameterName-test-parameter")
- .find("input")
- .as("Input")
- .click();
+ it("shows the current datetime after clicking in Now", function () {
+ cy.getByTestId("ParameterName-test-parameter").find("input").as("Input").click();
- cy.get(".ant-picker-panel")
- .contains("Now")
- .click();
+ cy.get(".ant-picker-panel").contains("Now").click();
cy.getByTestId("ParameterApplyButton").click();
- cy.getByTestId("TableVisualization").should("contain", Cypress.moment(this.now).format("YYYY-MM-DD HH:mm"));
+ cy.getByTestId("TableVisualization").should("contain", dayjs(this.now).format("YYYY-MM-DD HH:mm"));
});
- it("allows picking a dynamic date", function() {
+ it("allows picking a dynamic date", function () {
cy.getByTestId("DynamicButton").click();
- cy.getByTestId("DynamicButtonMenu")
- .contains("Today/Now")
- .click();
+ cy.getByTestId("DynamicButtonMenu").contains("Today/Now").click();
cy.getByTestId("ParameterApplyButton").click();
- cy.getByTestId("TableVisualization").should("contain", Cypress.moment(this.now).format("YYYY-MM-DD HH:mm"));
+ cy.getByTestId("TableVisualization").should("contain", dayjs(this.now).format("YYYY-MM-DD HH:mm"));
});
it("sets dirty state when edited", () => {
expectDirtyStateChange(() => {
- cy.getByTestId("ParameterName-test-parameter")
- .find("input")
- .click();
+ cy.getByTestId("ParameterName-test-parameter").find("input").click();
- cy.get(".ant-picker-panel")
- .contains("Now")
- .click();
+ cy.get(".ant-picker-panel").contains("Now").click();
});
});
});
describe("Date Range Parameter", () => {
const selectCalendarDateRange = (startDate, endDate) => {
- cy.getByTestId("ParameterName-test-parameter")
- .find("input")
- .first()
- .click();
-
- cy.get(".ant-picker-panel")
- .contains(".ant-picker-cell-inner", startDate)
- .click();
-
- cy.get(".ant-picker-panel")
- .contains(".ant-picker-cell-inner", endDate)
- .click();
+ cy.getByTestId("ParameterName-test-parameter").find("input").first().click();
+
+ cy.get(".ant-picker-panel").contains(".ant-picker-cell-inner", startDate).click();
+
+ cy.get(".ant-picker-panel").contains(".ant-picker-cell-inner", endDate).click();
};
beforeEach(() => {
@@ -441,31 +446,29 @@ describe("Parameter", () => {
});
afterEach(() => {
- cy.clock().then(clock => clock.restore());
+ cy.clock().then((clock) => clock.restore());
});
- it("updates the results after selecting a date range", function() {
+ it("updates the results after selecting a date range", function () {
selectCalendarDateRange("15", "20");
cy.getByTestId("ParameterApplyButton").click();
- const now = Cypress.moment(this.now);
+ const now = dayjs(this.now);
cy.getByTestId("TableVisualization").should(
"contain",
now.format("YYYY-MM-15") + " - " + now.format("YYYY-MM-20")
);
});
- it("allows picking a dynamic date range", function() {
+ it("allows picking a dynamic date range", function () {
cy.getByTestId("DynamicButton").click();
- cy.getByTestId("DynamicButtonMenu")
- .contains("Last month")
- .click();
+ cy.getByTestId("DynamicButtonMenu").contains("Last month").click();
cy.getByTestId("ParameterApplyButton").click();
- const lastMonth = Cypress.moment(this.now).subtract(1, "month");
+ const lastMonth = dayjs(this.now).subtract(1, "month");
cy.getByTestId("TableVisualization").should(
"contain",
lastMonth.startOf("month").format("YYYY-MM-DD") + " - " + lastMonth.endOf("month").format("YYYY-MM-DD")
@@ -478,15 +481,10 @@ describe("Parameter", () => {
});
describe("Apply Changes", () => {
- const expectAppliedChanges = apply => {
- cy.getByTestId("ParameterName-test-parameter-1")
- .find("input")
- .as("Input")
- .type("Redash");
+ const expectAppliedChanges = (apply) => {
+ cy.getByTestId("ParameterName-test-parameter-1").find("input").as("Input").type("Redash");
- cy.getByTestId("ParameterName-test-parameter-2")
- .find("input")
- .type("Redash");
+ cy.getByTestId("ParameterName-test-parameter-2").find("input").type("Redash");
cy.location("search").should("not.contain", "Redash");
@@ -522,10 +520,7 @@ describe("Parameter", () => {
it("shows and hides according to parameter dirty state", () => {
cy.getByTestId("ParameterApplyButton").should("not.be", "visible");
- cy.getByTestId("ParameterName-test-parameter-1")
- .find("input")
- .as("Param")
- .type("Redash");
+ cy.getByTestId("ParameterName-test-parameter-1").find("input").as("Param").type("Redash");
cy.getByTestId("ParameterApplyButton").should("be.visible");
@@ -535,21 +530,13 @@ describe("Parameter", () => {
});
it("updates dirty counter", () => {
- cy.getByTestId("ParameterName-test-parameter-1")
- .find("input")
- .type("Redash");
+ cy.getByTestId("ParameterName-test-parameter-1").find("input").type("Redash");
- cy.getByTestId("ParameterApplyButton")
- .find(".ant-badge-count p.current")
- .should("contain", "1");
+ cy.getByTestId("ParameterApplyButton").find(".ant-badge-count p.current").should("contain", "1");
- cy.getByTestId("ParameterName-test-parameter-2")
- .find("input")
- .type("Redash");
+ cy.getByTestId("ParameterName-test-parameter-2").find("input").type("Redash");
- cy.getByTestId("ParameterApplyButton")
- .find(".ant-badge-count p.current")
- .should("contain", "2");
+ cy.getByTestId("ParameterApplyButton").find(".ant-badge-count p.current").should("contain", "2");
});
it('applies changes from "Apply Changes" button', () => {
@@ -559,16 +546,13 @@ describe("Parameter", () => {
});
it('applies changes from "alt+enter" keyboard shortcut', () => {
- expectAppliedChanges(input => {
+ expectAppliedChanges((input) => {
input.type("{alt}{enter}");
});
});
it('disables "Execute" button', () => {
- cy.getByTestId("ParameterName-test-parameter-1")
- .find("input")
- .as("Input")
- .type("Redash");
+ cy.getByTestId("ParameterName-test-parameter-1").find("input").as("Input").type("Redash");
cy.getByTestId("ExecuteButton").should("be.disabled");
cy.get("@Input").clear();
@@ -593,15 +577,12 @@ describe("Parameter", () => {
cy.createQuery(queryData, false).then(({ id }) => cy.visit(`/queries/${id}/source`));
- cy.get(".parameter-block")
- .first()
- .invoke("width")
- .as("paramWidth");
+ cy.get(".parameter-block").first().invoke("width").as("paramWidth");
cy.get("body").type("{alt}D"); // hide schema browser
});
- it("is possible to rearrange parameters", function() {
+ it("is possible to rearrange parameters", function () {
cy.server();
cy.route("POST", "**/api/queries/*").as("QuerySave");
diff --git a/client/cypress/integration/visualizations/chart_spec.js b/client/cypress/integration/visualizations/chart_spec.js
index 830023ce56..cb3795fdfc 100644
--- a/client/cypress/integration/visualizations/chart_spec.js
+++ b/client/cypress/integration/visualizations/chart_spec.js
@@ -26,33 +26,33 @@ const SQL = `
describe("Chart", () => {
beforeEach(() => {
cy.login();
- cy.createQuery({ name: "Chart Visualization", query: SQL })
- .its("id")
- .as("queryId");
+ cy.createQuery({ name: "Chart Visualization", query: SQL }).its("id").as("queryId");
});
- it("creates Bar charts", function() {
+ it("creates Bar charts", function () {
cy.visit(`queries/${this.queryId}/source`);
cy.getByTestId("ExecuteButton").click();
- const getBarChartAssertionFunction = (specificBarChartAssertionFn = () => {}) => () => {
- // checks for TabbedEditor standard tabs
- assertTabbedEditor();
+ const getBarChartAssertionFunction =
+ (specificBarChartAssertionFn = () => {}) =>
+ () => {
+ // checks for TabbedEditor standard tabs
+ assertTabbedEditor();
- // standard chart should be bar
- cy.getByTestId("Chart.GlobalSeriesType").contains(".ant-select-selection-item", "Bar");
+ // standard chart should be bar
+ cy.getByTestId("Chart.GlobalSeriesType").contains(".ant-select-selection-item", "Bar");
- // checks the plot canvas exists and is empty
- assertPlotPreview("not.exist");
+ // checks the plot canvas exists and is empty
+ assertPlotPreview("not.exist");
- // creates a chart and checks it is plotted
- cy.getByTestId("Chart.ColumnMapping.x").selectAntdOption("Chart.ColumnMapping.x.stage");
- cy.getByTestId("Chart.ColumnMapping.y").selectAntdOption("Chart.ColumnMapping.y.value1");
- cy.getByTestId("Chart.ColumnMapping.y").selectAntdOption("Chart.ColumnMapping.y.value2");
- assertPlotPreview("exist");
+ // creates a chart and checks it is plotted
+ cy.getByTestId("Chart.ColumnMapping.x").selectAntdOption("Chart.ColumnMapping.x.stage");
+ cy.getByTestId("Chart.ColumnMapping.y").selectAntdOption("Chart.ColumnMapping.y.value1");
+ cy.getByTestId("Chart.ColumnMapping.y").selectAntdOption("Chart.ColumnMapping.y.value2");
+ assertPlotPreview("exist");
- specificBarChartAssertionFn();
- };
+ specificBarChartAssertionFn();
+ };
const chartTests = [
{
@@ -95,8 +95,8 @@ describe("Chart", () => {
const withDashboardWidgetsAssertionFn = (widgetGetters, dashboardUrl) => {
cy.visit(dashboardUrl);
- widgetGetters.forEach(widgetGetter => {
- cy.get(`@${widgetGetter}`).then(widget => {
+ widgetGetters.forEach((widgetGetter) => {
+ cy.get(`@${widgetGetter}`).then((widget) => {
cy.getByTestId(getWidgetTestId(widget)).within(() => {
cy.get("g.points").should("exist");
});
@@ -107,4 +107,34 @@ describe("Chart", () => {
createDashboardWithCharts("Bar chart visualizations", chartGetters, withDashboardWidgetsAssertionFn);
cy.percySnapshot("Visualizations - Charts - Bar");
});
+ it("colors Bar charts", function () {
+ cy.visit(`queries/${this.queryId}/source`);
+ cy.getByTestId("ExecuteButton").click();
+ cy.getByTestId("NewVisualization").click();
+ cy.getByTestId("Chart.ColumnMapping.x").selectAntdOption("Chart.ColumnMapping.x.stage");
+ cy.getByTestId("Chart.ColumnMapping.y").selectAntdOption("Chart.ColumnMapping.y.value1");
+ cy.getByTestId("VisualizationEditor.Tabs.Colors").click();
+ cy.getByTestId("ColorScheme").click();
+ cy.getByTestId("ColorOptionViridis").click();
+ cy.getByTestId("ColorScheme").click();
+ cy.getByTestId("ColorOptionTableau 10").click();
+ cy.getByTestId("ColorScheme").click();
+ cy.getByTestId("ColorOptionD3 Category 10").click();
+ });
+ it("colors Pie charts", function () {
+ cy.visit(`queries/${this.queryId}/source`);
+ cy.getByTestId("ExecuteButton").click();
+ cy.getByTestId("NewVisualization").click();
+ cy.getByTestId("Chart.GlobalSeriesType").click();
+ cy.getByTestId("Chart.ChartType.pie").click();
+ cy.getByTestId("Chart.ColumnMapping.x").selectAntdOption("Chart.ColumnMapping.x.stage");
+ cy.getByTestId("Chart.ColumnMapping.y").selectAntdOption("Chart.ColumnMapping.y.value1");
+ cy.getByTestId("VisualizationEditor.Tabs.Colors").click();
+ cy.getByTestId("ColorScheme").click();
+ cy.getByTestId("ColorOptionViridis").click();
+ cy.getByTestId("ColorScheme").click();
+ cy.getByTestId("ColorOptionTableau 10").click();
+ cy.getByTestId("ColorScheme").click();
+ cy.getByTestId("ColorOptionD3 Category 10").click();
+ });
});
diff --git a/client/cypress/integration/visualizations/pivot_spec.js b/client/cypress/integration/visualizations/pivot_spec.js
index c0622b6158..ad10bc2a16 100644
--- a/client/cypress/integration/visualizations/pivot_spec.js
+++ b/client/cypress/integration/visualizations/pivot_spec.js
@@ -99,8 +99,14 @@ describe("Pivot", () => {
.focus()
.type(" UNION ALL {enter}SELECT 'c' AS stage1, 'c5' AS stage2, 55 AS value");
+ // wait for the query text change to propagate (it's debounced in QuerySource.jsx)
+ // eslint-disable-next-line cypress/no-unnecessary-waiting
+ cy.wait(200);
+
cy.getByTestId("SaveButton").click();
- cy.getByTestId("ExecuteButton").click();
+ cy.getByTestId("ExecuteButton")
+ .should("be.enabled")
+ .click();
// assert number of rows is 12
cy.getByTestId("PivotTableVisualization").contains(".pvtGrandTotal", "12");
diff --git a/client/cypress/integration/visualizations/table/table_spec.js b/client/cypress/integration/visualizations/table/table_spec.js
index b0fa8ec61c..3191bc82b3 100644
--- a/client/cypress/integration/visualizations/table/table_spec.js
+++ b/client/cypress/integration/visualizations/table/table_spec.js
@@ -22,10 +22,7 @@ function prepareVisualization(query, type, name, options) {
cy.get("body").type("{alt}D");
// do some pre-checks here to ensure that visualization was created and is visible
- cy.getByTestId("TableVisualization")
- .should("exist")
- .find("table")
- .should("exist");
+ cy.getByTestId("TableVisualization").should("exist").find("table").should("exist");
return cy.then(() => ({ queryId, visualizationId }));
});
@@ -53,7 +50,7 @@ describe("Table", () => {
});
describe("Sorting data", () => {
- beforeEach(function() {
+ beforeEach(function () {
const { query, config } = MultiColumnSort;
prepareVisualization(query, "TABLE", "Sort data", config).then(({ queryId, visualizationId }) => {
this.queryId = queryId;
@@ -61,39 +58,22 @@ describe("Table", () => {
});
});
- it("sorts data by a single column", function() {
- cy.getByTestId("TableVisualization")
- .find("table th")
- .contains("c")
- .should("exist")
- .click();
+ it("sorts data by a single column", function () {
+ cy.getByTestId("TableVisualization").find("table th").contains("c").should("exist").click();
cy.percySnapshot("Visualizations - Table (Single-column sort)", { widths: [viewportWidth] });
});
- it("sorts data by a multiple columns", function() {
- cy.getByTestId("TableVisualization")
- .find("table th")
- .contains("a")
- .should("exist")
- .click();
+ it("sorts data by a multiple columns", function () {
+ cy.getByTestId("TableVisualization").find("table th").contains("a").should("exist").click();
cy.get("body").type("{shift}", { release: false });
- cy.getByTestId("TableVisualization")
- .find("table th")
- .contains("b")
- .should("exist")
- .click();
+ cy.getByTestId("TableVisualization").find("table th").contains("b").should("exist").click();
cy.percySnapshot("Visualizations - Table (Multi-column sort)", { widths: [viewportWidth] });
});
- it("sorts data in reverse order", function() {
- cy.getByTestId("TableVisualization")
- .find("table th")
- .contains("c")
- .should("exist")
- .click()
- .click();
+ it("sorts data in reverse order", function () {
+ cy.getByTestId("TableVisualization").find("table th").contains("c").should("exist").click().click();
cy.percySnapshot("Visualizations - Table (Single-column reverse sort)", { widths: [viewportWidth] });
});
});
@@ -101,10 +81,7 @@ describe("Table", () => {
it("searches in multiple columns", () => {
const { query, config } = SearchInData;
prepareVisualization(query, "TABLE", "Search", config).then(({ visualizationId }) => {
- cy.getByTestId("TableVisualization")
- .find("table input")
- .should("exist")
- .type("test");
+ cy.getByTestId("TableVisualization").find("table input").should("exist").type("test");
cy.percySnapshot("Visualizations - Table (Search in data)", { widths: [viewportWidth] });
});
});
diff --git a/client/cypress/plugins/index.js b/client/cypress/plugins/index.js
deleted file mode 100644
index a11c0619e7..0000000000
--- a/client/cypress/plugins/index.js
+++ /dev/null
@@ -1,10 +0,0 @@
-const percyHealthCheck = require("@percy/cypress/task"); // eslint-disable-line import/no-extraneous-dependencies, import/no-unresolved
-
-module.exports = (on, config) => {
- if (config.env.coverage) {
- require("@cypress/code-coverage/task")(on, config);
- }
- on("task", percyHealthCheck);
-
- return config;
-};
diff --git a/client/cypress/support/redash-api/index.js b/client/cypress/support/redash-api/index.js
index e9bc0ace69..2204f381c2 100644
--- a/client/cypress/support/redash-api/index.js
+++ b/client/cypress/support/redash-api/index.js
@@ -2,12 +2,12 @@
const { extend, get, merge, find } = Cypress._;
-const post = options =>
+const post = (options) =>
cy
.getCookie("csrf_token")
- .then(csrf => cy.request({ ...options, method: "POST", headers: { "X-CSRF-TOKEN": csrf.value } }));
+ .then((csrf) => cy.request({ ...options, method: "POST", headers: { "X-CSRF-TOKEN": csrf.value } }));
-Cypress.Commands.add("createDashboard", name => {
+Cypress.Commands.add("createDashboard", (name) => {
return post({ url: "api/dashboards", body: { name } }).then(({ body }) => body);
});
@@ -28,7 +28,7 @@ Cypress.Commands.add("createQuery", (data, shouldPublish = true) => {
// eslint-disable-next-line cypress/no-assigning-return-values
let request = post({ url: "/api/queries", body: merged }).then(({ body }) => body);
if (shouldPublish) {
- request = request.then(query =>
+ request = request.then((query) =>
post({ url: `/api/queries/${query.id}`, body: { is_draft: false } }).then(() => query)
);
}
@@ -86,6 +86,7 @@ Cypress.Commands.add("addWidget", (dashboardId, visualizationId, options = {}) =
Cypress.Commands.add("createAlert", (queryId, options = {}, name) => {
const defaultOptions = {
column: "?column?",
+ selector: "first",
op: "greater than",
rearm: 0,
value: 1,
@@ -109,7 +110,7 @@ Cypress.Commands.add("createUser", ({ name, email, password }) => {
url: "api/users?no_invite=yes",
body: { name, email },
failOnStatusCode: false,
- }).then(xhr => {
+ }).then((xhr) => {
const { status, body } = xhr;
if (status < 200 || status > 400) {
throw new Error(xhr);
@@ -146,7 +147,7 @@ Cypress.Commands.add("getDestinations", () => {
Cypress.Commands.add("addDestinationSubscription", (alertId, destinationName) => {
return cy
.getDestinations()
- .then(destinations => {
+ .then((destinations) => {
const destination = find(destinations, { name: destinationName });
if (!destination) {
throw new Error("Destination not found");
@@ -166,6 +167,6 @@ Cypress.Commands.add("addDestinationSubscription", (alertId, destinationName) =>
});
});
-Cypress.Commands.add("updateOrgSettings", settings => {
+Cypress.Commands.add("updateOrgSettings", (settings) => {
return post({ url: "api/settings/organization", body: settings }).then(({ body }) => body);
});
diff --git a/client/cypress/support/visualizations/chart.js b/client/cypress/support/visualizations/chart.js
index a18375daee..e6d7d8b585 100644
--- a/client/cypress/support/visualizations/chart.js
+++ b/client/cypress/support/visualizations/chart.js
@@ -80,6 +80,10 @@ export function assertAxesAndAddLabels(xaxisLabel, yaxisLabel) {
.clear()
.type(yaxisLabel);
+ cy.getByTestId("Chart.LeftYAxis.TickFormat")
+ .clear()
+ .type("+");
+
cy.getByTestId("VisualizationEditor.Tabs.General").click();
}
diff --git a/client/cypress/support/visualizations/table.js b/client/cypress/support/visualizations/table.js
index 2095b9fb88..42645ed270 100644
--- a/client/cypress/support/visualizations/table.js
+++ b/client/cypress/support/visualizations/table.js
@@ -1,12 +1,10 @@
export function expectTableToHaveLength(length) {
- cy.getByTestId("TableVisualization")
- .find("tbody tr")
- .should("have.length", length);
+ cy.getByTestId("TableVisualization").find("tbody tr").should("have.length", length);
}
export function expectFirstColumnToHaveMembers(values) {
cy.getByTestId("TableVisualization")
.find("tbody tr td:first-child")
- .then($cell => Cypress.$.map($cell, item => Cypress.$(item).text()))
- .then(firstColumnCells => expect(firstColumnCells).to.have.members(values));
+ .then(($cell) => Cypress.$.map($cell, (item) => Cypress.$(item).text()))
+ .then((firstColumnCells) => expect(firstColumnCells).to.have.members(values));
}
diff --git a/codecov.yml b/codecov.yml
new file mode 100644
index 0000000000..8742d9f869
--- /dev/null
+++ b/codecov.yml
@@ -0,0 +1,6 @@
+comment:
+ layout: " diff, flags, files"
+ behavior: default
+ require_changes: false
+ require_base: true
+ require_head: true
diff --git a/docker-compose.yml b/compose.yaml
similarity index 85%
rename from docker-compose.yml
rename to compose.yaml
index fdfcdd1a5d..590e816a24 100644
--- a/docker-compose.yml
+++ b/compose.yaml
@@ -1,22 +1,23 @@
# This configuration file is for the **development** setup.
# For a production example please refer to getredash/setup repository on GitHub.
-version: "2.2"
x-redash-service: &redash-service
build:
context: .
args:
- skip_frontend_build: "true"
+ skip_frontend_build: "true" # set to empty string to build
volumes:
- .:/app
env_file:
- .env
x-redash-environment: &redash-environment
+ REDASH_HOST: http://localhost:5001
REDASH_LOG_LEVEL: "INFO"
REDASH_REDIS_URL: "redis://redis:6379/0"
REDASH_DATABASE_URL: "postgresql://postgres@postgres/postgres"
REDASH_RATELIMIT_ENABLED: "false"
REDASH_MAIL_DEFAULT_SENDER: "redash@example.com"
REDASH_MAIL_SERVER: "email"
+ REDASH_MAIL_PORT: 1025
REDASH_ENFORCE_CSRF: "true"
# Set secret keys in the .env file
services:
@@ -27,7 +28,7 @@ services:
- postgres
- redis
ports:
- - "5000:5000"
+ - "5001:5000"
- "5678:5678"
environment:
<<: *redash-environment
@@ -48,21 +49,22 @@ services:
<<: *redash-environment
PYTHONUNBUFFERED: 0
redis:
- image: redis:3-alpine
+ image: redis:7-alpine
restart: unless-stopped
postgres:
- image: postgres:9.5-alpine
+ image: pgautoupgrade/pgautoupgrade:latest
+ ports:
+ - "15432:5432"
# The following turns the DB into less durable, but gains significant performance improvements for the tests run (x3
# improvement on my personal machine). We should consider moving this into a dedicated Docker Compose configuration for
# tests.
- ports:
- - "15432:5432"
command: "postgres -c fsync=off -c full_page_writes=off -c synchronous_commit=OFF"
restart: unless-stopped
environment:
POSTGRES_HOST_AUTH_METHOD: "trust"
email:
- image: djfarrelly/maildev
+ image: maildev/maildev
ports:
- - "1080:80"
+ - "1080:1080"
+ - "1025:1025"
restart: unless-stopped
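
The development compose file now exposes maildev's SMTP listener (1025) next to its web UI (1080) and points REDASH_MAIL_PORT at it, with the Redash server published on 5001. A minimal sketch, assuming the compose stack above is running locally, for pushing a throwaway message through the dev SMTP endpoint so it appears in the maildev UI at http://localhost:1080 (addresses and subject are illustrative only):

    # Send a test message to the maildev SMTP port published by compose.yaml.
    import smtplib
    from email.message import EmailMessage

    msg = EmailMessage()
    msg["Subject"] = "Redash dev mail check"
    msg["From"] = "redash@example.com"      # matches REDASH_MAIL_DEFAULT_SENDER above
    msg["To"] = "someone@example.com"       # any address; maildev captures everything
    msg.set_content("If this shows up in maildev, the SMTP wiring works.")

    with smtplib.SMTP("localhost", 1025) as smtp:
        smtp.send_message(msg)
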
diff --git a/cypress.config.js b/cypress.config.js
new file mode 100644
index 0000000000..292af0d4cb
--- /dev/null
+++ b/cypress.config.js
@@ -0,0 +1,22 @@
+const { defineConfig } = require('cypress')
+
+module.exports = defineConfig({
+ e2e: {
+ baseUrl: 'http://localhost:5001',
+ defaultCommandTimeout: 20000,
+ downloadsFolder: 'client/cypress/downloads',
+ fixturesFolder: 'client/cypress/fixtures',
+ requestTimeout: 15000,
+ screenshotsFolder: 'client/cypress/screenshots',
+ specPattern: 'client/cypress/integration/',
+ supportFile: 'client/cypress/support/index.js',
+ video: true,
+ videoUploadOnPasses: false,
+ videosFolder: 'client/cypress/videos',
+ viewportHeight: 1024,
+ viewportWidth: 1280,
+ env: {
+ coverage: false
+ }
+ },
+})
diff --git a/cypress.json b/cypress.json
deleted file mode 100644
index 312f085573..0000000000
--- a/cypress.json
+++ /dev/null
@@ -1,18 +0,0 @@
-{
- "baseUrl": "http://localhost:5000",
- "video": true,
- "videoUploadOnPasses": false,
- "fixturesFolder": "client/cypress/fixtures",
- "integrationFolder": "client/cypress/integration",
- "pluginsFile": "client/cypress/plugins/index.js",
- "screenshotsFolder": "client/cypress/screenshots",
- "videosFolder": "client/cypress/videos",
- "supportFile": "client/cypress/support/index.js",
- "defaultCommandTimeout": 20000,
- "requestTimeout": 15000,
- "viewportWidth": 1280,
- "viewportHeight": 1024,
- "env": {
- "coverage": false
- }
-}
diff --git a/manage.py b/manage.py
index 3ccb9ecb51..aa8d85c81e 100755
--- a/manage.py
+++ b/manage.py
@@ -5,5 +5,5 @@
from redash.cli import manager
-if __name__ == '__main__':
+if __name__ == "__main__":
manager()
diff --git a/migrations/versions/0ec979123ba4_.py b/migrations/versions/0ec979123ba4_.py
index 4dfbe1ba15..9f931b1c56 100644
--- a/migrations/versions/0ec979123ba4_.py
+++ b/migrations/versions/0ec979123ba4_.py
@@ -7,7 +7,7 @@
"""
from alembic import op
import sqlalchemy as sa
-from sqlalchemy.dialects import postgresql
+from sqlalchemy.dialects.postgresql import JSON
# revision identifiers, used by Alembic.
revision = '0ec979123ba4'
@@ -18,7 +18,7 @@
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
- op.add_column('dashboards', sa.Column('options', postgresql.JSON(astext_type=sa.Text()), server_default='{}', nullable=False))
+ op.add_column('dashboards', sa.Column('options', JSON(astext_type=sa.Text()), server_default='{}', nullable=False))
# ### end Alembic commands ###
diff --git a/migrations/versions/1038c2174f5d_make_case_insensitive_hash_of_query_text.py b/migrations/versions/1038c2174f5d_make_case_insensitive_hash_of_query_text.py
new file mode 100644
index 0000000000..c872a918b9
--- /dev/null
+++ b/migrations/versions/1038c2174f5d_make_case_insensitive_hash_of_query_text.py
@@ -0,0 +1,51 @@
+"""Make case insensitive hash of query text
+
+Revision ID: 1038c2174f5d
+Revises: fd4fc850d7ea
+Create Date: 2023-07-16 23:10:12.885949
+
+"""
+from alembic import op
+import sqlalchemy as sa
+from sqlalchemy.sql import table
+
+from redash.utils import gen_query_hash
+
+# revision identifiers, used by Alembic.
+revision = '1038c2174f5d'
+down_revision = 'fd4fc850d7ea'
+branch_labels = None
+depends_on = None
+
+
+
+def change_query_hash(conn, table, query_text_to):
+ for record in conn.execute(table.select()):
+ query_text = query_text_to(record.query)
+ conn.execute(
+ table
+ .update()
+ .where(table.c.id == record.id)
+ .values(query_hash=gen_query_hash(query_text)))
+
+
+def upgrade():
+ queries = table(
+ 'queries',
+ sa.Column('id', sa.Integer, primary_key=True),
+ sa.Column('query', sa.Text),
+ sa.Column('query_hash', sa.String(length=10)))
+
+ conn = op.get_bind()
+ change_query_hash(conn, queries, query_text_to=str)
+
+
+def downgrade():
+ queries = table(
+ 'queries',
+ sa.Column('id', sa.Integer, primary_key=True),
+ sa.Column('query', sa.Text),
+ sa.Column('query_hash', sa.String(length=10)))
+
+ conn = op.get_bind()
+ change_query_hash(conn, queries, query_text_to=str.lower)
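
The upgrade above re-hashes each query from its text as stored (str), while the downgrade restores the old behaviour of hashing the lowercased text. A rough illustration of why the lowercased variant is lossy; toy_hash below is a stand-in for illustration, not Redash's gen_query_hash:

    import hashlib

    def toy_hash(text):
        # stand-in hash, truncated to 10 chars like the query_hash column
        return hashlib.md5(text.encode("utf-8")).hexdigest()[:10]

    q1 = "SELECT * FROM events WHERE name = 'Signup'"
    q2 = "select * from events where name = 'signup'"

    print(toy_hash(q1.lower()) == toy_hash(q2.lower()))  # True: lowercasing conflates the two queries
    print(toy_hash(q1) == toy_hash(q2))                  # False: case-preserving hashes stay distinct
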
diff --git a/migrations/versions/640888ce445d_.py b/migrations/versions/640888ce445d_.py
index 0017a09527..876ce8b7ba 100644
--- a/migrations/versions/640888ce445d_.py
+++ b/migrations/versions/640888ce445d_.py
@@ -10,8 +10,7 @@
from alembic import op
import sqlalchemy as sa
from sqlalchemy.sql import table
-
-from redash.models import MutableDict, PseudoJSON
+from redash.models import MutableDict
# revision identifiers, used by Alembic.
@@ -41,7 +40,7 @@ def upgrade():
"queries",
sa.Column(
"schedule",
- MutableDict.as_mutable(PseudoJSON),
+ sa.Text(),
nullable=False,
server_default=json.dumps({}),
),
@@ -51,7 +50,7 @@ def upgrade():
queries = table(
"queries",
sa.Column("id", sa.Integer, primary_key=True),
- sa.Column("schedule", MutableDict.as_mutable(PseudoJSON)),
+ sa.Column("schedule", sa.Text()),
sa.Column("old_schedule", sa.String(length=10)),
)
@@ -85,7 +84,7 @@ def downgrade():
"queries",
sa.Column(
"old_schedule",
- MutableDict.as_mutable(PseudoJSON),
+ sa.Text(),
nullable=False,
server_default=json.dumps({}),
),
@@ -93,8 +92,8 @@ def downgrade():
queries = table(
"queries",
- sa.Column("schedule", MutableDict.as_mutable(PseudoJSON)),
- sa.Column("old_schedule", MutableDict.as_mutable(PseudoJSON)),
+ sa.Column("schedule", sa.Text()),
+ sa.Column("old_schedule", sa.Text()),
)
op.execute(queries.update().values({"old_schedule": queries.c.schedule}))
@@ -106,7 +105,7 @@ def downgrade():
"queries",
sa.Column("id", sa.Integer, primary_key=True),
sa.Column("schedule", sa.String(length=10)),
- sa.Column("old_schedule", MutableDict.as_mutable(PseudoJSON)),
+ sa.Column("old_schedule", sa.Text()),
)
conn = op.get_bind()
diff --git a/migrations/versions/7205816877ec_change_type_of_json_fields_from_varchar_.py b/migrations/versions/7205816877ec_change_type_of_json_fields_from_varchar_.py
new file mode 100644
index 0000000000..a907599261
--- /dev/null
+++ b/migrations/versions/7205816877ec_change_type_of_json_fields_from_varchar_.py
@@ -0,0 +1,135 @@
+"""change type of json fields from varchar to json
+
+Revision ID: 7205816877ec
+Revises: 7ce5925f832b
+Create Date: 2024-01-03 13:55:18.885021
+
+"""
+from alembic import op
+import sqlalchemy as sa
+from sqlalchemy.dialects.postgresql import JSONB, JSON
+
+
+# revision identifiers, used by Alembic.
+revision = '7205816877ec'
+down_revision = '7ce5925f832b'
+branch_labels = None
+depends_on = None
+
+
+def upgrade():
+ connection = op.get_bind()
+ op.alter_column('queries', 'options',
+ existing_type=sa.Text(),
+ type_=JSONB(astext_type=sa.Text()),
+ nullable=True,
+ postgresql_using='options::jsonb',
+ )
+ op.alter_column('queries', 'schedule',
+ existing_type=sa.Text(),
+ type_=JSONB(astext_type=sa.Text()),
+ nullable=True,
+ postgresql_using='schedule::jsonb',
+ )
+ op.alter_column('events', 'additional_properties',
+ existing_type=sa.Text(),
+ type_=JSONB(astext_type=sa.Text()),
+ nullable=True,
+ postgresql_using='additional_properties::jsonb',
+ )
+ op.alter_column('organizations', 'settings',
+ existing_type=sa.Text(),
+ type_=JSONB(astext_type=sa.Text()),
+ nullable=True,
+ postgresql_using='settings::jsonb',
+ )
+ op.alter_column('alerts', 'options',
+ existing_type=JSON(astext_type=sa.Text()),
+ type_=JSONB(astext_type=sa.Text()),
+ nullable=True,
+ postgresql_using='options::jsonb',
+ )
+ op.alter_column('dashboards', 'options',
+ existing_type=JSON(astext_type=sa.Text()),
+ type_=JSONB(astext_type=sa.Text()),
+ postgresql_using='options::jsonb',
+ )
+ op.alter_column('dashboards', 'layout',
+ existing_type=sa.Text(),
+ type_=JSONB(astext_type=sa.Text()),
+ postgresql_using='layout::jsonb',
+ )
+ op.alter_column('changes', 'change',
+ existing_type=JSON(astext_type=sa.Text()),
+ type_=JSONB(astext_type=sa.Text()),
+ postgresql_using='change::jsonb',
+ )
+ op.alter_column('visualizations', 'options',
+ existing_type=sa.Text(),
+ type_=JSONB(astext_type=sa.Text()),
+ postgresql_using='options::jsonb',
+ )
+ op.alter_column('widgets', 'options',
+ existing_type=sa.Text(),
+ type_=JSONB(astext_type=sa.Text()),
+ postgresql_using='options::jsonb',
+ )
+
+
+def downgrade():
+ connection = op.get_bind()
+ op.alter_column('queries', 'options',
+ existing_type=JSONB(astext_type=sa.Text()),
+ type_=sa.Text(),
+ postgresql_using='options::text',
+ existing_nullable=True,
+ )
+ op.alter_column('queries', 'schedule',
+ existing_type=JSONB(astext_type=sa.Text()),
+ type_=sa.Text(),
+ postgresql_using='schedule::text',
+ existing_nullable=True,
+ )
+ op.alter_column('events', 'additional_properties',
+ existing_type=JSONB(astext_type=sa.Text()),
+ type_=sa.Text(),
+ postgresql_using='additional_properties::text',
+ existing_nullable=True,
+ )
+ op.alter_column('organizations', 'settings',
+ existing_type=JSONB(astext_type=sa.Text()),
+ type_=sa.Text(),
+ postgresql_using='settings::text',
+ existing_nullable=True,
+ )
+ op.alter_column('alerts', 'options',
+ existing_type=JSONB(astext_type=sa.Text()),
+ type_=JSON(astext_type=sa.Text()),
+ postgresql_using='options::json',
+ existing_nullable=True,
+ )
+ op.alter_column('dashboards', 'options',
+ existing_type=JSONB(astext_type=sa.Text()),
+ type_=JSON(astext_type=sa.Text()),
+ postgresql_using='options::json',
+ )
+ op.alter_column('dashboards', 'layout',
+ existing_type=JSONB(astext_type=sa.Text()),
+ type_=sa.Text(),
+ postgresql_using='layout::text',
+ )
+ op.alter_column('changes', 'change',
+ existing_type=JSONB(astext_type=sa.Text()),
+ type_=JSON(astext_type=sa.Text()),
+ postgresql_using='change::json',
+ )
+ op.alter_column('visualizations', 'options',
+ type_=sa.Text(),
+ existing_type=JSONB(astext_type=sa.Text()),
+ postgresql_using='options::text',
+ )
+ op.alter_column('widgets', 'options',
+ type_=sa.Text(),
+ existing_type=JSONB(astext_type=sa.Text()),
+ postgresql_using='options::text',
+ )
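
Every alter_column in this migration follows the same shape: declare the existing type, the target type, and an explicit postgresql_using cast so Postgres knows how to convert the rows already in the table. A small helper sketch of that pattern (the function name is ours, not part of the migration):

    import sqlalchemy as sa
    from alembic import op
    from sqlalchemy.dialects.postgresql import JSONB

    def text_column_to_jsonb(table_name, column_name):
        # Cast a text column to jsonb in place; USING tells Postgres how to convert existing rows.
        op.alter_column(
            table_name,
            column_name,
            existing_type=sa.Text(),
            type_=JSONB(astext_type=sa.Text()),
            postgresql_using=column_name + "::jsonb",
        )
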
diff --git a/migrations/versions/73beceabb948_bring_back_null_schedule.py b/migrations/versions/73beceabb948_bring_back_null_schedule.py
index b510639dd2..2a07a0b7bd 100644
--- a/migrations/versions/73beceabb948_bring_back_null_schedule.py
+++ b/migrations/versions/73beceabb948_bring_back_null_schedule.py
@@ -7,10 +7,9 @@
"""
from alembic import op
import sqlalchemy as sa
-from sqlalchemy.dialects import postgresql
from sqlalchemy.sql import table
-from redash.models import MutableDict, PseudoJSON
+from redash.models import MutableDict
# revision identifiers, used by Alembic.
revision = "73beceabb948"
@@ -43,7 +42,7 @@ def upgrade():
queries = table(
"queries",
sa.Column("id", sa.Integer, primary_key=True),
- sa.Column("schedule", MutableDict.as_mutable(PseudoJSON)),
+ sa.Column("schedule", sa.Text()),
)
conn = op.get_bind()
diff --git a/migrations/versions/7ce5925f832b_create_sqlalchemy_searchable_expressions.py b/migrations/versions/7ce5925f832b_create_sqlalchemy_searchable_expressions.py
new file mode 100644
index 0000000000..46a0370392
--- /dev/null
+++ b/migrations/versions/7ce5925f832b_create_sqlalchemy_searchable_expressions.py
@@ -0,0 +1,25 @@
+"""create sqlalchemy_searchable expressions
+
+Revision ID: 7ce5925f832b
+Revises: 1038c2174f5d
+Create Date: 2023-09-29 16:48:29.517762
+
+"""
+from alembic import op
+import sqlalchemy as sa
+from sqlalchemy_searchable import sql_expressions
+
+
+# revision identifiers, used by Alembic.
+revision = '7ce5925f832b'
+down_revision = '1038c2174f5d'
+branch_labels = None
+depends_on = None
+
+
+def upgrade():
+ op.execute(sql_expressions)
+
+
+def downgrade():
+ pass
diff --git a/migrations/versions/969126bd800f_.py b/migrations/versions/969126bd800f_.py
index 17eec1153d..1a49383861 100644
--- a/migrations/versions/969126bd800f_.py
+++ b/migrations/versions/969126bd800f_.py
@@ -6,7 +6,7 @@
"""
-import simplejson
+import json
from alembic import op
import sqlalchemy as sa
@@ -27,7 +27,7 @@ def upgrade():
dashboard_result = db.session.execute("SELECT id, layout FROM dashboards")
for dashboard in dashboard_result:
print(" Updating dashboard: {}".format(dashboard["id"]))
- layout = simplejson.loads(dashboard["layout"])
+ layout = json.loads(dashboard["layout"])
print(" Building widgets map:")
widgets = {}
@@ -53,7 +53,7 @@ def upgrade():
if widget is None:
continue
- options = simplejson.loads(widget["options"]) or {}
+ options = json.loads(widget["options"]) or {}
options["position"] = {
"row": row_index,
"col": column_index * column_size,
@@ -62,7 +62,7 @@ def upgrade():
db.session.execute(
"UPDATE widgets SET options=:options WHERE id=:id",
- {"options": simplejson.dumps(options), "id": widget_id},
+ {"options": json.dumps(options), "id": widget_id},
)
dashboard_result.close()
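
simplejson is swapped for the standard-library json module here; for the loads/dumps calls this migration makes, the two behave the same. A trivial check of the round-trip the migration relies on:

    import json

    options = {"position": {"row": 0, "col": 0, "sizeX": 3, "sizeY": 8}}
    assert json.loads(json.dumps(options)) == options  # same behaviour simplejson provided here
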
diff --git a/migrations/versions/98af61feea92_add_encrypted_options_to_data_sources.py b/migrations/versions/98af61feea92_add_encrypted_options_to_data_sources.py
index 23670adfee..0c22043613 100644
--- a/migrations/versions/98af61feea92_add_encrypted_options_to_data_sources.py
+++ b/migrations/versions/98af61feea92_add_encrypted_options_to_data_sources.py
@@ -7,7 +7,7 @@
"""
from alembic import op
import sqlalchemy as sa
-from sqlalchemy.dialects import postgresql
+from sqlalchemy.dialects.postgresql import BYTEA
from sqlalchemy.sql import table
from sqlalchemy_utils.types.encrypted.encrypted_type import FernetEngine
@@ -18,7 +18,6 @@
Configuration,
MutableDict,
MutableList,
- PseudoJSON,
)
# revision identifiers, used by Alembic.
@@ -31,7 +30,7 @@
def upgrade():
op.add_column(
"data_sources",
- sa.Column("encrypted_options", postgresql.BYTEA(), nullable=True),
+ sa.Column("encrypted_options", BYTEA(), nullable=True),
)
# copy values
diff --git a/migrations/versions/a92d92aa678e_inline_tags.py b/migrations/versions/a92d92aa678e_inline_tags.py
index f79924dc62..40421cf468 100644
--- a/migrations/versions/a92d92aa678e_inline_tags.py
+++ b/migrations/versions/a92d92aa678e_inline_tags.py
@@ -9,7 +9,7 @@
from funcy import flatten, compact
from alembic import op
import sqlalchemy as sa
-from sqlalchemy.dialects import postgresql
+from sqlalchemy.dialects.postgresql import ARRAY
from redash import models
# revision identifiers, used by Alembic.
@@ -21,10 +21,10 @@
def upgrade():
op.add_column(
- "dashboards", sa.Column("tags", postgresql.ARRAY(sa.Unicode()), nullable=True)
+ "dashboards", sa.Column("tags", ARRAY(sa.Unicode()), nullable=True)
)
op.add_column(
- "queries", sa.Column("tags", postgresql.ARRAY(sa.Unicode()), nullable=True)
+ "queries", sa.Column("tags", ARRAY(sa.Unicode()), nullable=True)
)
diff --git a/migrations/versions/d7d747033183_encrypt_alert_destinations.py b/migrations/versions/d7d747033183_encrypt_alert_destinations.py
index 252e5bc225..d66460c3f2 100644
--- a/migrations/versions/d7d747033183_encrypt_alert_destinations.py
+++ b/migrations/versions/d7d747033183_encrypt_alert_destinations.py
@@ -7,7 +7,7 @@
"""
from alembic import op
import sqlalchemy as sa
-from sqlalchemy.dialects import postgresql
+from sqlalchemy.dialects.postgresql import BYTEA
from sqlalchemy.sql import table
from sqlalchemy_utils.types.encrypted.encrypted_type import FernetEngine
@@ -30,7 +30,7 @@
def upgrade():
op.add_column(
"notification_destinations",
- sa.Column("encrypted_options", postgresql.BYTEA(), nullable=True)
+ sa.Column("encrypted_options", BYTEA(), nullable=True)
)
# copy values
diff --git a/migrations/versions/e7f8a917aa8e_add_user_details_json_column.py b/migrations/versions/e7f8a917aa8e_add_user_details_json_column.py
index a5a827091c..77c4f54485 100644
--- a/migrations/versions/e7f8a917aa8e_add_user_details_json_column.py
+++ b/migrations/versions/e7f8a917aa8e_add_user_details_json_column.py
@@ -7,7 +7,7 @@
"""
from alembic import op
import sqlalchemy as sa
-from sqlalchemy.dialects import postgresql
+from sqlalchemy.dialects.postgresql import JSON
# revision identifiers, used by Alembic.
revision = "e7f8a917aa8e"
@@ -21,7 +21,7 @@ def upgrade():
"users",
sa.Column(
"details",
- postgresql.JSON(astext_type=sa.Text()),
+ JSON(astext_type=sa.Text()),
server_default="{}",
nullable=True,
),
diff --git a/migrations/versions/fd4fc850d7ea_.py b/migrations/versions/fd4fc850d7ea_.py
new file mode 100644
index 0000000000..18c6ba01ea
--- /dev/null
+++ b/migrations/versions/fd4fc850d7ea_.py
@@ -0,0 +1,60 @@
+"""Convert user details to jsonb and move user profile image url into details column
+
+Revision ID: fd4fc850d7ea
+Revises: 89bc7873a3e0
+Create Date: 2022-01-31 15:24:16.507888
+
+"""
+from alembic import op
+import sqlalchemy as sa
+from sqlalchemy.dialects.postgresql import JSON, JSONB
+
+from redash.models import db
+
+# revision identifiers, used by Alembic.
+revision = 'fd4fc850d7ea'
+down_revision = '89bc7873a3e0'
+branch_labels = None
+depends_on = None
+
+
+def upgrade():
+ connection = op.get_bind()
+
+ ### commands auto generated by Alembic - please adjust! ###
+ op.alter_column('users', 'details',
+ existing_type=JSON(astext_type=sa.Text()),
+ type_=JSONB(astext_type=sa.Text()),
+ existing_nullable=True,
+ existing_server_default=sa.text("'{}'::jsonb"))
+ ### end Alembic commands ###
+
+ update_query = """
+ update users
+ set details = details::jsonb || ('{"profile_image_url": "' || profile_image_url || '"}')::jsonb
+ where 1=1
+ """
+ connection.execute(update_query)
+ op.drop_column("users", "profile_image_url")
+
+
+def downgrade():
+ # ### commands auto generated by Alembic - please adjust! ###
+ connection = op.get_bind()
+ op.add_column("users", sa.Column("profile_image_url", db.String(320), nullable=True))
+
+ update_query = """
+ update users set
+ profile_image_url = details->>'profile_image_url',
+ details = details - 'profile_image_url' ;
+ """
+
+ connection.execute(update_query)
+ db.session.commit()
+ op.alter_column('users', 'details',
+ existing_type=JSONB(astext_type=sa.Text()),
+ type_=JSON(astext_type=sa.Text()),
+ existing_nullable=True,
+ existing_server_default=sa.text("'{}'::json"))
+
+ # ### end Alembic commands ###
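
The upgrade folds profile_image_url into the users.details JSONB column with the || merge operator; the downgrade reads it back with ->> and strips the key with -. A quick way to eyeball the result against a local dev database; the connection URL assumes the Postgres port published by compose.yaml and trust auth, and is only an example:

    import sqlalchemy as sa

    # 15432 is the host port compose.yaml maps to Postgres; adjust if your setup differs.
    engine = sa.create_engine("postgresql://postgres@localhost:15432/postgres")
    with engine.connect() as conn:
        row = conn.execute(
            sa.text("SELECT id, details->>'profile_image_url' FROM users LIMIT 1")
        ).first()
        print(row)
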
diff --git a/netlify.toml b/netlify.toml
index 371050b4fc..47c48e1fbb 100644
--- a/netlify.toml
+++ b/netlify.toml
@@ -1,10 +1,14 @@
[build]
base = "client"
publish = "client/dist"
- command = "npm ci && npm run build"
+ # Netlify doesn't seem to install Yarn even though NETLIFY_USE_YARN is set below
+ # command = "cd ../ && npm i -g yarn@1.22.19 && yarn --frozen-lockfile --force && cd viz-lib && yarn build:babel && cd .. && rm -r ./node_modules/@redash/viz && cp -r ./viz-lib/. ./node_modules/@redash/viz && yarn build && cd ./client"
+ command = "cd ../ && yarn cache clean && yarn --frozen-lockfile --network-concurrency 1 && yarn build && cd ./client"
[build.environment]
- NODE_VERSION = "12.18.4"
+ NODE_VERSION = "18"
+ NETLIFY_USE_YARN = "true"
+ YARN_VERSION = "1.22.19"
CYPRESS_INSTALL_BINARY = "0"
PUPPETEER_SKIP_CHROMIUM_DOWNLOAD = "1"
diff --git a/package.json b/package.json
index e726fdf64d..f4751f3ca7 100644
--- a/package.json
+++ b/package.json
@@ -5,37 +5,37 @@
"main": "index.js",
"scripts": {
"start": "npm-run-all --parallel watch:viz webpack-dev-server",
- "bundle": "bin/bundle-extensions",
"clean": "rm -rf ./client/dist/",
- "build:viz": "(cd viz-lib && npm run build:babel)",
- "build": "npm run clean && npm run build:viz && NODE_ENV=production webpack",
- "build:old-node-version": "npm run clean && NODE_ENV=production node --max-old-space-size=4096 node_modules/.bin/webpack",
- "watch:app": "webpack --watch --progress --colors -d",
- "watch:viz": "(cd viz-lib && npm run watch:babel)",
+ "build:viz": "(cd viz-lib && yarn build:babel)",
+ "build": "yarn clean && yarn build:viz && NODE_OPTIONS=--openssl-legacy-provider NODE_ENV=production webpack",
+ "watch:app": "NODE_OPTIONS=--openssl-legacy-provider webpack watch --progress",
+ "watch:viz": "(cd viz-lib && yarn watch:babel)",
"watch": "npm-run-all --parallel watch:*",
"webpack-dev-server": "webpack-dev-server",
- "analyze": "npm run clean && BUNDLE_ANALYZER=on webpack",
- "analyze:build": "npm run clean && NODE_ENV=production BUNDLE_ANALYZER=on webpack",
- "lint": "npm run lint:base -- --ext .js --ext .jsx --ext .ts --ext .tsx ./client",
- "lint:fix": "npm run lint:base -- --fix --ext .js --ext .jsx --ext .ts --ext .tsx ./client",
+ "analyze": "yarn clean && BUNDLE_ANALYZER=on NODE_OPTIONS=--openssl-legacy-provider webpack",
+ "analyze:build": "yarn clean && NODE_ENV=production BUNDLE_ANALYZER=on NODE_OPTIONS=--openssl-legacy-provider webpack",
+ "lint": "yarn lint:base --ext .js --ext .jsx --ext .ts --ext .tsx ./client",
+ "lint:fix": "yarn lint:base --fix --ext .js --ext .jsx --ext .ts --ext .tsx ./client",
"lint:base": "eslint --config ./client/.eslintrc.js --ignore-path ./client/.eslintignore",
- "lint:ci": "npm run lint -- --max-warnings 0 --format junit --output-file /tmp/test-results/eslint/results.xml",
+ "lint:ci": "yarn lint --max-warnings 0 --format junit --output-file /tmp/test-results/eslint/results.xml",
"prettier": "prettier --write 'client/app/**/*.{js,jsx,ts,tsx}' 'client/cypress/**/*.{js,jsx,ts,tsx}'",
"type-check": "tsc --noEmit --project client/tsconfig.json",
- "type-check:watch": "npm run type-check -- --watch",
+ "type-check:watch": "yarn type-check --watch",
"jest": "TZ=Africa/Khartoum jest",
"test": "run-s type-check jest",
"test:watch": "jest --watch",
"cypress": "node client/cypress/cypress.js",
- "postinstall": "(cd viz-lib && npm ci && npm run build:babel)"
+ "preinstall": "cd viz-lib && yarn link --link-folder ../.yarn",
+ "postinstall": "(cd viz-lib && yarn --frozen-lockfile && yarn build:babel) && yarn link --link-folder ./.yarn @redash/viz"
},
"repository": {
"type": "git",
"url": "git+https://github.com/getredash/redash.git"
},
"engines": {
- "node": "^12.0.0",
- "npm": "^6.0.0"
+ "node": ">16.0 <21.0",
+ "npm": "please-use-yarn",
+ "yarn": "^1.22.10"
},
"author": "Redash Contributors",
"license": "BSD-2-Clause",
@@ -54,8 +54,9 @@
"classnames": "^2.2.6",
"core-js": "^2.6.12",
"d3": "^3.5.17",
- "debug": "^3.1.0",
- "dompurify": "^2.0.7",
+ "debug": "^3.2.7",
+ "dompurify": "^2.0.17",
+ "elliptic": "^6.5.7",
"font-awesome": "^4.7.0",
"history": "^4.10.1",
"hoist-non-react-statics": "^3.3.0",
@@ -65,10 +66,10 @@
"mousetrap": "^1.6.1",
"mustache": "^2.3.0",
"numeral": "^2.0.6",
- "path-to-regexp": "^3.1.0",
+ "path-to-regexp": "^3.3.0",
"prop-types": "^15.6.1",
"query-string": "^6.9.0",
- "react": "^16.14.0",
+ "react": "16.14.0",
"react-ace": "^9.1.1",
"react-dom": "^16.14.0",
"react-grid-layout": "^0.18.2",
@@ -80,18 +81,18 @@
"use-media": "^1.4.0"
},
"devDependencies": {
- "@babel/cli": "^7.10.4",
- "@babel/core": "^7.10.4",
- "@babel/plugin-proposal-class-properties": "^7.10.4",
+ "@babel/cli": "^7.22.9",
+ "@babel/core": "^7.22.9",
+ "@babel/plugin-proposal-class-properties": "^7.18.6",
"@babel/plugin-transform-object-assign": "^7.2.0",
- "@babel/preset-env": "^7.10.4",
+ "@babel/preset-env": "^7.22.9",
"@babel/preset-react": "^7.0.0",
- "@babel/preset-typescript": "^7.10.4",
- "@cypress/code-coverage": "^3.8.1",
- "@percy/agent": "0.24.3",
- "@percy/cypress": "^2.3.2",
- "@pmmmwh/react-refresh-webpack-plugin": "^0.4.3",
- "@testing-library/cypress": "^7.0.2",
+ "@babel/preset-typescript": "^7.22.5",
+ "@cypress/code-coverage": "^3.11.0",
+ "@percy/agent": "^0.28.7",
+ "@percy/cypress": "^3.1.2",
+ "@pmmmwh/react-refresh-webpack-plugin": "^0.5.10",
+ "@testing-library/cypress": "^8.0.7",
"@types/classnames": "^2.2.10",
"@types/hoist-non-react-statics": "^3.3.1",
"@types/lodash": "^4.14.157",
@@ -104,19 +105,20 @@
"atob": "^2.1.2",
"babel-eslint": "^10.0.3",
"babel-jest": "^24.1.0",
- "babel-loader": "^8.0.5",
- "babel-plugin-istanbul": "^6.0.0",
+ "babel-loader": "^8.3.0",
+ "babel-plugin-istanbul": "^6.1.1",
"babel-plugin-transform-builtin-extend": "^1.1.2",
- "copy-webpack-plugin": "^4.5.3",
- "css-loader": "^0.28.7",
- "cypress": "^5.3.0",
+ "copy-webpack-plugin": "^6.4.1",
+ "css-loader": "^5.2.7",
+ "cypress": "^11.2.0",
+ "dayjs": "^1.11.9",
"enzyme": "^3.8.0",
"enzyme-adapter-react-16": "^1.7.1",
"enzyme-to-json": "^3.3.5",
"eslint": "^6.7.2",
"eslint-config-prettier": "^6.7.0",
"eslint-config-react-app": "^5.1.0",
- "eslint-loader": "^3.0.3",
+ "eslint-loader": "^4.0.2",
"eslint-plugin-chai-friendly": "^0.5.0",
"eslint-plugin-compat": "^3.3.0",
"eslint-plugin-cypress": "^2.0.1",
@@ -127,35 +129,34 @@
"eslint-plugin-no-only-tests": "^2.4.0",
"eslint-plugin-react": "^7.17.0",
"eslint-plugin-react-hooks": "^1.7.0",
- "file-loader": "^2.0.0",
- "html-webpack-plugin": "^3.2.0",
+ "file-loader": "^6.2.0",
+ "html-webpack-plugin": "^4.5.2",
"identity-obj-proxy": "^3.0.0",
"jest": "^24.1.0",
- "less": "^3.9.0",
- "less-loader": "^4.1.0",
- "less-plugin-autoprefix": "^1.5.1",
- "lodash": "^4.17.20",
- "mini-css-extract-plugin": "^0.4.4",
+ "less": "^3.13.1",
+ "less-loader": "^5.0.0",
+ "less-plugin-autoprefix": "^2.0.0",
+ "lodash": "^4.17.21",
+ "mini-css-extract-plugin": "^1.6.2",
"mockdate": "^2.0.2",
"npm-run-all": "^4.1.5",
"prettier": "^1.19.1",
"raw-loader": "^0.5.1",
- "react-refresh": "^0.9.0",
- "react-test-renderer": "^16.5.2",
- "request": "^2.88.0",
+ "react-refresh": "^0.14.0",
+ "react-test-renderer": "^16.14.0",
"request-cookies": "^1.1.0",
"style-loader": "^2.0.0",
"typescript": "^4.1.2",
- "url-loader": "^1.1.2",
- "webpack": "^4.44.2",
- "webpack-build-notifier": "^0.1.30",
- "webpack-bundle-analyzer": "^2.11.1",
- "webpack-cli": "^3.1.2",
- "webpack-dev-server": "^3.11.0",
+ "url-loader": "^4.1.1",
+ "webpack": "^4.46.0",
+ "webpack-build-notifier": "^2.3.0",
+ "webpack-bundle-analyzer": "^4.9.0",
+ "webpack-cli": "^4.10.0",
+ "webpack-dev-server": "^4.15.1",
"webpack-manifest-plugin": "^2.0.4"
},
"optionalDependencies": {
- "fsevents": "^1.2.9"
+ "fsevents": "^2.3.2"
},
"jest": {
"rootDir": "./client",
@@ -180,6 +181,10 @@
"viz-lib/**"
]
},
+ "browser": {
+ "fs": false,
+ "path": false
+ },
"//": "browserslist set to 'Async functions' compatibility",
"browserslist": [
"Edge >= 15",
diff --git a/poetry.lock b/poetry.lock
new file mode 100644
index 0000000000..3db965376b
--- /dev/null
+++ b/poetry.lock
@@ -0,0 +1,5459 @@
+# This file is automatically @generated by Poetry 1.8.3 and should not be changed by hand.
+
+[[package]]
+name = "adal"
+version = "1.2.7"
+description = "Note: This library is already replaced by MSAL Python, available here: https://pypi.org/project/msal/ .ADAL Python remains available here as a legacy. The ADAL for Python library makes it easy for python application to authenticate to Azure Active Directory (AAD) in order to access AAD protected web resources."
+optional = false
+python-versions = "*"
+files = [
+ {file = "adal-1.2.7-py2.py3-none-any.whl", hash = "sha256:2a7451ed7441ddbc57703042204a3e30ef747478eea022c70f789fc7f084bc3d"},
+ {file = "adal-1.2.7.tar.gz", hash = "sha256:d74f45b81317454d96e982fd1c50e6fb5c99ac2223728aea8764433a39f566f1"},
+]
+
+[package.dependencies]
+cryptography = ">=1.1.0"
+PyJWT = ">=1.0.0,<3"
+python-dateutil = ">=2.1.0,<3"
+requests = ">=2.0.0,<3"
+
+[[package]]
+name = "advocate"
+version = "1.0.0"
+description = "A wrapper around the requests library for safely making HTTP requests on behalf of a third party"
+optional = false
+python-versions = "*"
+files = [
+ {file = "advocate-1.0.0-py2.py3-none-any.whl", hash = "sha256:e8b340e49fadc0e416fbc9e81ef52d74858ccad16357dabde6cf9d99a7407d70"},
+ {file = "advocate-1.0.0.tar.gz", hash = "sha256:1bf1170e41334279996580329c594e017540ab0eaf7a152323e743f0a85a353d"},
+]
+
+[package.dependencies]
+ndg-httpsclient = "*"
+netifaces = ">=0.10.5"
+pyasn1 = "*"
+pyopenssl = "*"
+requests = ">=2.18.0,<3.0"
+six = "*"
+urllib3 = ">=1.22,<2.0"
+
+[[package]]
+name = "alembic"
+version = "1.13.1"
+description = "A database migration tool for SQLAlchemy."
+optional = false
+python-versions = ">=3.8"
+files = [
+ {file = "alembic-1.13.1-py3-none-any.whl", hash = "sha256:2edcc97bed0bd3272611ce3a98d98279e9c209e7186e43e75bbb1b2bdfdbcc43"},
+ {file = "alembic-1.13.1.tar.gz", hash = "sha256:4932c8558bf68f2ee92b9bbcb8218671c627064d5b08939437af6d77dc05e595"},
+]
+
+[package.dependencies]
+importlib-metadata = {version = "*", markers = "python_version < \"3.9\""}
+importlib-resources = {version = "*", markers = "python_version < \"3.9\""}
+Mako = "*"
+SQLAlchemy = ">=1.3.0"
+typing-extensions = ">=4"
+
+[package.extras]
+tz = ["backports.zoneinfo"]
+
+[[package]]
+name = "aniso8601"
+version = "8.0.0"
+description = "A library for parsing ISO 8601 strings."
+optional = false
+python-versions = "*"
+files = [
+ {file = "aniso8601-8.0.0-py2.py3-none-any.whl", hash = "sha256:c033f63d028b9a58e3ab0c2c7d0532ab4bfa7452bfc788fbfe3ddabd327b181a"},
+ {file = "aniso8601-8.0.0.tar.gz", hash = "sha256:529dcb1f5f26ee0df6c0a1ee84b7b27197c3c50fc3a6321d66c544689237d072"},
+]
+
+[[package]]
+name = "anyio"
+version = "4.2.0"
+description = "High level compatibility layer for multiple asynchronous event loop implementations"
+optional = false
+python-versions = ">=3.8"
+files = [
+ {file = "anyio-4.2.0-py3-none-any.whl", hash = "sha256:745843b39e829e108e518c489b31dc757de7d2131d53fac32bd8df268227bfee"},
+ {file = "anyio-4.2.0.tar.gz", hash = "sha256:e1875bb4b4e2de1669f4bc7869b6d3f54231cdced71605e6e64c9be77e3be50f"},
+]
+
+[package.dependencies]
+exceptiongroup = {version = ">=1.0.2", markers = "python_version < \"3.11\""}
+idna = ">=2.8"
+sniffio = ">=1.1"
+typing-extensions = {version = ">=4.1", markers = "python_version < \"3.11\""}
+
+[package.extras]
+doc = ["Sphinx (>=7)", "packaging", "sphinx-autodoc-typehints (>=1.2.0)", "sphinx-rtd-theme"]
+test = ["anyio[trio]", "coverage[toml] (>=7)", "exceptiongroup (>=1.2.0)", "hypothesis (>=4.0)", "psutil (>=5.9)", "pytest (>=7.0)", "pytest-mock (>=3.6.1)", "trustme", "uvloop (>=0.17)"]
+trio = ["trio (>=0.23)"]
+
+[[package]]
+name = "appdirs"
+version = "1.4.4"
+description = "A small Python module for determining appropriate platform-specific dirs, e.g. a \"user data dir\"."
+optional = false
+python-versions = "*"
+files = [
+ {file = "appdirs-1.4.4-py2.py3-none-any.whl", hash = "sha256:a841dacd6b99318a741b166adb07e19ee71a274450e68237b4650ca1055ab128"},
+ {file = "appdirs-1.4.4.tar.gz", hash = "sha256:7d5d0167b2b1ba821647616af46a749d1c653740dd0d2415100fe26e27afdf41"},
+]
+
+[[package]]
+name = "asn1crypto"
+version = "1.5.1"
+description = "Fast ASN.1 parser and serializer with definitions for private keys, public keys, certificates, CRL, OCSP, CMS, PKCS#3, PKCS#7, PKCS#8, PKCS#12, PKCS#5, X.509 and TSP"
+optional = false
+python-versions = "*"
+files = [
+ {file = "asn1crypto-1.5.1-py2.py3-none-any.whl", hash = "sha256:db4e40728b728508912cbb3d44f19ce188f218e9eba635821bb4b68564f8fd67"},
+ {file = "asn1crypto-1.5.1.tar.gz", hash = "sha256:13ae38502be632115abf8a24cbe5f4da52e3b5231990aff31123c805306ccb9c"},
+]
+
+[[package]]
+name = "async-timeout"
+version = "4.0.3"
+description = "Timeout context manager for asyncio programs"
+optional = false
+python-versions = ">=3.7"
+files = [
+ {file = "async-timeout-4.0.3.tar.gz", hash = "sha256:4640d96be84d82d02ed59ea2b7105a0f7b33abe8703703cd0ab0bf87c427522f"},
+ {file = "async_timeout-4.0.3-py3-none-any.whl", hash = "sha256:7405140ff1230c310e51dc27b3145b9092d659ce68ff733fb0cefe3ee42be028"},
+]
+
+[[package]]
+name = "atsd-client"
+version = "3.0.5"
+description = "Axibase Time Series Database API Client for Python"
+optional = false
+python-versions = ">=3.5"
+files = [
+ {file = "atsd_client-3.0.5-py3-none-any.whl", hash = "sha256:36cb4adffb58afd994dace6eb703711e6c40c2977c974783492d99e10104bfd5"},
+]
+
+[package.dependencies]
+python-dateutil = "*"
+requests = ">=2.12.1"
+tzlocal = "*"
+
+[package.extras]
+analysis = ["pandas"]
+
+[[package]]
+name = "attrs"
+version = "23.2.0"
+description = "Classes Without Boilerplate"
+optional = false
+python-versions = ">=3.7"
+files = [
+ {file = "attrs-23.2.0-py3-none-any.whl", hash = "sha256:99b87a485a5820b23b879f04c2305b44b951b502fd64be915879d77a7e8fc6f1"},
+ {file = "attrs-23.2.0.tar.gz", hash = "sha256:935dc3b529c262f6cf76e50877d35a4bd3c1de194fd41f47a2b7ae8f19971f30"},
+]
+
+[package.extras]
+cov = ["attrs[tests]", "coverage[toml] (>=5.3)"]
+dev = ["attrs[tests]", "pre-commit"]
+docs = ["furo", "myst-parser", "sphinx", "sphinx-notfound-page", "sphinxcontrib-towncrier", "towncrier", "zope-interface"]
+tests = ["attrs[tests-no-zope]", "zope-interface"]
+tests-mypy = ["mypy (>=1.6)", "pytest-mypy-plugins"]
+tests-no-zope = ["attrs[tests-mypy]", "cloudpickle", "hypothesis", "pympler", "pytest (>=4.3.0)", "pytest-xdist[psutil]"]
+
+[[package]]
+name = "authlib"
+version = "0.15.5"
+description = "The ultimate Python library in building OAuth and OpenID Connect servers."
+optional = false
+python-versions = "*"
+files = [
+ {file = "Authlib-0.15.5-py2.py3-none-any.whl", hash = "sha256:ecf4a7a9f2508c0bb07e93a752dd3c495cfaffc20e864ef0ffc95e3f40d2abaf"},
+ {file = "Authlib-0.15.5.tar.gz", hash = "sha256:b83cf6360c8e92b0e9df0d1f32d675790bcc4e3c03977499b1eed24dcdef4252"},
+]
+
+[package.dependencies]
+cryptography = "*"
+
+[package.extras]
+client = ["requests"]
+
+[[package]]
+name = "azure-kusto-data"
+version = "0.0.35"
+description = "Kusto Data Client"
+optional = false
+python-versions = "*"
+files = [
+ {file = "azure-kusto-data-0.0.35.tar.gz", hash = "sha256:0a64ac028cd10c5ba0212c2238b4648d3f613360d305139217addea9a5136869"},
+ {file = "azure_kusto_data-0.0.35-py2.py3-none-any.whl", hash = "sha256:3862261c4812eeb05fa110e2df951de0df838f3f1fe9d46a48879b8f5532b365"},
+]
+
+[package.dependencies]
+adal = ">=1.0.0"
+python-dateutil = ">=2.8.0"
+requests = ">=2.13.0"
+six = ">=1.10.0"
+
+[package.extras]
+pandas = ["pandas (==0.24.1)"]
+
+[[package]]
+name = "backoff"
+version = "2.2.1"
+description = "Function decoration for backoff and retry"
+optional = false
+python-versions = ">=3.7,<4.0"
+files = [
+ {file = "backoff-2.2.1-py3-none-any.whl", hash = "sha256:63579f9a0628e06278f7e47b7d7d5b6ce20dc65c5e96a6f3ca99a6adca0396e8"},
+ {file = "backoff-2.2.1.tar.gz", hash = "sha256:03f829f5bb1923180821643f8753b0502c3b682293992485b0eef2807afa5cba"},
+]
+
+[[package]]
+name = "backports-zoneinfo"
+version = "0.2.1"
+description = "Backport of the standard library zoneinfo module"
+optional = false
+python-versions = ">=3.6"
+files = [
+ {file = "backports.zoneinfo-0.2.1-cp36-cp36m-macosx_10_14_x86_64.whl", hash = "sha256:da6013fd84a690242c310d77ddb8441a559e9cb3d3d59ebac9aca1a57b2e18bc"},
+ {file = "backports.zoneinfo-0.2.1-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:89a48c0d158a3cc3f654da4c2de1ceba85263fafb861b98b59040a5086259722"},
+ {file = "backports.zoneinfo-0.2.1-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:1c5742112073a563c81f786e77514969acb58649bcdf6cdf0b4ed31a348d4546"},
+ {file = "backports.zoneinfo-0.2.1-cp36-cp36m-win32.whl", hash = "sha256:e8236383a20872c0cdf5a62b554b27538db7fa1bbec52429d8d106effbaeca08"},
+ {file = "backports.zoneinfo-0.2.1-cp36-cp36m-win_amd64.whl", hash = "sha256:8439c030a11780786a2002261569bdf362264f605dfa4d65090b64b05c9f79a7"},
+ {file = "backports.zoneinfo-0.2.1-cp37-cp37m-macosx_10_14_x86_64.whl", hash = "sha256:f04e857b59d9d1ccc39ce2da1021d196e47234873820cbeaad210724b1ee28ac"},
+ {file = "backports.zoneinfo-0.2.1-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:17746bd546106fa389c51dbea67c8b7c8f0d14b5526a579ca6ccf5ed72c526cf"},
+ {file = "backports.zoneinfo-0.2.1-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:5c144945a7752ca544b4b78c8c41544cdfaf9786f25fe5ffb10e838e19a27570"},
+ {file = "backports.zoneinfo-0.2.1-cp37-cp37m-win32.whl", hash = "sha256:e55b384612d93be96506932a786bbcde5a2db7a9e6a4bb4bffe8b733f5b9036b"},
+ {file = "backports.zoneinfo-0.2.1-cp37-cp37m-win_amd64.whl", hash = "sha256:a76b38c52400b762e48131494ba26be363491ac4f9a04c1b7e92483d169f6582"},
+ {file = "backports.zoneinfo-0.2.1-cp38-cp38-macosx_10_14_x86_64.whl", hash = "sha256:8961c0f32cd0336fb8e8ead11a1f8cd99ec07145ec2931122faaac1c8f7fd987"},
+ {file = "backports.zoneinfo-0.2.1-cp38-cp38-manylinux1_i686.whl", hash = "sha256:e81b76cace8eda1fca50e345242ba977f9be6ae3945af8d46326d776b4cf78d1"},
+ {file = "backports.zoneinfo-0.2.1-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:7b0a64cda4145548fed9efc10322770f929b944ce5cee6c0dfe0c87bf4c0c8c9"},
+ {file = "backports.zoneinfo-0.2.1-cp38-cp38-win32.whl", hash = "sha256:1b13e654a55cd45672cb54ed12148cd33628f672548f373963b0bff67b217328"},
+ {file = "backports.zoneinfo-0.2.1-cp38-cp38-win_amd64.whl", hash = "sha256:4a0f800587060bf8880f954dbef70de6c11bbe59c673c3d818921f042f9954a6"},
+ {file = "backports.zoneinfo-0.2.1.tar.gz", hash = "sha256:fadbfe37f74051d024037f223b8e001611eac868b5c5b06144ef4d8b799862f2"},
+]
+
+[package.extras]
+tzdata = ["tzdata"]
+
+[[package]]
+name = "bcrypt"
+version = "4.1.2"
+description = "Modern password hashing for your software and your servers"
+optional = false
+python-versions = ">=3.7"
+files = [
+ {file = "bcrypt-4.1.2-cp37-abi3-macosx_10_12_universal2.whl", hash = "sha256:ac621c093edb28200728a9cca214d7e838529e557027ef0581685909acd28b5e"},
+ {file = "bcrypt-4.1.2-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ea505c97a5c465ab8c3ba75c0805a102ce526695cd6818c6de3b1a38f6f60da1"},
+ {file = "bcrypt-4.1.2-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:57fa9442758da926ed33a91644649d3e340a71e2d0a5a8de064fb621fd5a3326"},
+ {file = "bcrypt-4.1.2-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:eb3bd3321517916696233b5e0c67fd7d6281f0ef48e66812db35fc963a422a1c"},
+ {file = "bcrypt-4.1.2-cp37-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:6cad43d8c63f34b26aef462b6f5e44fdcf9860b723d2453b5d391258c4c8e966"},
+ {file = "bcrypt-4.1.2-cp37-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:44290ccc827d3a24604f2c8bcd00d0da349e336e6503656cb8192133e27335e2"},
+ {file = "bcrypt-4.1.2-cp37-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:732b3920a08eacf12f93e6b04ea276c489f1c8fb49344f564cca2adb663b3e4c"},
+ {file = "bcrypt-4.1.2-cp37-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:1c28973decf4e0e69cee78c68e30a523be441972c826703bb93099868a8ff5b5"},
+ {file = "bcrypt-4.1.2-cp37-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:b8df79979c5bae07f1db22dcc49cc5bccf08a0380ca5c6f391cbb5790355c0b0"},
+ {file = "bcrypt-4.1.2-cp37-abi3-win32.whl", hash = "sha256:fbe188b878313d01b7718390f31528be4010fed1faa798c5a1d0469c9c48c369"},
+ {file = "bcrypt-4.1.2-cp37-abi3-win_amd64.whl", hash = "sha256:9800ae5bd5077b13725e2e3934aa3c9c37e49d3ea3d06318010aa40f54c63551"},
+ {file = "bcrypt-4.1.2-cp39-abi3-macosx_10_12_universal2.whl", hash = "sha256:71b8be82bc46cedd61a9f4ccb6c1a493211d031415a34adde3669ee1b0afbb63"},
+ {file = "bcrypt-4.1.2-cp39-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:68e3c6642077b0c8092580c819c1684161262b2e30c4f45deb000c38947bf483"},
+ {file = "bcrypt-4.1.2-cp39-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:387e7e1af9a4dd636b9505a465032f2f5cb8e61ba1120e79a0e1cd0b512f3dfc"},
+ {file = "bcrypt-4.1.2-cp39-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:f70d9c61f9c4ca7d57f3bfe88a5ccf62546ffbadf3681bb1e268d9d2e41c91a7"},
+ {file = "bcrypt-4.1.2-cp39-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:2a298db2a8ab20056120b45e86c00a0a5eb50ec4075b6142db35f593b97cb3fb"},
+ {file = "bcrypt-4.1.2-cp39-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:ba55e40de38a24e2d78d34c2d36d6e864f93e0d79d0b6ce915e4335aa81d01b1"},
+ {file = "bcrypt-4.1.2-cp39-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:3566a88234e8de2ccae31968127b0ecccbb4cddb629da744165db72b58d88ca4"},
+ {file = "bcrypt-4.1.2-cp39-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:b90e216dc36864ae7132cb151ffe95155a37a14e0de3a8f64b49655dd959ff9c"},
+ {file = "bcrypt-4.1.2-cp39-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:69057b9fc5093ea1ab00dd24ede891f3e5e65bee040395fb1e66ee196f9c9b4a"},
+ {file = "bcrypt-4.1.2-cp39-abi3-win32.whl", hash = "sha256:02d9ef8915f72dd6daaef40e0baeef8a017ce624369f09754baf32bb32dba25f"},
+ {file = "bcrypt-4.1.2-cp39-abi3-win_amd64.whl", hash = "sha256:be3ab1071662f6065899fe08428e45c16aa36e28bc42921c4901a191fda6ee42"},
+ {file = "bcrypt-4.1.2-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:d75fc8cd0ba23f97bae88a6ec04e9e5351ff3c6ad06f38fe32ba50cbd0d11946"},
+ {file = "bcrypt-4.1.2-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:a97e07e83e3262599434816f631cc4c7ca2aa8e9c072c1b1a7fec2ae809a1d2d"},
+ {file = "bcrypt-4.1.2-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:e51c42750b7585cee7892c2614be0d14107fad9581d1738d954a262556dd1aab"},
+ {file = "bcrypt-4.1.2-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:ba4e4cc26610581a6329b3937e02d319f5ad4b85b074846bf4fef8a8cf51e7bb"},
+ {file = "bcrypt-4.1.2.tar.gz", hash = "sha256:33313a1200a3ae90b75587ceac502b048b840fc69e7f7a0905b5f87fac7a1258"},
+]
+
+[package.extras]
+tests = ["pytest (>=3.2.1,!=3.3.0)"]
+typecheck = ["mypy"]
+
+[[package]]
+name = "bitarray"
+version = "2.9.2"
+description = "efficient arrays of booleans -- C extension"
+optional = false
+python-versions = "*"
+files = [
+ {file = "bitarray-2.9.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:917905de565d9576eb20f53c797c15ba88b9f4f19728acabec8d01eee1d3756a"},
+ {file = "bitarray-2.9.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:b35bfcb08b7693ab4bf9059111a6e9f14e07d57ac93cd967c420db58ab9b71e1"},
+ {file = "bitarray-2.9.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:ea1923d2e7880f9e1959e035da661767b5a2e16a45dfd57d6aa831e8b65ee1bf"},
+ {file = "bitarray-2.9.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1e0b63a565e8a311cc8348ff1262d5784df0f79d64031d546411afd5dd7ef67d"},
+ {file = "bitarray-2.9.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:cf0620da2b81946d28c0b16f3e3704d38e9837d85ee4f0652816e2609aaa4fed"},
+ {file = "bitarray-2.9.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:79a9b8b05f2876c7195a2b698c47528e86a73c61ea203394ff8e7a4434bda5c8"},
+ {file = "bitarray-2.9.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:345c76b349ff145549652436235c5532e5bfe9db690db6f0a6ad301c62b9ef21"},
+ {file = "bitarray-2.9.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4e2936f090bf3f4d1771f44f9077ebccdbc0415d2b598d51a969afcb519df505"},
+ {file = "bitarray-2.9.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:f9346e98fc2abcef90b942973087e2462af6d3e3710e82938078d3493f7fef52"},
+ {file = "bitarray-2.9.2-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:e6ec283d4741befb86e8c3ea2e9ac1d17416c956d392107e45263e736954b1f7"},
+ {file = "bitarray-2.9.2-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:962892646599529917ef26266091e4cb3077c88b93c3833a909d68dcc971c4e3"},
+ {file = "bitarray-2.9.2-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:e8da5355d7d75a52df5b84750989e34e39919ec7e59fafc4c104cc1607ab2d31"},
+ {file = "bitarray-2.9.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:603e7d640e54ad764d2b4da6b61e126259af84f253a20f512dd10689566e5478"},
+ {file = "bitarray-2.9.2-cp310-cp310-win32.whl", hash = "sha256:f00079f8e69d75c2a417de7961a77612bb77ef46c09bc74607d86de4740771ef"},
+ {file = "bitarray-2.9.2-cp310-cp310-win_amd64.whl", hash = "sha256:1bb33673e7f7190a65f0a940c1ef63266abdb391f4a3e544a47542d40a81f536"},
+ {file = "bitarray-2.9.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:fe71fd4b76380c2772f96f1e53a524da7063645d647a4fcd3b651bdd80ca0f2e"},
+ {file = "bitarray-2.9.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:d527172919cdea1e13994a66d9708a80c3d33dedcf2f0548e4925e600fef3a3a"},
+ {file = "bitarray-2.9.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:052c5073bdcaa9dd10628d99d37a2f33ec09364b86dd1f6281e2d9f8d3db3060"},
+ {file = "bitarray-2.9.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e064caa55a6ed493aca1eda06f8b3f689778bc780a75e6ad7724642ba5dc62f7"},
+ {file = "bitarray-2.9.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:508069a04f658210fdeee85a7a0ca84db4bcc110cbb1d21f692caa13210f24a7"},
+ {file = "bitarray-2.9.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4da73ebd537d75fa7bccfc2228fcaedea0803f21dd9d0bf0d3b67fef3c4af294"},
+ {file = "bitarray-2.9.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5cb378eaa65cd43098f11ff5d27e48ee3b956d2c00d2d6b5bfc2a09fe183be47"},
+ {file = "bitarray-2.9.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d14c790b91f6cbcd9b718f88ed737c78939980c69ac8c7f03dd7e60040c12951"},
+ {file = "bitarray-2.9.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:7eea9318293bc0ea6447e9ebfba600a62f3428bea7e9c6d42170ae4f481dbab3"},
+ {file = "bitarray-2.9.2-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:b76ffec27c7450b8a334f967366a9ebadaea66ee43f5b530c12861b1a991f503"},
+ {file = "bitarray-2.9.2-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:76b76a07d4ee611405045c6950a1e24c4362b6b44808d4ad6eea75e0dbc59af4"},
+ {file = "bitarray-2.9.2-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:c7d16beeaaab15b075990cd26963d6b5b22e8c5becd131781514a00b8bdd04bd"},
+ {file = "bitarray-2.9.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:60df43e868a615c7e15117a1e1c2e5e11f48f6457280eba6ddf8fbefbec7da99"},
+ {file = "bitarray-2.9.2-cp311-cp311-win32.whl", hash = "sha256:e788608ed7767b7b3bbde6d49058bccdf94df0de9ca75d13aa99020cc7e68095"},
+ {file = "bitarray-2.9.2-cp311-cp311-win_amd64.whl", hash = "sha256:a23397da092ef0a8cfe729571da64c2fc30ac18243caa82ac7c4f965087506ff"},
+ {file = "bitarray-2.9.2-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:90e3a281ffe3897991091b7c46fca38c2675bfd4399ffe79dfeded6c52715436"},
+ {file = "bitarray-2.9.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:bed637b674db5e6c8a97a4a321e3e4d73e72d50b5c6b29950008a93069cc64cd"},
+ {file = "bitarray-2.9.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:e49066d251dbbe4e6e3a5c3937d85b589e40e2669ad0eef41a00f82ec17d844b"},
+ {file = "bitarray-2.9.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3c4344e96642e2211fb3a50558feff682c31563a4c64529a931769d40832ca79"},
+ {file = "bitarray-2.9.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:aeb60962ec4813c539a59fbd4f383509c7222b62c3fb1faa76b54943a613e33a"},
+ {file = "bitarray-2.9.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ed0f7982f10581bb16553719e5e8f933e003f5b22f7d25a68bdb30fac630a6ff"},
+ {file = "bitarray-2.9.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c71d1cabdeee0cdda4669168618f0e46b7dace207b29da7b63aaa1adc2b54081"},
+ {file = "bitarray-2.9.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b0ef2d0a6f1502d38d911d25609b44c6cc27bee0a4363dd295df78b075041b60"},
+ {file = "bitarray-2.9.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:6f71d92f533770fb027388b35b6e11988ab89242b883f48a6fe7202d238c61f8"},
+ {file = "bitarray-2.9.2-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:ba0734aa300757c924f3faf8148e1b8c247176a0ac8e16aefdf9c1eb19e868f7"},
+ {file = "bitarray-2.9.2-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:d91406f413ccbf4af6ab5ae7bc78f772a95609f9ddd14123db36ef8c37116d95"},
+ {file = "bitarray-2.9.2-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:87abb7f80c0a042f3fe8e5264da1a2756267450bb602110d5327b8eaff7682e7"},
+ {file = "bitarray-2.9.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:4b558ce85579b51a2e38703877d1e93b7728a7af664dd45a34e833534f0b755d"},
+ {file = "bitarray-2.9.2-cp312-cp312-win32.whl", hash = "sha256:dac2399ee2889fbdd3472bfc2ede74c34cceb1ccf29a339964281a16eb1d3188"},
+ {file = "bitarray-2.9.2-cp312-cp312-win_amd64.whl", hash = "sha256:48a30d718d1a6dfc22a49547450107abe8f4afdf2abdcbe76eb9ed88edc49498"},
+ {file = "bitarray-2.9.2-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:2c6be1b651fad8f3adb7a5aa12c65b612cd9b89530969af941844ae680f7d981"},
+ {file = "bitarray-2.9.2-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c5b399ae6ab975257ec359f03b48fc00b1c1cd109471e41903548469b8feae5c"},
+ {file = "bitarray-2.9.2-cp36-cp36m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:0b3543c8a1cb286ad105f11c25d8d0f712f41c5c55f90be39f0e5a1376c7d0b0"},
+ {file = "bitarray-2.9.2-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:03adaacb79e2fb8f483ab3a67665eec53bb3fd0cd5dbd7358741aef124688db3"},
+ {file = "bitarray-2.9.2-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9ae5b0657380d2581e13e46864d147a52c1e2bbac9f59b59c576e42fa7d10cf0"},
+ {file = "bitarray-2.9.2-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7c1f4bf6ea8eb9d7f30808c2e9894237a96650adfecbf5f3643862dc5982f89e"},
+ {file = "bitarray-2.9.2-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:a8873089be2aa15494c0f81af1209f6e1237d762c5065bc4766c1b84321e1b50"},
+ {file = "bitarray-2.9.2-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:677e67f50e2559efc677a4366707070933ad5418b8347a603a49a070890b19bc"},
+ {file = "bitarray-2.9.2-cp36-cp36m-musllinux_1_1_ppc64le.whl", hash = "sha256:a620d8ce4ea2f1c73c6b6b1399e14cb68c6915e2be3fad5808c2998ed55b4acf"},
+ {file = "bitarray-2.9.2-cp36-cp36m-musllinux_1_1_s390x.whl", hash = "sha256:64115ccabbdbe279c24c367b629c6b1d3da9ed36c7420129e27c338a3971bfee"},
+ {file = "bitarray-2.9.2-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:5d6fb422772e75385b76ad1c52f45a68bd4efafd8be8d0061c11877be74c4d43"},
+ {file = "bitarray-2.9.2-cp36-cp36m-win32.whl", hash = "sha256:852e202875dd6dfd6139ce7ec4e98dac2b17d8d25934dc99900831e81c3adaef"},
+ {file = "bitarray-2.9.2-cp36-cp36m-win_amd64.whl", hash = "sha256:7dfefdcb0dc6a3ba9936063cec65a74595571b375beabe18742b3d91d087eefd"},
+ {file = "bitarray-2.9.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:b306c4cf66912511422060f7f5e1149c8bdb404f8e00e600561b0749fdd45659"},
+ {file = "bitarray-2.9.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a09c4f81635408e3387348f415521d4b94198c562c23330f560596a6aaa26eaf"},
+ {file = "bitarray-2.9.2-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5361413fd2ecfdf44dc8f065177dc6aba97fa80a91b815586cb388763acf7f8d"},
+ {file = "bitarray-2.9.2-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e8a9475d415ef1eaae7942df6f780fa4dcd48fce32825eda591a17abba869299"},
+ {file = "bitarray-2.9.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c9b87baa7bfff9a5878fcc1bffe49ecde6e647a72a64b39a69cd8a2992a43a34"},
+ {file = "bitarray-2.9.2-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:bb6b86cfdfc503e92cb71c68766a24565359136961642504a7cc9faf936d9c88"},
+ {file = "bitarray-2.9.2-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:cd56b8ae87ebc71bcacbd73615098e8a8de952ecbb5785b6b4e2b07da8a06e1f"},
+ {file = "bitarray-2.9.2-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:3fa909cfd675004aed8b4cc9df352415933656e0155a6209d878b7cb615c787e"},
+ {file = "bitarray-2.9.2-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:b069ca9bf728e0c5c5b60e00a89df9af34cc170c695c3bfa3b372d8f40288efb"},
+ {file = "bitarray-2.9.2-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:6067f2f07a7121749858c7daa93c8774325c91590b3e81a299621e347740c2ae"},
+ {file = "bitarray-2.9.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:321841cdad1dd0f58fe62e80e9c9c7531f8ebf8be93f047401e930dc47425b1e"},
+ {file = "bitarray-2.9.2-cp37-cp37m-win32.whl", hash = "sha256:54e16e32e60973bb83c315de9975bc1bcfc9bd50bb13001c31da159bc49b0ca1"},
+ {file = "bitarray-2.9.2-cp37-cp37m-win_amd64.whl", hash = "sha256:f4dcadb7b8034aa3491ee8f5a69b3d9ba9d7d1e55c3cc1fc45be313e708277f8"},
+ {file = "bitarray-2.9.2-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:c8919fdbd3bb596b104388b56ae4b266eb28da1f2f7dff2e1f9334a21840fe96"},
+ {file = "bitarray-2.9.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:eb7a9d8a2e400a1026de341ad48e21670a6261a75b06df162c5c39b0d0e7c8f4"},
+ {file = "bitarray-2.9.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:6ec84668dd7b937874a2b2c293cd14ba84f37be0d196dead852e0ada9815d807"},
+ {file = "bitarray-2.9.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f2de9a31c34e543ae089fd2a5ced01292f725190e379921384f695e2d7184bd3"},
+ {file = "bitarray-2.9.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9521f49ae121a17c0a41e5112249e6fa7f6a571245b1118de81fb86e7c1bc1ce"},
+ {file = "bitarray-2.9.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a6cc6545d6d76542aee3d18c1c9485fb7b9812b8df4ebe52c4535ec42081b48f"},
+ {file = "bitarray-2.9.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:856bbe1616425f71c0df5ef2e8755e878d9504d5a531acba58ab4273c52c117a"},
+ {file = "bitarray-2.9.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d4bba8042ea6ab331ade91bc435d81ad72fddb098e49108610b0ce7780c14e68"},
+ {file = "bitarray-2.9.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:a035da89c959d98afc813e3c62f052690d67cfd55a36592f25d734b70de7d4b0"},
+ {file = "bitarray-2.9.2-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:6d70b1579da7fb71be5a841a1f965d19aca0ef27f629cfc07d06b09aafd0a333"},
+ {file = "bitarray-2.9.2-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:405b83bed28efaae6d86b6ab287c75712ead0adbfab2a1075a1b7ab47dad4d62"},
+ {file = "bitarray-2.9.2-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:7eb8be687c50da0b397d5e0ab7ca200b5ebb639e79a9f5e285851d1944c94be9"},
+ {file = "bitarray-2.9.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:eceb551dfeaf19c609003a69a0cf8264b0efd7abc3791a11dfabf4788daf0d19"},
+ {file = "bitarray-2.9.2-cp38-cp38-win32.whl", hash = "sha256:bb198c6ed1edbcdaf3d1fa3c9c9d1cdb7e179a5134ef5ee660b53cdec43b34e7"},
+ {file = "bitarray-2.9.2-cp38-cp38-win_amd64.whl", hash = "sha256:648d2f2685590b0103c67a937c2fb9e09bcc8dfb166f0c7c77bd341902a6f5b3"},
+ {file = "bitarray-2.9.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:ea816dc8f8e65841a8bbdd30e921edffeeb6f76efe6a1eb0da147b60d539d1cf"},
+ {file = "bitarray-2.9.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:4d0e32530f941c41eddfc77600ec89b65184cb909c549336463a738fab3ed285"},
+ {file = "bitarray-2.9.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:4a22266fb416a3b6c258bf7f83c9fe531ba0b755a56986a81ad69dc0f3bcc070"},
+ {file = "bitarray-2.9.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fc6d3e80dd8239850f2604833ff3168b28909c8a9357abfed95632cccd17e3e7"},
+ {file = "bitarray-2.9.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f135e804986b12bf14f2cd1eb86674c47dea86c4c5f0fa13c88978876b97ebe6"},
+ {file = "bitarray-2.9.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:87580c7f7d14f7ec401eda7adac1e2a25e95153e9c339872c8ae61b3208819a1"},
+ {file = "bitarray-2.9.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:64b433e26993127732ac7b66a7821b2537c3044355798de7c5fcb0af34b8296f"},
+ {file = "bitarray-2.9.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1e497c535f2a9b68c69d36631bf2dba243e05eb343b00b9c7bbdc8c601c6802d"},
+ {file = "bitarray-2.9.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:e40b3cb9fa1edb4e0175d7c06345c49c7925fe93e39ef55ecb0bc40c906b0c09"},
+ {file = "bitarray-2.9.2-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:f2f8692f95c9e377eb19ca519d30d1f884b02feb7e115f798de47570a359e43f"},
+ {file = "bitarray-2.9.2-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:f0b84fc50b6dbeced4fa390688c07c10a73222810fb0e08392bd1a1b8259de36"},
+ {file = "bitarray-2.9.2-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:d656ad38c942e38a470ddbce26b5020e08e1a7ea86b8fd413bb9024b5189993a"},
+ {file = "bitarray-2.9.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:6ab0f1dbfe5070db98771a56aa14797595acd45a1af9eadfb193851a270e7996"},
+ {file = "bitarray-2.9.2-cp39-cp39-win32.whl", hash = "sha256:0a99b23ac845a9ea3157782c97465e6ae026fe0c7c4c1ed1d88f759fd6ea52d9"},
+ {file = "bitarray-2.9.2-cp39-cp39-win_amd64.whl", hash = "sha256:9bbcfc7c279e8d74b076e514e669b683f77b4a2a328585b3f16d4c5259c91222"},
+ {file = "bitarray-2.9.2-pp310-pypy310_pp73-macosx_10_9_x86_64.whl", hash = "sha256:43847799461d8ba71deb4d97b47250c2c2fb66d82cd3cb8b4caf52bb97c03034"},
+ {file = "bitarray-2.9.2-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f4f44381b0a4bdf64416082f4f0e7140377ae962c0ced6f983c6d7bbfc034040"},
+ {file = "bitarray-2.9.2-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a484061616fb4b158b80789bd3cb511f399d2116525a8b29b6334c68abc2310f"},
+ {file = "bitarray-2.9.2-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1ff9e38356cc803e06134cf8ae9758e836ccd1b793135ef3db53c7c5d71e93bc"},
+ {file = "bitarray-2.9.2-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:b44105792fbdcfbda3e26ee88786790fda409da4c71f6c2b73888108cf8f062f"},
+ {file = "bitarray-2.9.2-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:7e913098de169c7fc890638ce5e171387363eb812579e637c44261460ac00aa2"},
+ {file = "bitarray-2.9.2-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d6fe315355cdfe3ed22ef355b8bdc81a805ca4d0949d921576560e5b227a1112"},
+ {file = "bitarray-2.9.2-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f708e91fdbe443f3bec2df394ed42328fb9b0446dff5cb4199023ac6499e09fd"},
+ {file = "bitarray-2.9.2-pp37-pypy37_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5b7b09489b71f9f1f64c0fa0977e250ec24500767dab7383ba9912495849cadf"},
+ {file = "bitarray-2.9.2-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:128cc3488176145b9b137fdcf54c1c201809bbb8dd30b260ee40afe915843b43"},
+ {file = "bitarray-2.9.2-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:21f21e7f56206be346bdbda2a6bdb2165a5e6a11821f88fd4911c5a6bbbdc7e2"},
+ {file = "bitarray-2.9.2-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5f4dd3af86dd8a617eb6464622fb64ca86e61ce99b59b5c35d8cd33f9c30603d"},
+ {file = "bitarray-2.9.2-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6465de861aff7a2559f226b37982007417eab8c3557543879987f58b453519bd"},
+ {file = "bitarray-2.9.2-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:dbaf2bb71d6027152d603f1d5f31e0dfd5e50173d06f877bec484e5396d4594b"},
+ {file = "bitarray-2.9.2-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:2f32948c86e0d230a296686db28191b67ed229756f84728847daa0c7ab7406e3"},
+ {file = "bitarray-2.9.2-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:be94e5a685e60f9d24532af8fe5c268002e9016fa80272a94727f435de3d1003"},
+ {file = "bitarray-2.9.2-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a5cc9381fd54f3c23ae1039f977bfd6d041a5c3c1518104f616643c3a5a73b15"},
+ {file = "bitarray-2.9.2-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cd926e8ae4d1ed1ac4a8f37212a62886292f692bc1739fde98013bf210c2d175"},
+ {file = "bitarray-2.9.2-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:461a3dafb9d5fda0bb3385dc507d78b1984b49da3fe4c6d56c869a54373b7008"},
+ {file = "bitarray-2.9.2-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:393cb27fd859af5fd9c16eb26b1c59b17b390ff66b3ae5d0dd258270191baf13"},
+ {file = "bitarray-2.9.2.tar.gz", hash = "sha256:a8f286a51a32323715d77755ed959f94bef13972e9a2fe71b609e40e6d27957e"},
+]
+
+[[package]]
+name = "blinker"
+version = "1.6.2"
+description = "Fast, simple object-to-object and broadcast signaling"
+optional = false
+python-versions = ">=3.7"
+files = [
+ {file = "blinker-1.6.2-py3-none-any.whl", hash = "sha256:c3d739772abb7bc2860abf5f2ec284223d9ad5c76da018234f6f50d6f31ab1f0"},
+ {file = "blinker-1.6.2.tar.gz", hash = "sha256:4afd3de66ef3a9f8067559fb7a1cbe555c17dcbe15971b05d1b625c3e7abe213"},
+]
+
+[[package]]
+name = "boto3"
+version = "1.28.8"
+description = "The AWS SDK for Python"
+optional = false
+python-versions = ">= 3.7"
+files = [
+ {file = "boto3-1.28.8-py3-none-any.whl", hash = "sha256:7132ac3f3a9c28b84dcc344cfb439d37d2c5ab45f6b577358fc9aeba5d5aab63"},
+ {file = "boto3-1.28.8.tar.gz", hash = "sha256:cf88309d9b8cd9a2fb0c8049cb4b217b4e9dcb55bf670d6054b0bbe2eef25e57"},
+]
+
+[package.dependencies]
+botocore = ">=1.31.8,<1.32.0"
+jmespath = ">=0.7.1,<2.0.0"
+s3transfer = ">=0.6.0,<0.7.0"
+
+[package.extras]
+crt = ["botocore[crt] (>=1.21.0,<2.0a0)"]
+
+[[package]]
+name = "botocore"
+version = "1.31.8"
+description = "Low-level, data-driven core of boto 3."
+optional = false
+python-versions = ">= 3.7"
+files = [
+ {file = "botocore-1.31.8-py3-none-any.whl", hash = "sha256:61ba7efaa6305c1928b9b3fbb6f780cbfbd762e19008d20c11ba52b47f20e1b0"},
+ {file = "botocore-1.31.8.tar.gz", hash = "sha256:092baa2168ae78080b0c28011527bfc11d8debd3767aa1e9a4ce8a91fd9943a2"},
+]
+
+[package.dependencies]
+jmespath = ">=0.7.1,<2.0.0"
+python-dateutil = ">=2.1,<3.0.0"
+urllib3 = ">=1.25.4,<1.27"
+
+[package.extras]
+crt = ["awscrt (==0.16.26)"]
+
+[[package]]
+name = "cachetools"
+version = "5.3.2"
+description = "Extensible memoizing collections and decorators"
+optional = false
+python-versions = ">=3.7"
+files = [
+ {file = "cachetools-5.3.2-py3-none-any.whl", hash = "sha256:861f35a13a451f94e301ce2bec7cac63e881232ccce7ed67fab9b5df4d3beaa1"},
+ {file = "cachetools-5.3.2.tar.gz", hash = "sha256:086ee420196f7b2ab9ca2db2520aca326318b68fe5ba8bc4d49cca91add450f2"},
+]
+
+[[package]]
+name = "cassandra-driver"
+version = "3.21.0"
+description = "DataStax Driver for Apache Cassandra"
+optional = false
+python-versions = "*"
+files = [
+ {file = "cassandra-driver-3.21.0.tar.gz", hash = "sha256:ee976a061a7f981c0b34f4564f6fc171a86de87a67c4057c4f04f2713202e157"},
+ {file = "cassandra_driver-3.21.0-cp27-cp27m-macosx_10_6_intel.whl", hash = "sha256:3a1e3ca35ad13084048ce901954cdf18a584146d10be46539b85412c3d45d5ac"},
+ {file = "cassandra_driver-3.21.0-cp27-cp27m-manylinux1_i686.whl", hash = "sha256:daf280b6cade9fe2cb54145621b6001a765985709a735a7695ea7ea2de0ab751"},
+ {file = "cassandra_driver-3.21.0-cp27-cp27m-manylinux1_x86_64.whl", hash = "sha256:ec5ee471348ec914d7c4f64584bd8bd7e1b33d3821917ff09054244d32918117"},
+ {file = "cassandra_driver-3.21.0-cp27-cp27mu-manylinux1_i686.whl", hash = "sha256:7b0cf5aa2bdd215179222b2f033bc39f19ae123bdaad6b16fc37b5b1ec3d5b28"},
+ {file = "cassandra_driver-3.21.0-cp27-cp27mu-manylinux1_x86_64.whl", hash = "sha256:ff477931d4b5f1db16b4899bb0a7e46c78d72b8470bdd07e794cbe70dc188199"},
+ {file = "cassandra_driver-3.21.0-cp34-cp34m-manylinux1_i686.whl", hash = "sha256:d66d4918f85c01da85e0cc6e0c5ed51c3e488c0196d13c0a0abd4e3414b6fa18"},
+ {file = "cassandra_driver-3.21.0-cp34-cp34m-manylinux1_x86_64.whl", hash = "sha256:5a49a5a37b0768eb48eaf12c53c91b0653ace5d083af4346e6603fa28beb62a0"},
+ {file = "cassandra_driver-3.21.0-cp35-cp35m-macosx_10_6_intel.whl", hash = "sha256:2b3a9ef665872e89449cbd6e6dc723d0ed9be5a1c831236761e8771a38f1d213"},
+ {file = "cassandra_driver-3.21.0-cp35-cp35m-manylinux1_i686.whl", hash = "sha256:77f43afc6b360e0d31a537938fbd60bebc1aed710e186755ccb2e00f8b05e8d0"},
+ {file = "cassandra_driver-3.21.0-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:3b79cc2675ff6e2ec88b388379f5c69ed749b84a9b22d752905e5f1683cd4629"},
+ {file = "cassandra_driver-3.21.0-cp35-cp35m-win32.whl", hash = "sha256:f37b0710380063c5f42358a9534217f7a0fcd8a08985a43d4137fb954fcacb41"},
+ {file = "cassandra_driver-3.21.0-cp35-cp35m-win_amd64.whl", hash = "sha256:d9af252db0a0dcd2fa0f27ff0862c8dbd8a4ea03d2e7dc7bcbe3bbae0a02eabf"},
+ {file = "cassandra_driver-3.21.0-cp36-cp36m-macosx_10_6_intel.whl", hash = "sha256:1ae6622d5c338eacac0ade1ef416fd01a16187be729d824ca89c41954e9c0548"},
+ {file = "cassandra_driver-3.21.0-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:56505e9c8cab996c892f5d688234c0f87fd792d2387c545048ee3e5f2a48eaca"},
+ {file = "cassandra_driver-3.21.0-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:d49c8cbbcae2c5dc3b695e31c0e993e3c9c00143d9ba464bbf0efb61b7bb48c6"},
+ {file = "cassandra_driver-3.21.0-cp36-cp36m-win32.whl", hash = "sha256:b1c5e02b24090623f00febf3531f48aa0387f8c5cc1926075989266e70a894ac"},
+ {file = "cassandra_driver-3.21.0-cp36-cp36m-win_amd64.whl", hash = "sha256:5c4aaedcda980f8fe33b12f08a2197f54dc4c48f61072aec490ef7aa8309eb3f"},
+ {file = "cassandra_driver-3.21.0-cp37-cp37m-macosx_10_6_intel.whl", hash = "sha256:0eb6cf8421ecd6925030e115236072bd553730bce7c82b8889830b21159813e8"},
+ {file = "cassandra_driver-3.21.0-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:4d6cbe0d005a71a28e8d6251139367745c7d3dc1ac152822b1de95af219a0a0c"},
+ {file = "cassandra_driver-3.21.0-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:6648282da46bc18f2502249aa35535c12fc22e5d2d6e41bdc6809d9d845787f7"},
+ {file = "cassandra_driver-3.21.0-cp37-cp37m-win32.whl", hash = "sha256:a883979390e2f82f0fab37c8c1bf8cd0ad5a5cc2f5f8bd57f31718870366b083"},
+ {file = "cassandra_driver-3.21.0-cp37-cp37m-win_amd64.whl", hash = "sha256:fdbb1a5a8c02e9e8af95713aafe448020b9c8d04e4ac69486ab2fa4894a6146a"},
+ {file = "cassandra_driver-3.21.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:feb638c3be9f71f36823cedb92f3fa55eb47ada0dabf528f6ad20020afb3e8f9"},
+ {file = "cassandra_driver-3.21.0-cp38-cp38-manylinux1_i686.whl", hash = "sha256:5d54fb30aa3ecaa943025bc98fed8fa4bd653a7efba9c7573f493085b7442c97"},
+ {file = "cassandra_driver-3.21.0-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:ff248431521f4bf522b3f600f3a3a88b622a0a7b64326a5115f343d0b18c028e"},
+ {file = "cassandra_driver-3.21.0-cp38-cp38-win32.whl", hash = "sha256:5c942123f4cb42a188ab9a95f0ca27cbc48c263cc8c061ecf9cd959c23efbb04"},
+ {file = "cassandra_driver-3.21.0-cp38-cp38-win_amd64.whl", hash = "sha256:71d5f3e322feea3d8a8f32d593598b6f9790e3f7e28cadf3bce98af170984f5c"},
+]
+
+[package.dependencies]
+futures = "*"
+geomet = ">=0.1,<0.2"
+six = ">=1.9"
+
+[package.extras]
+graph = ["gremlinpython (==3.3.4)"]
+
+[[package]]
+name = "certifi"
+version = "2024.7.4"
+description = "Python package for providing Mozilla's CA Bundle."
+optional = false
+python-versions = ">=3.6"
+files = [
+ {file = "certifi-2024.7.4-py3-none-any.whl", hash = "sha256:c198e21b1289c2ab85ee4e67bb4b4ef3ead0892059901a8d5b622f24a1101e90"},
+ {file = "certifi-2024.7.4.tar.gz", hash = "sha256:5a1e7645bc0ec61a09e26c36f6106dd4cf40c6db3a1fb6352b0244e7fb057c7b"},
+]
+
+[[package]]
+name = "cffi"
+version = "1.16.0"
+description = "Foreign Function Interface for Python calling C code."
+optional = false
+python-versions = ">=3.8"
+files = [
+ {file = "cffi-1.16.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:6b3d6606d369fc1da4fd8c357d026317fbb9c9b75d36dc16e90e84c26854b088"},
+ {file = "cffi-1.16.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:ac0f5edd2360eea2f1daa9e26a41db02dd4b0451b48f7c318e217ee092a213e9"},
+ {file = "cffi-1.16.0-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7e61e3e4fa664a8588aa25c883eab612a188c725755afff6289454d6362b9673"},
+ {file = "cffi-1.16.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a72e8961a86d19bdb45851d8f1f08b041ea37d2bd8d4fd19903bc3083d80c896"},
+ {file = "cffi-1.16.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5b50bf3f55561dac5438f8e70bfcdfd74543fd60df5fa5f62d94e5867deca684"},
+ {file = "cffi-1.16.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7651c50c8c5ef7bdb41108b7b8c5a83013bfaa8a935590c5d74627c047a583c7"},
+ {file = "cffi-1.16.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e4108df7fe9b707191e55f33efbcb2d81928e10cea45527879a4749cbe472614"},
+ {file = "cffi-1.16.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:32c68ef735dbe5857c810328cb2481e24722a59a2003018885514d4c09af9743"},
+ {file = "cffi-1.16.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:673739cb539f8cdaa07d92d02efa93c9ccf87e345b9a0b556e3ecc666718468d"},
+ {file = "cffi-1.16.0-cp310-cp310-win32.whl", hash = "sha256:9f90389693731ff1f659e55c7d1640e2ec43ff725cc61b04b2f9c6d8d017df6a"},
+ {file = "cffi-1.16.0-cp310-cp310-win_amd64.whl", hash = "sha256:e6024675e67af929088fda399b2094574609396b1decb609c55fa58b028a32a1"},
+ {file = "cffi-1.16.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:b84834d0cf97e7d27dd5b7f3aca7b6e9263c56308ab9dc8aae9784abb774d404"},
+ {file = "cffi-1.16.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:1b8ebc27c014c59692bb2664c7d13ce7a6e9a629be20e54e7271fa696ff2b417"},
+ {file = "cffi-1.16.0-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ee07e47c12890ef248766a6e55bd38ebfb2bb8edd4142d56db91b21ea68b7627"},
+ {file = "cffi-1.16.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d8a9d3ebe49f084ad71f9269834ceccbf398253c9fac910c4fd7053ff1386936"},
+ {file = "cffi-1.16.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e70f54f1796669ef691ca07d046cd81a29cb4deb1e5f942003f401c0c4a2695d"},
+ {file = "cffi-1.16.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5bf44d66cdf9e893637896c7faa22298baebcd18d1ddb6d2626a6e39793a1d56"},
+ {file = "cffi-1.16.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7b78010e7b97fef4bee1e896df8a4bbb6712b7f05b7ef630f9d1da00f6444d2e"},
+ {file = "cffi-1.16.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:c6a164aa47843fb1b01e941d385aab7215563bb8816d80ff3a363a9f8448a8dc"},
+ {file = "cffi-1.16.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e09f3ff613345df5e8c3667da1d918f9149bd623cd9070c983c013792a9a62eb"},
+ {file = "cffi-1.16.0-cp311-cp311-win32.whl", hash = "sha256:2c56b361916f390cd758a57f2e16233eb4f64bcbeee88a4881ea90fca14dc6ab"},
+ {file = "cffi-1.16.0-cp311-cp311-win_amd64.whl", hash = "sha256:db8e577c19c0fda0beb7e0d4e09e0ba74b1e4c092e0e40bfa12fe05b6f6d75ba"},
+ {file = "cffi-1.16.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:fa3a0128b152627161ce47201262d3140edb5a5c3da88d73a1b790a959126956"},
+ {file = "cffi-1.16.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:68e7c44931cc171c54ccb702482e9fc723192e88d25a0e133edd7aff8fcd1f6e"},
+ {file = "cffi-1.16.0-cp312-cp312-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:abd808f9c129ba2beda4cfc53bde801e5bcf9d6e0f22f095e45327c038bfe68e"},
+ {file = "cffi-1.16.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:88e2b3c14bdb32e440be531ade29d3c50a1a59cd4e51b1dd8b0865c54ea5d2e2"},
+ {file = "cffi-1.16.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:fcc8eb6d5902bb1cf6dc4f187ee3ea80a1eba0a89aba40a5cb20a5087d961357"},
+ {file = "cffi-1.16.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b7be2d771cdba2942e13215c4e340bfd76398e9227ad10402a8767ab1865d2e6"},
+ {file = "cffi-1.16.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e715596e683d2ce000574bae5d07bd522c781a822866c20495e52520564f0969"},
+ {file = "cffi-1.16.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:2d92b25dbf6cae33f65005baf472d2c245c050b1ce709cc4588cdcdd5495b520"},
+ {file = "cffi-1.16.0-cp312-cp312-win32.whl", hash = "sha256:b2ca4e77f9f47c55c194982e10f058db063937845bb2b7a86c84a6cfe0aefa8b"},
+ {file = "cffi-1.16.0-cp312-cp312-win_amd64.whl", hash = "sha256:68678abf380b42ce21a5f2abde8efee05c114c2fdb2e9eef2efdb0257fba1235"},
+ {file = "cffi-1.16.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:0c9ef6ff37e974b73c25eecc13952c55bceed9112be2d9d938ded8e856138bcc"},
+ {file = "cffi-1.16.0-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a09582f178759ee8128d9270cd1344154fd473bb77d94ce0aeb2a93ebf0feaf0"},
+ {file = "cffi-1.16.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e760191dd42581e023a68b758769e2da259b5d52e3103c6060ddc02c9edb8d7b"},
+ {file = "cffi-1.16.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:80876338e19c951fdfed6198e70bc88f1c9758b94578d5a7c4c91a87af3cf31c"},
+ {file = "cffi-1.16.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a6a14b17d7e17fa0d207ac08642c8820f84f25ce17a442fd15e27ea18d67c59b"},
+ {file = "cffi-1.16.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6602bc8dc6f3a9e02b6c22c4fc1e47aa50f8f8e6d3f78a5e16ac33ef5fefa324"},
+ {file = "cffi-1.16.0-cp38-cp38-win32.whl", hash = "sha256:131fd094d1065b19540c3d72594260f118b231090295d8c34e19a7bbcf2e860a"},
+ {file = "cffi-1.16.0-cp38-cp38-win_amd64.whl", hash = "sha256:31d13b0f99e0836b7ff893d37af07366ebc90b678b6664c955b54561fc36ef36"},
+ {file = "cffi-1.16.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:582215a0e9adbe0e379761260553ba11c58943e4bbe9c36430c4ca6ac74b15ed"},
+ {file = "cffi-1.16.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:b29ebffcf550f9da55bec9e02ad430c992a87e5f512cd63388abb76f1036d8d2"},
+ {file = "cffi-1.16.0-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:dc9b18bf40cc75f66f40a7379f6a9513244fe33c0e8aa72e2d56b0196a7ef872"},
+ {file = "cffi-1.16.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9cb4a35b3642fc5c005a6755a5d17c6c8b6bcb6981baf81cea8bfbc8903e8ba8"},
+ {file = "cffi-1.16.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b86851a328eedc692acf81fb05444bdf1891747c25af7529e39ddafaf68a4f3f"},
+ {file = "cffi-1.16.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c0f31130ebc2d37cdd8e44605fb5fa7ad59049298b3f745c74fa74c62fbfcfc4"},
+ {file = "cffi-1.16.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8f8e709127c6c77446a8c0a8c8bf3c8ee706a06cd44b1e827c3e6a2ee6b8c098"},
+ {file = "cffi-1.16.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:748dcd1e3d3d7cd5443ef03ce8685043294ad6bd7c02a38d1bd367cfd968e000"},
+ {file = "cffi-1.16.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:8895613bcc094d4a1b2dbe179d88d7fb4a15cee43c052e8885783fac397d91fe"},
+ {file = "cffi-1.16.0-cp39-cp39-win32.whl", hash = "sha256:ed86a35631f7bfbb28e108dd96773b9d5a6ce4811cf6ea468bb6a359b256b1e4"},
+ {file = "cffi-1.16.0-cp39-cp39-win_amd64.whl", hash = "sha256:3686dffb02459559c74dd3d81748269ffb0eb027c39a6fc99502de37d501faa8"},
+ {file = "cffi-1.16.0.tar.gz", hash = "sha256:bcb3ef43e58665bbda2fb198698fcae6776483e0c4a631aa5647806c25e02cc0"},
+]
+
+[package.dependencies]
+pycparser = "*"
+
+[[package]]
+name = "cfgv"
+version = "3.4.0"
+description = "Validate configuration and produce human readable error messages."
+optional = false
+python-versions = ">=3.8"
+files = [
+ {file = "cfgv-3.4.0-py2.py3-none-any.whl", hash = "sha256:b7265b1f29fd3316bfcd2b330d63d024f2bfd8bcb8b0272f8e19a504856c48f9"},
+ {file = "cfgv-3.4.0.tar.gz", hash = "sha256:e52591d4c5f5dead8e0f673fb16db7949d2cfb3f7da4582893288f0ded8fe560"},
+]
+
+[[package]]
+name = "charset-normalizer"
+version = "3.3.2"
+description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet."
+optional = false
+python-versions = ">=3.7.0"
+files = [
+ {file = "charset-normalizer-3.3.2.tar.gz", hash = "sha256:f30c3cb33b24454a82faecaf01b19c18562b1e89558fb6c56de4d9118a032fd5"},
+ {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:25baf083bf6f6b341f4121c2f3c548875ee6f5339300e08be3f2b2ba1721cdd3"},
+ {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:06435b539f889b1f6f4ac1758871aae42dc3a8c0e24ac9e60c2384973ad73027"},
+ {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9063e24fdb1e498ab71cb7419e24622516c4a04476b17a2dab57e8baa30d6e03"},
+ {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6897af51655e3691ff853668779c7bad41579facacf5fd7253b0133308cf000d"},
+ {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1d3193f4a680c64b4b6a9115943538edb896edc190f0b222e73761716519268e"},
+ {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:cd70574b12bb8a4d2aaa0094515df2463cb429d8536cfb6c7ce983246983e5a6"},
+ {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8465322196c8b4d7ab6d1e049e4c5cb460d0394da4a27d23cc242fbf0034b6b5"},
+ {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a9a8e9031d613fd2009c182b69c7b2c1ef8239a0efb1df3f7c8da66d5dd3d537"},
+ {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:beb58fe5cdb101e3a055192ac291b7a21e3b7ef4f67fa1d74e331a7f2124341c"},
+ {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:e06ed3eb3218bc64786f7db41917d4e686cc4856944f53d5bdf83a6884432e12"},
+ {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:2e81c7b9c8979ce92ed306c249d46894776a909505d8f5a4ba55b14206e3222f"},
+ {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:572c3763a264ba47b3cf708a44ce965d98555f618ca42c926a9c1616d8f34269"},
+ {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:fd1abc0d89e30cc4e02e4064dc67fcc51bd941eb395c502aac3ec19fab46b519"},
+ {file = "charset_normalizer-3.3.2-cp310-cp310-win32.whl", hash = "sha256:3d47fa203a7bd9c5b6cee4736ee84ca03b8ef23193c0d1ca99b5089f72645c73"},
+ {file = "charset_normalizer-3.3.2-cp310-cp310-win_amd64.whl", hash = "sha256:10955842570876604d404661fbccbc9c7e684caf432c09c715ec38fbae45ae09"},
+ {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:802fe99cca7457642125a8a88a084cef28ff0cf9407060f7b93dca5aa25480db"},
+ {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:573f6eac48f4769d667c4442081b1794f52919e7edada77495aaed9236d13a96"},
+ {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:549a3a73da901d5bc3ce8d24e0600d1fa85524c10287f6004fbab87672bf3e1e"},
+ {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f27273b60488abe721a075bcca6d7f3964f9f6f067c8c4c605743023d7d3944f"},
+ {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1ceae2f17a9c33cb48e3263960dc5fc8005351ee19db217e9b1bb15d28c02574"},
+ {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:65f6f63034100ead094b8744b3b97965785388f308a64cf8d7c34f2f2e5be0c4"},
+ {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:753f10e867343b4511128c6ed8c82f7bec3bd026875576dfd88483c5c73b2fd8"},
+ {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4a78b2b446bd7c934f5dcedc588903fb2f5eec172f3d29e52a9096a43722adfc"},
+ {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:e537484df0d8f426ce2afb2d0f8e1c3d0b114b83f8850e5f2fbea0e797bd82ae"},
+ {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:eb6904c354526e758fda7167b33005998fb68c46fbc10e013ca97f21ca5c8887"},
+ {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:deb6be0ac38ece9ba87dea880e438f25ca3eddfac8b002a2ec3d9183a454e8ae"},
+ {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:4ab2fe47fae9e0f9dee8c04187ce5d09f48eabe611be8259444906793ab7cbce"},
+ {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:80402cd6ee291dcb72644d6eac93785fe2c8b9cb30893c1af5b8fdd753b9d40f"},
+ {file = "charset_normalizer-3.3.2-cp311-cp311-win32.whl", hash = "sha256:7cd13a2e3ddeed6913a65e66e94b51d80a041145a026c27e6bb76c31a853c6ab"},
+ {file = "charset_normalizer-3.3.2-cp311-cp311-win_amd64.whl", hash = "sha256:663946639d296df6a2bb2aa51b60a2454ca1cb29835324c640dafb5ff2131a77"},
+ {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:0b2b64d2bb6d3fb9112bafa732def486049e63de9618b5843bcdd081d8144cd8"},
+ {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:ddbb2551d7e0102e7252db79ba445cdab71b26640817ab1e3e3648dad515003b"},
+ {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:55086ee1064215781fff39a1af09518bc9255b50d6333f2e4c74ca09fac6a8f6"},
+ {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8f4a014bc36d3c57402e2977dada34f9c12300af536839dc38c0beab8878f38a"},
+ {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a10af20b82360ab00827f916a6058451b723b4e65030c5a18577c8b2de5b3389"},
+ {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8d756e44e94489e49571086ef83b2bb8ce311e730092d2c34ca8f7d925cb20aa"},
+ {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:90d558489962fd4918143277a773316e56c72da56ec7aa3dc3dbbe20fdfed15b"},
+ {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6ac7ffc7ad6d040517be39eb591cac5ff87416c2537df6ba3cba3bae290c0fed"},
+ {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:7ed9e526742851e8d5cc9e6cf41427dfc6068d4f5a3bb03659444b4cabf6bc26"},
+ {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:8bdb58ff7ba23002a4c5808d608e4e6c687175724f54a5dade5fa8c67b604e4d"},
+ {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:6b3251890fff30ee142c44144871185dbe13b11bab478a88887a639655be1068"},
+ {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:b4a23f61ce87adf89be746c8a8974fe1c823c891d8f86eb218bb957c924bb143"},
+ {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:efcb3f6676480691518c177e3b465bcddf57cea040302f9f4e6e191af91174d4"},
+ {file = "charset_normalizer-3.3.2-cp312-cp312-win32.whl", hash = "sha256:d965bba47ddeec8cd560687584e88cf699fd28f192ceb452d1d7ee807c5597b7"},
+ {file = "charset_normalizer-3.3.2-cp312-cp312-win_amd64.whl", hash = "sha256:96b02a3dc4381e5494fad39be677abcb5e6634bf7b4fa83a6dd3112607547001"},
+ {file = "charset_normalizer-3.3.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:95f2a5796329323b8f0512e09dbb7a1860c46a39da62ecb2324f116fa8fdc85c"},
+ {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c002b4ffc0be611f0d9da932eb0f704fe2602a9a949d1f738e4c34c75b0863d5"},
+ {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a981a536974bbc7a512cf44ed14938cf01030a99e9b3a06dd59578882f06f985"},
+ {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3287761bc4ee9e33561a7e058c72ac0938c4f57fe49a09eae428fd88aafe7bb6"},
+ {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:42cb296636fcc8b0644486d15c12376cb9fa75443e00fb25de0b8602e64c1714"},
+ {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0a55554a2fa0d408816b3b5cedf0045f4b8e1a6065aec45849de2d6f3f8e9786"},
+ {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:c083af607d2515612056a31f0a8d9e0fcb5876b7bfc0abad3ecd275bc4ebc2d5"},
+ {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:87d1351268731db79e0f8e745d92493ee2841c974128ef629dc518b937d9194c"},
+ {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:bd8f7df7d12c2db9fab40bdd87a7c09b1530128315d047a086fa3ae3435cb3a8"},
+ {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:c180f51afb394e165eafe4ac2936a14bee3eb10debc9d9e4db8958fe36afe711"},
+ {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:8c622a5fe39a48f78944a87d4fb8a53ee07344641b0562c540d840748571b811"},
+ {file = "charset_normalizer-3.3.2-cp37-cp37m-win32.whl", hash = "sha256:db364eca23f876da6f9e16c9da0df51aa4f104a972735574842618b8c6d999d4"},
+ {file = "charset_normalizer-3.3.2-cp37-cp37m-win_amd64.whl", hash = "sha256:86216b5cee4b06df986d214f664305142d9c76df9b6512be2738aa72a2048f99"},
+ {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:6463effa3186ea09411d50efc7d85360b38d5f09b870c48e4600f63af490e56a"},
+ {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:6c4caeef8fa63d06bd437cd4bdcf3ffefe6738fb1b25951440d80dc7df8c03ac"},
+ {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:37e55c8e51c236f95b033f6fb391d7d7970ba5fe7ff453dad675e88cf303377a"},
+ {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fb69256e180cb6c8a894fee62b3afebae785babc1ee98b81cdf68bbca1987f33"},
+ {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ae5f4161f18c61806f411a13b0310bea87f987c7d2ecdbdaad0e94eb2e404238"},
+ {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b2b0a0c0517616b6869869f8c581d4eb2dd83a4d79e0ebcb7d373ef9956aeb0a"},
+ {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:45485e01ff4d3630ec0d9617310448a8702f70e9c01906b0d0118bdf9d124cf2"},
+ {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:eb00ed941194665c332bf8e078baf037d6c35d7c4f3102ea2d4f16ca94a26dc8"},
+ {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:2127566c664442652f024c837091890cb1942c30937add288223dc895793f898"},
+ {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:a50aebfa173e157099939b17f18600f72f84eed3049e743b68ad15bd69b6bf99"},
+ {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:4d0d1650369165a14e14e1e47b372cfcb31d6ab44e6e33cb2d4e57265290044d"},
+ {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:923c0c831b7cfcb071580d3f46c4baf50f174be571576556269530f4bbd79d04"},
+ {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:06a81e93cd441c56a9b65d8e1d043daeb97a3d0856d177d5c90ba85acb3db087"},
+ {file = "charset_normalizer-3.3.2-cp38-cp38-win32.whl", hash = "sha256:6ef1d82a3af9d3eecdba2321dc1b3c238245d890843e040e41e470ffa64c3e25"},
+ {file = "charset_normalizer-3.3.2-cp38-cp38-win_amd64.whl", hash = "sha256:eb8821e09e916165e160797a6c17edda0679379a4be5c716c260e836e122f54b"},
+ {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:c235ebd9baae02f1b77bcea61bce332cb4331dc3617d254df3323aa01ab47bd4"},
+ {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:5b4c145409bef602a690e7cfad0a15a55c13320ff7a3ad7ca59c13bb8ba4d45d"},
+ {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:68d1f8a9e9e37c1223b656399be5d6b448dea850bed7d0f87a8311f1ff3dabb0"},
+ {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:22afcb9f253dac0696b5a4be4a1c0f8762f8239e21b99680099abd9b2b1b2269"},
+ {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e27ad930a842b4c5eb8ac0016b0a54f5aebbe679340c26101df33424142c143c"},
+ {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1f79682fbe303db92bc2b1136016a38a42e835d932bab5b3b1bfcfbf0640e519"},
+ {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b261ccdec7821281dade748d088bb6e9b69e6d15b30652b74cbbac25e280b796"},
+ {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:122c7fa62b130ed55f8f285bfd56d5f4b4a5b503609d181f9ad85e55c89f4185"},
+ {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:d0eccceffcb53201b5bfebb52600a5fb483a20b61da9dbc885f8b103cbe7598c"},
+ {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:9f96df6923e21816da7e0ad3fd47dd8f94b2a5ce594e00677c0013018b813458"},
+ {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:7f04c839ed0b6b98b1a7501a002144b76c18fb1c1850c8b98d458ac269e26ed2"},
+ {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:34d1c8da1e78d2e001f363791c98a272bb734000fcef47a491c1e3b0505657a8"},
+ {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:ff8fa367d09b717b2a17a052544193ad76cd49979c805768879cb63d9ca50561"},
+ {file = "charset_normalizer-3.3.2-cp39-cp39-win32.whl", hash = "sha256:aed38f6e4fb3f5d6bf81bfa990a07806be9d83cf7bacef998ab1a9bd660a581f"},
+ {file = "charset_normalizer-3.3.2-cp39-cp39-win_amd64.whl", hash = "sha256:b01b88d45a6fcb69667cd6d2f7a9aeb4bf53760d7fc536bf679ec94fe9f3ff3d"},
+ {file = "charset_normalizer-3.3.2-py3-none-any.whl", hash = "sha256:3e4d1f6587322d2788836a99c69062fbb091331ec940e02d12d179c1d53e25fc"},
+]
+
+[[package]]
+name = "ciso8601"
+version = "2.3.1"
+description = "Fast ISO8601 date time parser for Python written in C"
+optional = false
+python-versions = "*"
+files = [
+ {file = "ciso8601-2.3.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:57db9a28e87f9e4fccba643fb70a9ba1515adc5e1325508eb2c10dd96620314c"},
+ {file = "ciso8601-2.3.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:8c59646197ddbf84909b6c31d55f744cfeef51811e3910b61d0f58f2885823fd"},
+ {file = "ciso8601-2.3.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:6a25da209193134842cd573464a5323f46fcc3ed781b633f15a34793ba7e1064"},
+ {file = "ciso8601-2.3.1-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f3ae83f4e60fc7e260a4188e4ec4ac1bdd40bdb382eeda92fc266c5aa2f0a1ee"},
+ {file = "ciso8601-2.3.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:2c1ef17d1ea52a39b2dce6535583631ae4bfb65c76f0ee8c99413a6861a46c9e"},
+ {file = "ciso8601-2.3.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:3771049ba29bd1077588c0a24be1d53f7493e7cc686b2caa92f7cae129636a0e"},
+ {file = "ciso8601-2.3.1-cp310-cp310-win_amd64.whl", hash = "sha256:55381365366dacb57207cec610d26c9a6c0d237cb65a0cf67a2baaa5299f2366"},
+ {file = "ciso8601-2.3.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:9f25647803c9a5aaaed130c53bbec7ea06a4f95ba5c7016f59e444b4ef7ac39e"},
+ {file = "ciso8601-2.3.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:473288cd63efe6a2cf3f4b5f90394e53095358ccb13d6128f87a2da85d0f389b"},
+ {file = "ciso8601-2.3.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:121d27c55f4455eaa27ba3bd602beca915df9a352f235e935636a4660321070e"},
+ {file = "ciso8601-2.3.1-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ef44cb4dc83f37019a356c7a72692cbe17072456f4879ca6bc0339f67eee5d00"},
+ {file = "ciso8601-2.3.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:364702e338212b6c1a8643d9399ada21560cf132f363853473560625cb4207f1"},
+ {file = "ciso8601-2.3.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:8acb45545e6a654310c6ef788aacb2d73686646c414ceacdd9f5f78a83165af5"},
+ {file = "ciso8601-2.3.1-cp311-cp311-win_amd64.whl", hash = "sha256:99addd8b113f85fac549167073f317a318cd2b5841552598ceb97b97c5708a38"},
+ {file = "ciso8601-2.3.1-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:f39bb5936debf21c52e5d52b89f26857c303da80c43a72883946096a6ef5e561"},
+ {file = "ciso8601-2.3.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:21cf83ca945bb26ecd95364ae2c9ed0276378e5fe35ce1b64d4c6d5b33038ea3"},
+ {file = "ciso8601-2.3.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:013410263cba46748d2de29e9894341ae41223356cde7970478c32bd0984d10c"},
+ {file = "ciso8601-2.3.1-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b26935687ef1837b56997d8c61f1d789e698be58b261410e629eda9c89812141"},
+ {file = "ciso8601-2.3.1-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:0d980a2a88030d4d8b2434623c250866a75b4979d289eba69bec445c51ace99f"},
+ {file = "ciso8601-2.3.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:87721de54e008fb1c4c3978553b05a9c417aa25b76ddf5702d6f7e8d9b109288"},
+ {file = "ciso8601-2.3.1-cp312-cp312-win_amd64.whl", hash = "sha256:9f107a4c051e7c0416824279264d94f4ed3da0fbd82bd96ec3c3293426826de4"},
+ {file = "ciso8601-2.3.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:02ecbd7c8336c4e1c6bb725b898e29414ee92bdc0be6c72fb07036836b1ac867"},
+ {file = "ciso8601-2.3.1-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:36525b1f380f4601533f4631c69911e44efb9cb50beab1da3248b0daa32bced4"},
+ {file = "ciso8601-2.3.1-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:874d20c6339e9096baaadfd1b9610bb8d5b373a0f2858cc06de8142b98d2129c"},
+ {file = "ciso8601-2.3.1-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:46a3663c2cf838f0149e1cdb8e4bdc95716e03cf2d5f803a6eb755d825896ebe"},
+ {file = "ciso8601-2.3.1-cp36-cp36m-win_amd64.whl", hash = "sha256:e8e76825f80ce313d75bbbef1d3b8bd9e0ce31dbc157d1981e9593922c9983e7"},
+ {file = "ciso8601-2.3.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:6850889813f3135e0aa18f0aaec64249dd81d36a1b9bce60bb45182930c86663"},
+ {file = "ciso8601-2.3.1-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2c690ac24ec3407f68cdfd5e032c6cb18126ef33d6c4b3db0669b9cbb8c96bd4"},
+ {file = "ciso8601-2.3.1-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:024c52d5d0670f15ca3dc53eff7345b6eaee22fba929675f6a408f9d1e159d98"},
+ {file = "ciso8601-2.3.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:e7ae2c3442d042de5330672d0d28486ed92f9d7c6dc010943aa618fd361d4638"},
+ {file = "ciso8601-2.3.1-cp37-cp37m-win_amd64.whl", hash = "sha256:22128f0def36fa3c4cf0c482a216e8b8ad722def08bc11c07438eff82bdcd02a"},
+ {file = "ciso8601-2.3.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:025859ec286a994aa3f2120c0f27d053b719cabc975398338374f2cc1f961125"},
+ {file = "ciso8601-2.3.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:2a64ff58904d4418d60fa9619014ae820ae21f7aef58da46df78a4c647f951ec"},
+ {file = "ciso8601-2.3.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:d1f85c0b7fa742bbfd18177137ccbaa3f867dd06157f91595075bb959a733048"},
+ {file = "ciso8601-2.3.1-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e4ac59453664781dfddebee51f9a36e41819993823fdb09ddc0ce0e4bd3ff0c3"},
+ {file = "ciso8601-2.3.1-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:eaecca7e0c3ef9e8f5e963e212b083684e849f9a9bb25834d3042363223a73cd"},
+ {file = "ciso8601-2.3.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:ad8f417c45eea973a694599b96f40d841215bfee352cb9963383e8d66b309981"},
+ {file = "ciso8601-2.3.1-cp38-cp38-win_amd64.whl", hash = "sha256:b869396e9756a7c0696d8eb69ce1d8980bea5e25c86e5996b10d78c900a4362c"},
+ {file = "ciso8601-2.3.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:7eb7b5ef8714d3d1fe9f3256b7a679ad783da899a0b7503a5ace78186735f840"},
+ {file = "ciso8601-2.3.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:02828107880848ff497971ebc98e6dc851ad7af8ec14a58089e0e11f3111cad6"},
+ {file = "ciso8601-2.3.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:566b4a8b2f9717e54ffcdd732a7c8051a91da30a60a4f1dafb62e303a1dbac69"},
+ {file = "ciso8601-2.3.1-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:58a749d63f28c2eda71416c9d6014113b0748abf5fd14c502b01bd515502fedf"},
+ {file = "ciso8601-2.3.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:cb135de0e3b8feb7e74a4f7a234e8c8545957fe8d26316a1a549553f425c629d"},
+ {file = "ciso8601-2.3.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:695583810836a42945084b33621b22b0309701c6916689f6a3588fa44c5bc413"},
+ {file = "ciso8601-2.3.1-cp39-cp39-win_amd64.whl", hash = "sha256:21204d98496cf5c0511dc21533be55c2a2d34b8c65603946a116812ffbae3b2d"},
+ {file = "ciso8601-2.3.1-pp310-pypy310_pp73-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8c29ea2b03dee2dc0a5d3e4a0b7d7768c597781e9fa451fe1025600f7cb55a89"},
+ {file = "ciso8601-2.3.1-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:7533256af90724b8b7a707dcd1be4b67989447595c8e1e1c28399d4fd51dac50"},
+ {file = "ciso8601-2.3.1-pp37-pypy37_pp73-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d4bc9d577c0d1e57532513fc2899f5231727e28981a426767f7fa13dacb18c06"},
+ {file = "ciso8601-2.3.1-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:4e30501eed43eea7ef64f032c81cd1d8b2020035cbdcefad40db72e2f3bc97ff"},
+ {file = "ciso8601-2.3.1-pp38-pypy38_pp73-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:070f568de3bc269268296cb9265704dc5fcb9d4c12b1f1c67536624174df5d09"},
+ {file = "ciso8601-2.3.1-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:9065053c034c80c0afd74c71a4906675d07078a05cfd1cb5ff70661378cdbe60"},
+ {file = "ciso8601-2.3.1-pp39-pypy39_pp73-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4ac00d293cdb3d1a5c78e09b3d75c7b0292ab45d5b26853b436ff5087eba2165"},
+ {file = "ciso8601-2.3.1-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:06941e2ee46701f083aeb21d13eb762d74d5ed6c46ff22119f27a42ed6edc8f9"},
+ {file = "ciso8601-2.3.1.tar.gz", hash = "sha256:3212c7ffe5d8080270548b5f2692ffd2039683b6628a8d2ad456122cc5793c4c"},
+]
+
+[[package]]
+name = "click"
+version = "8.1.3"
+description = "Composable command line interface toolkit"
+optional = false
+python-versions = ">=3.7"
+files = [
+ {file = "click-8.1.3-py3-none-any.whl", hash = "sha256:bb4d8133cb15a609f44e8213d9b391b0809795062913b383c62be0ee95b1db48"},
+ {file = "click-8.1.3.tar.gz", hash = "sha256:7682dc8afb30297001674575ea00d1814d808d6a36af415a82bd481d37ba7b8e"},
+]
+
+[package.dependencies]
+colorama = {version = "*", markers = "platform_system == \"Windows\""}
+
+[[package]]
+name = "cmem-cmempy"
+version = "21.2.3"
+description = "API wrapper for eccenca Corporate Memory"
+optional = false
+python-versions = "*"
+files = [
+ {file = "cmem_cmempy-21.2.3-py3-none-any.whl", hash = "sha256:6b64b695e954df194ee8947ff203c63b35839dd2274dbef97f14c3fbefdac8fd"},
+ {file = "cmem_cmempy-21.2.3.tar.gz", hash = "sha256:81c7f07291d4287a828db14bdc68a54497ada8ba19cac87048432a310df818aa"},
+]
+
+[package.dependencies]
+certifi = "*"
+pyparsing = "*"
+rdflib = "*"
+requests = "*"
+six = "*"
+
+[[package]]
+name = "colorama"
+version = "0.4.6"
+description = "Cross-platform colored terminal text."
+optional = false
+python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7"
+files = [
+ {file = "colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6"},
+ {file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"},
+]
+
+[[package]]
+name = "coverage"
+version = "7.2.7"
+description = "Code coverage measurement for Python"
+optional = false
+python-versions = ">=3.7"
+files = [
+ {file = "coverage-7.2.7-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d39b5b4f2a66ccae8b7263ac3c8170994b65266797fb96cbbfd3fb5b23921db8"},
+ {file = "coverage-7.2.7-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:6d040ef7c9859bb11dfeb056ff5b3872436e3b5e401817d87a31e1750b9ae2fb"},
+ {file = "coverage-7.2.7-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ba90a9563ba44a72fda2e85302c3abc71c5589cea608ca16c22b9804262aaeb6"},
+ {file = "coverage-7.2.7-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e7d9405291c6928619403db1d10bd07888888ec1abcbd9748fdaa971d7d661b2"},
+ {file = "coverage-7.2.7-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:31563e97dae5598556600466ad9beea39fb04e0229e61c12eaa206e0aa202063"},
+ {file = "coverage-7.2.7-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:ebba1cd308ef115925421d3e6a586e655ca5a77b5bf41e02eb0e4562a111f2d1"},
+ {file = "coverage-7.2.7-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:cb017fd1b2603ef59e374ba2063f593abe0fc45f2ad9abdde5b4d83bd922a353"},
+ {file = "coverage-7.2.7-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:d62a5c7dad11015c66fbb9d881bc4caa5b12f16292f857842d9d1871595f4495"},
+ {file = "coverage-7.2.7-cp310-cp310-win32.whl", hash = "sha256:ee57190f24fba796e36bb6d3aa8a8783c643d8fa9760c89f7a98ab5455fbf818"},
+ {file = "coverage-7.2.7-cp310-cp310-win_amd64.whl", hash = "sha256:f75f7168ab25dd93110c8a8117a22450c19976afbc44234cbf71481094c1b850"},
+ {file = "coverage-7.2.7-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:06a9a2be0b5b576c3f18f1a241f0473575c4a26021b52b2a85263a00f034d51f"},
+ {file = "coverage-7.2.7-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:5baa06420f837184130752b7c5ea0808762083bf3487b5038d68b012e5937dbe"},
+ {file = "coverage-7.2.7-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fdec9e8cbf13a5bf63290fc6013d216a4c7232efb51548594ca3631a7f13c3a3"},
+ {file = "coverage-7.2.7-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:52edc1a60c0d34afa421c9c37078817b2e67a392cab17d97283b64c5833f427f"},
+ {file = "coverage-7.2.7-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:63426706118b7f5cf6bb6c895dc215d8a418d5952544042c8a2d9fe87fcf09cb"},
+ {file = "coverage-7.2.7-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:afb17f84d56068a7c29f5fa37bfd38d5aba69e3304af08ee94da8ed5b0865833"},
+ {file = "coverage-7.2.7-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:48c19d2159d433ccc99e729ceae7d5293fbffa0bdb94952d3579983d1c8c9d97"},
+ {file = "coverage-7.2.7-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:0e1f928eaf5469c11e886fe0885ad2bf1ec606434e79842a879277895a50942a"},
+ {file = "coverage-7.2.7-cp311-cp311-win32.whl", hash = "sha256:33d6d3ea29d5b3a1a632b3c4e4f4ecae24ef170b0b9ee493883f2df10039959a"},
+ {file = "coverage-7.2.7-cp311-cp311-win_amd64.whl", hash = "sha256:5b7540161790b2f28143191f5f8ec02fb132660ff175b7747b95dcb77ac26562"},
+ {file = "coverage-7.2.7-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:f2f67fe12b22cd130d34d0ef79206061bfb5eda52feb6ce0dba0644e20a03cf4"},
+ {file = "coverage-7.2.7-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a342242fe22407f3c17f4b499276a02b01e80f861f1682ad1d95b04018e0c0d4"},
+ {file = "coverage-7.2.7-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:171717c7cb6b453aebac9a2ef603699da237f341b38eebfee9be75d27dc38e01"},
+ {file = "coverage-7.2.7-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:49969a9f7ffa086d973d91cec8d2e31080436ef0fb4a359cae927e742abfaaa6"},
+ {file = "coverage-7.2.7-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:b46517c02ccd08092f4fa99f24c3b83d8f92f739b4657b0f146246a0ca6a831d"},
+ {file = "coverage-7.2.7-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:a3d33a6b3eae87ceaefa91ffdc130b5e8536182cd6dfdbfc1aa56b46ff8c86de"},
+ {file = "coverage-7.2.7-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:976b9c42fb2a43ebf304fa7d4a310e5f16cc99992f33eced91ef6f908bd8f33d"},
+ {file = "coverage-7.2.7-cp312-cp312-win32.whl", hash = "sha256:8de8bb0e5ad103888d65abef8bca41ab93721647590a3f740100cd65c3b00511"},
+ {file = "coverage-7.2.7-cp312-cp312-win_amd64.whl", hash = "sha256:9e31cb64d7de6b6f09702bb27c02d1904b3aebfca610c12772452c4e6c21a0d3"},
+ {file = "coverage-7.2.7-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:58c2ccc2f00ecb51253cbe5d8d7122a34590fac9646a960d1430d5b15321d95f"},
+ {file = "coverage-7.2.7-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d22656368f0e6189e24722214ed8d66b8022db19d182927b9a248a2a8a2f67eb"},
+ {file = "coverage-7.2.7-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a895fcc7b15c3fc72beb43cdcbdf0ddb7d2ebc959edac9cef390b0d14f39f8a9"},
+ {file = "coverage-7.2.7-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e84606b74eb7de6ff581a7915e2dab7a28a0517fbe1c9239eb227e1354064dcd"},
+ {file = "coverage-7.2.7-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:0a5f9e1dbd7fbe30196578ca36f3fba75376fb99888c395c5880b355e2875f8a"},
+ {file = "coverage-7.2.7-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:419bfd2caae268623dd469eff96d510a920c90928b60f2073d79f8fe2bbc5959"},
+ {file = "coverage-7.2.7-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:2aee274c46590717f38ae5e4650988d1af340fe06167546cc32fe2f58ed05b02"},
+ {file = "coverage-7.2.7-cp37-cp37m-win32.whl", hash = "sha256:61b9a528fb348373c433e8966535074b802c7a5d7f23c4f421e6c6e2f1697a6f"},
+ {file = "coverage-7.2.7-cp37-cp37m-win_amd64.whl", hash = "sha256:b1c546aca0ca4d028901d825015dc8e4d56aac4b541877690eb76490f1dc8ed0"},
+ {file = "coverage-7.2.7-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:54b896376ab563bd38453cecb813c295cf347cf5906e8b41d340b0321a5433e5"},
+ {file = "coverage-7.2.7-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:3d376df58cc111dc8e21e3b6e24606b5bb5dee6024f46a5abca99124b2229ef5"},
+ {file = "coverage-7.2.7-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5e330fc79bd7207e46c7d7fd2bb4af2963f5f635703925543a70b99574b0fea9"},
+ {file = "coverage-7.2.7-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1e9d683426464e4a252bf70c3498756055016f99ddaec3774bf368e76bbe02b6"},
+ {file = "coverage-7.2.7-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8d13c64ee2d33eccf7437961b6ea7ad8673e2be040b4f7fd4fd4d4d28d9ccb1e"},
+ {file = "coverage-7.2.7-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:b7aa5f8a41217360e600da646004f878250a0d6738bcdc11a0a39928d7dc2050"},
+ {file = "coverage-7.2.7-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:8fa03bce9bfbeeef9f3b160a8bed39a221d82308b4152b27d82d8daa7041fee5"},
+ {file = "coverage-7.2.7-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:245167dd26180ab4c91d5e1496a30be4cd721a5cf2abf52974f965f10f11419f"},
+ {file = "coverage-7.2.7-cp38-cp38-win32.whl", hash = "sha256:d2c2db7fd82e9b72937969bceac4d6ca89660db0a0967614ce2481e81a0b771e"},
+ {file = "coverage-7.2.7-cp38-cp38-win_amd64.whl", hash = "sha256:2e07b54284e381531c87f785f613b833569c14ecacdcb85d56b25c4622c16c3c"},
+ {file = "coverage-7.2.7-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:537891ae8ce59ef63d0123f7ac9e2ae0fc8b72c7ccbe5296fec45fd68967b6c9"},
+ {file = "coverage-7.2.7-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:06fb182e69f33f6cd1d39a6c597294cff3143554b64b9825d1dc69d18cc2fff2"},
+ {file = "coverage-7.2.7-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:201e7389591af40950a6480bd9edfa8ed04346ff80002cec1a66cac4549c1ad7"},
+ {file = "coverage-7.2.7-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f6951407391b639504e3b3be51b7ba5f3528adbf1a8ac3302b687ecababf929e"},
+ {file = "coverage-7.2.7-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6f48351d66575f535669306aa7d6d6f71bc43372473b54a832222803eb956fd1"},
+ {file = "coverage-7.2.7-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:b29019c76039dc3c0fd815c41392a044ce555d9bcdd38b0fb60fb4cd8e475ba9"},
+ {file = "coverage-7.2.7-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:81c13a1fc7468c40f13420732805a4c38a105d89848b7c10af65a90beff25250"},
+ {file = "coverage-7.2.7-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:975d70ab7e3c80a3fe86001d8751f6778905ec723f5b110aed1e450da9d4b7f2"},
+ {file = "coverage-7.2.7-cp39-cp39-win32.whl", hash = "sha256:7ee7d9d4822c8acc74a5e26c50604dff824710bc8de424904c0982e25c39c6cb"},
+ {file = "coverage-7.2.7-cp39-cp39-win_amd64.whl", hash = "sha256:eb393e5ebc85245347950143969b241d08b52b88a3dc39479822e073a1a8eb27"},
+ {file = "coverage-7.2.7-pp37.pp38.pp39-none-any.whl", hash = "sha256:b7b4c971f05e6ae490fef852c218b0e79d4e52f79ef0c8475566584a8fb3e01d"},
+ {file = "coverage-7.2.7.tar.gz", hash = "sha256:924d94291ca674905fe9481f12294eb11f2d3d3fd1adb20314ba89e94f44ed59"},
+]
+
+[package.dependencies]
+tomli = {version = "*", optional = true, markers = "python_full_version <= \"3.11.0a6\" and extra == \"toml\""}
+
+[package.extras]
+toml = ["tomli"]
+
+[[package]]
+name = "crontab"
+version = "1.0.1"
+description = "Parse and use crontab schedules in Python"
+optional = false
+python-versions = "*"
+files = [
+ {file = "crontab-1.0.1.tar.gz", hash = "sha256:89477e3f93c81365e738d5ee2659509e6373bb2846de13922663e79aa74c6b91"},
+]
+
+[[package]]
+name = "cryptography"
+version = "42.0.8"
+description = "cryptography is a package which provides cryptographic recipes and primitives to Python developers."
+optional = false
+python-versions = ">=3.7"
+files = [
+ {file = "cryptography-42.0.8-cp37-abi3-macosx_10_12_universal2.whl", hash = "sha256:81d8a521705787afe7a18d5bfb47ea9d9cc068206270aad0b96a725022e18d2e"},
+ {file = "cryptography-42.0.8-cp37-abi3-macosx_10_12_x86_64.whl", hash = "sha256:961e61cefdcb06e0c6d7e3a1b22ebe8b996eb2bf50614e89384be54c48c6b63d"},
+ {file = "cryptography-42.0.8-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e3ec3672626e1b9e55afd0df6d774ff0e953452886e06e0f1eb7eb0c832e8902"},
+ {file = "cryptography-42.0.8-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e599b53fd95357d92304510fb7bda8523ed1f79ca98dce2f43c115950aa78801"},
+ {file = "cryptography-42.0.8-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:5226d5d21ab681f432a9c1cf8b658c0cb02533eece706b155e5fbd8a0cdd3949"},
+ {file = "cryptography-42.0.8-cp37-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:6b7c4f03ce01afd3b76cf69a5455caa9cfa3de8c8f493e0d3ab7d20611c8dae9"},
+ {file = "cryptography-42.0.8-cp37-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:2346b911eb349ab547076f47f2e035fc8ff2c02380a7cbbf8d87114fa0f1c583"},
+ {file = "cryptography-42.0.8-cp37-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:ad803773e9df0b92e0a817d22fd8a3675493f690b96130a5e24f1b8fabbea9c7"},
+ {file = "cryptography-42.0.8-cp37-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:2f66d9cd9147ee495a8374a45ca445819f8929a3efcd2e3df6428e46c3cbb10b"},
+ {file = "cryptography-42.0.8-cp37-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:d45b940883a03e19e944456a558b67a41160e367a719833c53de6911cabba2b7"},
+ {file = "cryptography-42.0.8-cp37-abi3-win32.whl", hash = "sha256:a0c5b2b0585b6af82d7e385f55a8bc568abff8923af147ee3c07bd8b42cda8b2"},
+ {file = "cryptography-42.0.8-cp37-abi3-win_amd64.whl", hash = "sha256:57080dee41209e556a9a4ce60d229244f7a66ef52750f813bfbe18959770cfba"},
+ {file = "cryptography-42.0.8-cp39-abi3-macosx_10_12_universal2.whl", hash = "sha256:dea567d1b0e8bc5764b9443858b673b734100c2871dc93163f58c46a97a83d28"},
+ {file = "cryptography-42.0.8-cp39-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c4783183f7cb757b73b2ae9aed6599b96338eb957233c58ca8f49a49cc32fd5e"},
+ {file = "cryptography-42.0.8-cp39-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a0608251135d0e03111152e41f0cc2392d1e74e35703960d4190b2e0f4ca9c70"},
+ {file = "cryptography-42.0.8-cp39-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:dc0fdf6787f37b1c6b08e6dfc892d9d068b5bdb671198c72072828b80bd5fe4c"},
+ {file = "cryptography-42.0.8-cp39-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:9c0c1716c8447ee7dbf08d6db2e5c41c688544c61074b54fc4564196f55c25a7"},
+ {file = "cryptography-42.0.8-cp39-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:fff12c88a672ab9c9c1cf7b0c80e3ad9e2ebd9d828d955c126be4fd3e5578c9e"},
+ {file = "cryptography-42.0.8-cp39-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:cafb92b2bc622cd1aa6a1dce4b93307792633f4c5fe1f46c6b97cf67073ec961"},
+ {file = "cryptography-42.0.8-cp39-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:31f721658a29331f895a5a54e7e82075554ccfb8b163a18719d342f5ffe5ecb1"},
+ {file = "cryptography-42.0.8-cp39-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:b297f90c5723d04bcc8265fc2a0f86d4ea2e0f7ab4b6994459548d3a6b992a14"},
+ {file = "cryptography-42.0.8-cp39-abi3-win32.whl", hash = "sha256:2f88d197e66c65be5e42cd72e5c18afbfae3f741742070e3019ac8f4ac57262c"},
+ {file = "cryptography-42.0.8-cp39-abi3-win_amd64.whl", hash = "sha256:fa76fbb7596cc5839320000cdd5d0955313696d9511debab7ee7278fc8b5c84a"},
+ {file = "cryptography-42.0.8-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:ba4f0a211697362e89ad822e667d8d340b4d8d55fae72cdd619389fb5912eefe"},
+ {file = "cryptography-42.0.8-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:81884c4d096c272f00aeb1f11cf62ccd39763581645b0812e99a91505fa48e0c"},
+ {file = "cryptography-42.0.8-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:c9bb2ae11bfbab395bdd072985abde58ea9860ed84e59dbc0463a5d0159f5b71"},
+ {file = "cryptography-42.0.8-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:7016f837e15b0a1c119d27ecd89b3515f01f90a8615ed5e9427e30d9cdbfed3d"},
+ {file = "cryptography-42.0.8-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:5a94eccb2a81a309806027e1670a358b99b8fe8bfe9f8d329f27d72c094dde8c"},
+ {file = "cryptography-42.0.8-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:dec9b018df185f08483f294cae6ccac29e7a6e0678996587363dc352dc65c842"},
+ {file = "cryptography-42.0.8-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:343728aac38decfdeecf55ecab3264b015be68fc2816ca800db649607aeee648"},
+ {file = "cryptography-42.0.8-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:013629ae70b40af70c9a7a5db40abe5d9054e6f4380e50ce769947b73bf3caad"},
+ {file = "cryptography-42.0.8.tar.gz", hash = "sha256:8d09d05439ce7baa8e9e95b07ec5b6c886f548deb7e0f69ef25f64b3bce842f2"},
+]
+
+[package.dependencies]
+cffi = {version = ">=1.12", markers = "platform_python_implementation != \"PyPy\""}
+
+[package.extras]
+docs = ["sphinx (>=5.3.0)", "sphinx-rtd-theme (>=1.1.1)"]
+docstest = ["pyenchant (>=1.6.11)", "readme-renderer", "sphinxcontrib-spelling (>=4.0.1)"]
+nox = ["nox"]
+pep8test = ["check-sdist", "click", "mypy", "ruff"]
+sdist = ["build"]
+ssh = ["bcrypt (>=3.1.5)"]
+test = ["certifi", "pretend", "pytest (>=6.2.0)", "pytest-benchmark", "pytest-cov", "pytest-xdist"]
+test-randomorder = ["pytest-randomly"]
+
+[[package]]
+name = "databend-py"
+version = "0.4.6"
+description = "Python driver with native interface for Databend"
+optional = false
+python-versions = ">=3.4, <4"
+files = [
+ {file = "databend-py-0.4.6.tar.gz", hash = "sha256:450ffabd75e5adff29cac394a975df98911ca468a45d8c12fe3eb5e85506c9b0"},
+ {file = "databend_py-0.4.6-py3-none-any.whl", hash = "sha256:937566f2b1fd21d096bb2743374cd2f0ed36fa97edc0ec505c93da2975a7717f"},
+]
+
+[package.dependencies]
+environs = "*"
+"mysql.connector" = "*"
+pytz = "*"
+requests = "*"
+
+[[package]]
+name = "databend-sqlalchemy"
+version = "0.2.4"
+description = "Databend dialect for SQLAlchemy."
+optional = false
+python-versions = ">=3.4, <4"
+files = [
+ {file = "databend_sqlalchemy-0.2.4-py3-none-any.whl", hash = "sha256:e50afe6dcb0c97830560ba8b9f0160d95f4e3b7199ad1955ee9c339fb28b91e6"},
+]
+
+[package.dependencies]
+databend-py = "*"
+"mysql.connector" = "*"
+sqlalchemy = "*"
+
+[package.extras]
+sqlalchemy = ["sqlalchemy (>1.3.21,<2.0)"]
+superset = ["apache-superset (>=1.4.1)"]
+
+[[package]]
+name = "defusedxml"
+version = "0.7.1"
+description = "XML bomb protection for Python stdlib modules"
+optional = false
+python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*"
+files = [
+ {file = "defusedxml-0.7.1-py2.py3-none-any.whl", hash = "sha256:a352e7e428770286cc899e2542b6cdaedb2b4953ff269a210103ec58f6198a61"},
+ {file = "defusedxml-0.7.1.tar.gz", hash = "sha256:1bb3032db185915b62d7c6209c5a8792be6a32ab2fedacc84e01b52c51aa3e69"},
+]
+
+[[package]]
+name = "deprecated"
+version = "1.2.14"
+description = "Python @deprecated decorator to deprecate old python classes, functions or methods."
+optional = false
+python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*"
+files = [
+ {file = "Deprecated-1.2.14-py2.py3-none-any.whl", hash = "sha256:6fac8b097794a90302bdbb17b9b815e732d3c4720583ff1b198499d78470466c"},
+ {file = "Deprecated-1.2.14.tar.gz", hash = "sha256:e5323eb936458dccc2582dc6f9c322c852a775a27065ff2b0c4970b9d53d01b3"},
+]
+
+[package.dependencies]
+wrapt = ">=1.10,<2"
+
+[package.extras]
+dev = ["PyTest", "PyTest-Cov", "bump2version (<1)", "sphinx (<2)", "tox"]
+
+[[package]]
+name = "disposable-email-domains"
+version = "0.0.95"
+description = "A set of disposable email domains"
+optional = false
+python-versions = "*"
+files = [
+ {file = "disposable-email-domains-0.0.95.tar.gz", hash = "sha256:36fa1ee104c19e618a7ea33c12e1edf1d3cd5007411cb6d58262639f4256a983"},
+ {file = "disposable_email_domains-0.0.95-py2.py3-none-any.whl", hash = "sha256:38770e55db520f280bcf03fc51757d75f080128d7723ce4c41054ae2420ed161"},
+]
+
+[package.extras]
+dev = ["check-manifest"]
+
+[[package]]
+name = "distlib"
+version = "0.3.8"
+description = "Distribution utilities"
+optional = false
+python-versions = "*"
+files = [
+ {file = "distlib-0.3.8-py2.py3-none-any.whl", hash = "sha256:034db59a0b96f8ca18035f36290806a9a6e6bd9d1ff91e45a7f172eb17e51784"},
+ {file = "distlib-0.3.8.tar.gz", hash = "sha256:1530ea13e350031b6312d8580ddb6b27a104275a31106523b8f123787f494f64"},
+]
+
+[[package]]
+name = "dnspython"
+version = "2.6.1"
+description = "DNS toolkit"
+optional = false
+python-versions = ">=3.8"
+files = [
+ {file = "dnspython-2.6.1-py3-none-any.whl", hash = "sha256:5ef3b9680161f6fa89daf8ad451b5f1a33b18ae8a1c6778cdf4b43f08c0a6e50"},
+ {file = "dnspython-2.6.1.tar.gz", hash = "sha256:e8f0f9c23a7b7cb99ded64e6c3a6f3e701d78f50c55e002b839dea7225cff7cc"},
+]
+
+[package.extras]
+dev = ["black (>=23.1.0)", "coverage (>=7.0)", "flake8 (>=7)", "mypy (>=1.8)", "pylint (>=3)", "pytest (>=7.4)", "pytest-cov (>=4.1.0)", "sphinx (>=7.2.0)", "twine (>=4.0.0)", "wheel (>=0.42.0)"]
+dnssec = ["cryptography (>=41)"]
+doh = ["h2 (>=4.1.0)", "httpcore (>=1.0.0)", "httpx (>=0.26.0)"]
+doq = ["aioquic (>=0.9.25)"]
+idna = ["idna (>=3.6)"]
+trio = ["trio (>=0.23)"]
+wmi = ["wmi (>=1.5.1)"]
+
+[[package]]
+name = "e6data-python-connector"
+version = "1.1.9"
+description = "Client for the e6data distributed SQL Engine."
+optional = false
+python-versions = "*"
+files = [
+ {file = "e6data-python-connector-1.1.9.tar.gz", hash = "sha256:b8b3ea750b397ddc7aa250c46ed8b2ae00edab64267ffce9a76e4add69d79fe8"},
+]
+
+[package.dependencies]
+future = "*"
+grpcio = "*"
+grpcio-tools = "*"
+pycryptodome = "*"
+python-dateutil = "*"
+pytz = "*"
+sqlalchemy = ">=1.0.0"
+thrift = "*"
+
+[[package]]
+name = "elementpath"
+version = "4.1.5"
+description = "XPath 1.0/2.0/3.0/3.1 parsers and selectors for ElementTree and lxml"
+optional = false
+python-versions = ">=3.7"
+files = [
+ {file = "elementpath-4.1.5-py3-none-any.whl", hash = "sha256:2ac1a2fb31eb22bbbf817f8cf6752f844513216263f0e3892c8e79782fe4bb55"},
+ {file = "elementpath-4.1.5.tar.gz", hash = "sha256:c2d6dc524b29ef751ecfc416b0627668119d8812441c555d7471da41d4bacb8d"},
+]
+
+[package.extras]
+dev = ["Sphinx", "coverage", "flake8", "lxml", "lxml-stubs", "memory-profiler", "memray", "mypy", "tox", "xmlschema (>=2.0.0)"]
+
+[[package]]
+name = "environs"
+version = "10.2.0"
+description = "simplified environment variable parsing"
+optional = false
+python-versions = ">=3.8"
+files = [
+ {file = "environs-10.2.0-py3-none-any.whl", hash = "sha256:579dddb252ef4bb83a302df82a99c98f6f3db30f043d1b7acff36264b0bfdc69"},
+ {file = "environs-10.2.0.tar.gz", hash = "sha256:9513dd388c1eeb8e82f1ea5a701356abfb7a3d79925bff937ade67fe096e420d"},
+]
+
+[package.dependencies]
+marshmallow = ">=3.0.0"
+python-dotenv = "*"
+
+[package.extras]
+dev = ["environs[lint,tests]", "tox"]
+django = ["dj-database-url", "dj-email-url", "django-cache-url"]
+lint = ["flake8 (==7.0.0)", "flake8-bugbear (==23.11.28)", "mypy (==1.8.0)", "pre-commit (>=3.6,<4.0)"]
+tests = ["environs[django]", "pytest"]
+
+[[package]]
+name = "et-xmlfile"
+version = "1.1.0"
+description = "An implementation of lxml.xmlfile for the standard library"
+optional = false
+python-versions = ">=3.6"
+files = [
+ {file = "et_xmlfile-1.1.0-py3-none-any.whl", hash = "sha256:a2ba85d1d6a74ef63837eed693bcb89c3f752169b0e3e7ae5b16ca5e1b3deada"},
+ {file = "et_xmlfile-1.1.0.tar.gz", hash = "sha256:8eb9e2bc2f8c97e37a2dc85a09ecdcdec9d8a396530a6d5a33b30b9a92da0c5c"},
+]
+
+[[package]]
+name = "exceptiongroup"
+version = "1.2.0"
+description = "Backport of PEP 654 (exception groups)"
+optional = false
+python-versions = ">=3.7"
+files = [
+ {file = "exceptiongroup-1.2.0-py3-none-any.whl", hash = "sha256:4bfd3996ac73b41e9b9628b04e079f193850720ea5945fc96a08633c66912f14"},
+ {file = "exceptiongroup-1.2.0.tar.gz", hash = "sha256:91f5c769735f051a4290d52edd0858999b57e5876e9f85937691bd4c9fa3ed68"},
+]
+
+[package.extras]
+test = ["pytest (>=6)"]
+
+[[package]]
+name = "filelock"
+version = "3.13.1"
+description = "A platform independent file lock."
+optional = false
+python-versions = ">=3.8"
+files = [
+ {file = "filelock-3.13.1-py3-none-any.whl", hash = "sha256:57dbda9b35157b05fb3e58ee91448612eb674172fab98ee235ccb0b5bee19a1c"},
+ {file = "filelock-3.13.1.tar.gz", hash = "sha256:521f5f56c50f8426f5e03ad3b281b490a87ef15bc6c526f168290f0c7148d44e"},
+]
+
+[package.extras]
+docs = ["furo (>=2023.9.10)", "sphinx (>=7.2.6)", "sphinx-autodoc-typehints (>=1.24)"]
+testing = ["covdefaults (>=2.3)", "coverage (>=7.3.2)", "diff-cover (>=8)", "pytest (>=7.4.3)", "pytest-cov (>=4.1)", "pytest-mock (>=3.12)", "pytest-timeout (>=2.2)"]
+typing = ["typing-extensions (>=4.8)"]
+
+[[package]]
+name = "flask"
+version = "2.3.2"
+description = "A simple framework for building complex web applications."
+optional = false
+python-versions = ">=3.8"
+files = [
+ {file = "Flask-2.3.2-py3-none-any.whl", hash = "sha256:77fd4e1249d8c9923de34907236b747ced06e5467ecac1a7bb7115ae0e9670b0"},
+ {file = "Flask-2.3.2.tar.gz", hash = "sha256:8c2f9abd47a9e8df7f0c3f091ce9497d011dc3b31effcf4c85a6e2b50f4114ef"},
+]
+
+[package.dependencies]
+blinker = ">=1.6.2"
+click = ">=8.1.3"
+importlib-metadata = {version = ">=3.6.0", markers = "python_version < \"3.10\""}
+itsdangerous = ">=2.1.2"
+Jinja2 = ">=3.1.2"
+Werkzeug = ">=2.3.3"
+
+[package.extras]
+async = ["asgiref (>=3.2)"]
+dotenv = ["python-dotenv"]
+
+[[package]]
+name = "flask-limiter"
+version = "3.3.1"
+description = "Rate limiting for flask applications"
+optional = false
+python-versions = ">=3.7"
+files = [
+ {file = "Flask-Limiter-3.3.1.tar.gz", hash = "sha256:2b99fec0cfc44f490bd729da52bb89c5c4158f38812d0f3854c01d0a83664923"},
+ {file = "Flask_Limiter-3.3.1-py3-none-any.whl", hash = "sha256:3451fb8d84f50007753b799831c57c59c1eb3432cc9754cc4b7e41a88d8bdf51"},
+]
+
+[package.dependencies]
+Flask = ">=2"
+limits = ">=2.8"
+ordered-set = ">4,<5"
+rich = ">=12,<14"
+typing-extensions = ">=4"
+
+[package.extras]
+memcached = ["limits[memcached]"]
+mongodb = ["limits[mongodb]"]
+redis = ["limits[redis]"]
+
+[[package]]
+name = "flask-login"
+version = "0.6.0"
+description = "User authentication and session management for Flask."
+optional = false
+python-versions = ">=3.7"
+files = [
+ {file = "Flask-Login-0.6.0.tar.gz", hash = "sha256:aa84fcfb4c3cf09ca58c08e816b7bce73f1349ba1cf13d00d8dffc5872d5fcf6"},
+ {file = "Flask_Login-0.6.0-py3-none-any.whl", hash = "sha256:5cb01ce4dc253967b6ac722a11e46de83b6272ef7a19cc7b5725ae636916d04d"},
+]
+
+[package.dependencies]
+Flask = ">=1.0.4"
+Werkzeug = ">=1.0.1"
+
+[[package]]
+name = "flask-mail"
+version = "0.9.1"
+description = "Flask extension for sending email"
+optional = false
+python-versions = "*"
+files = [
+ {file = "Flask-Mail-0.9.1.tar.gz", hash = "sha256:22e5eb9a940bf407bcf30410ecc3708f3c56cc44b29c34e1726fe85006935f41"},
+]
+
+[package.dependencies]
+blinker = "*"
+Flask = "*"
+
+[[package]]
+name = "flask-migrate"
+version = "2.5.2"
+description = "SQLAlchemy database migrations for Flask applications using Alembic"
+optional = false
+python-versions = "*"
+files = [
+ {file = "Flask-Migrate-2.5.2.tar.gz", hash = "sha256:a96ff1875a49a40bd3e8ac04fce73fdb0870b9211e6168608cbafa4eb839d502"},
+ {file = "Flask_Migrate-2.5.2-py2.py3-none-any.whl", hash = "sha256:6fb038be63d4c60727d5dfa5f581a6189af5b4e2925bc378697b4f0a40cfb4e1"},
+]
+
+[package.dependencies]
+alembic = ">=0.7"
+Flask = ">=0.9"
+Flask-SQLAlchemy = ">=1.0"
+
+[[package]]
+name = "flask-restful"
+version = "0.3.10"
+description = "Simple framework for creating REST APIs"
+optional = false
+python-versions = "*"
+files = [
+ {file = "Flask-RESTful-0.3.10.tar.gz", hash = "sha256:fe4af2ef0027df8f9b4f797aba20c5566801b6ade995ac63b588abf1a59cec37"},
+ {file = "Flask_RESTful-0.3.10-py2.py3-none-any.whl", hash = "sha256:1cf93c535172f112e080b0d4503a8d15f93a48c88bdd36dd87269bdaf405051b"},
+]
+
+[package.dependencies]
+aniso8601 = ">=0.82"
+Flask = ">=0.8"
+pytz = "*"
+six = ">=1.3.0"
+
+[package.extras]
+docs = ["sphinx"]
+
+[[package]]
+name = "flask-sqlalchemy"
+version = "2.5.1"
+description = "Adds SQLAlchemy support to your Flask application."
+optional = false
+python-versions = ">= 2.7, != 3.0.*, != 3.1.*, != 3.2.*, != 3.3.*"
+files = [
+ {file = "Flask-SQLAlchemy-2.5.1.tar.gz", hash = "sha256:2bda44b43e7cacb15d4e05ff3cc1f8bc97936cc464623424102bfc2c35e95912"},
+ {file = "Flask_SQLAlchemy-2.5.1-py2.py3-none-any.whl", hash = "sha256:f12c3d4cc5cc7fdcc148b9527ea05671718c3ea45d50c7e732cceb33f574b390"},
+]
+
+[package.dependencies]
+Flask = ">=0.10"
+SQLAlchemy = ">=0.8.0"
+
+[[package]]
+name = "flask-talisman"
+version = "0.7.0"
+description = "HTTP security headers for Flask."
+optional = false
+python-versions = "*"
+files = [
+ {file = "flask-talisman-0.7.0.tar.gz", hash = "sha256:468131464a249274ed226efc21b372518f442487e58918ccab8357eaa638fd1f"},
+ {file = "flask_talisman-0.7.0-py2.py3-none-any.whl", hash = "sha256:eaa754f4b771dfbe473843391d69643b79e3a38c865790011ac5e4179c68e3ec"},
+]
+
+[package.dependencies]
+six = ">=1.9.0"
+
+[[package]]
+name = "flask-wtf"
+version = "1.1.1"
+description = "Form rendering, validation, and CSRF protection for Flask with WTForms."
+optional = false
+python-versions = ">=3.7"
+files = [
+ {file = "Flask-WTF-1.1.1.tar.gz", hash = "sha256:41c4244e9ae626d63bed42ae4785b90667b885b1535d5a4095e1f63060d12aa9"},
+ {file = "Flask_WTF-1.1.1-py3-none-any.whl", hash = "sha256:7887d6f1ebb3e17bf648647422f0944c9a469d0fcf63e3b66fb9a83037e38b2c"},
+]
+
+[package.dependencies]
+Flask = "*"
+itsdangerous = "*"
+WTForms = "*"
+
+[package.extras]
+email = ["email-validator"]
+
+[[package]]
+name = "freezegun"
+version = "1.2.1"
+description = "Let your Python tests travel through time"
+optional = false
+python-versions = ">=3.6"
+files = [
+ {file = "freezegun-1.2.1-py3-none-any.whl", hash = "sha256:15103a67dfa868ad809a8f508146e396be2995172d25f927e48ce51c0bf5cb09"},
+ {file = "freezegun-1.2.1.tar.gz", hash = "sha256:b4c64efb275e6bc68dc6e771b17ffe0ff0f90b81a2a5189043550b6519926ba4"},
+]
+
+[package.dependencies]
+python-dateutil = ">=2.7"
+
+[[package]]
+name = "funcy"
+version = "1.13"
+description = "A fancy and practical functional tools"
+optional = false
+python-versions = "*"
+files = [
+ {file = "funcy-1.13-py2.py3-none-any.whl", hash = "sha256:141950038e72bdc2d56fa82468586a1d1291b9cc9346daaaa322dffed1d1da6e"},
+ {file = "funcy-1.13.tar.gz", hash = "sha256:918f333f675d9841ec7d77b9f0d5a272ed290393a33c8ef20e605847de89b1c3"},
+]
+
+[[package]]
+name = "future"
+version = "0.18.3"
+description = "Clean single-source support for Python 3 and 2"
+optional = false
+python-versions = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*"
+files = [
+ {file = "future-0.18.3.tar.gz", hash = "sha256:34a17436ed1e96697a86f9de3d15a3b0be01d8bc8de9c1dffd59fb8234ed5307"},
+]
+
+[[package]]
+name = "futures"
+version = "3.0.5"
+description = "Backport of the concurrent.futures package from Python 3.2"
+optional = false
+python-versions = "*"
+files = [
+ {file = "futures-3.0.5-py2-none-any.whl", hash = "sha256:f7f16b6bf9653a918a03f1f2c2d62aac0cd64b1bc088e93ea279517f6b61120b"},
+ {file = "futures-3.0.5.tar.gz", hash = "sha256:0542525145d5afc984c88f914a0c85c77527f65946617edb5274f72406f981df"},
+]
+
+[[package]]
+name = "geomet"
+version = "0.1.2"
+description = "GeoJSON <-> WKT/WKB conversion utilities"
+optional = false
+python-versions = "*"
+files = [
+ {file = "geomet-0.1.2.tar.gz", hash = "sha256:cef6c73cfc0c4ea3961e16a6979dce75ef0298f0023cbd482855134dcdf7c010"},
+]
+
+[package.dependencies]
+click = "*"
+six = "*"
+
+[[package]]
+name = "gevent"
+version = "23.9.1"
+description = "Coroutine-based network library"
+optional = false
+python-versions = ">=3.8"
+files = [
+ {file = "gevent-23.9.1-cp310-cp310-macosx_11_0_universal2.whl", hash = "sha256:a3c5e9b1f766a7a64833334a18539a362fb563f6c4682f9634dea72cbe24f771"},
+ {file = "gevent-23.9.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b101086f109168b23fa3586fccd1133494bdb97f86920a24dc0b23984dc30b69"},
+ {file = "gevent-23.9.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:36a549d632c14684bcbbd3014a6ce2666c5f2a500f34d58d32df6c9ea38b6535"},
+ {file = "gevent-23.9.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:272cffdf535978d59c38ed837916dfd2b5d193be1e9e5dcc60a5f4d5025dd98a"},
+ {file = "gevent-23.9.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dcb8612787a7f4626aa881ff15ff25439561a429f5b303048f0fca8a1c781c39"},
+ {file = "gevent-23.9.1-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:d57737860bfc332b9b5aa438963986afe90f49645f6e053140cfa0fa1bdae1ae"},
+ {file = "gevent-23.9.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:5f3c781c84794926d853d6fb58554dc0dcc800ba25c41d42f6959c344b4db5a6"},
+ {file = "gevent-23.9.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:dbb22a9bbd6a13e925815ce70b940d1578dbe5d4013f20d23e8a11eddf8d14a7"},
+ {file = "gevent-23.9.1-cp310-cp310-win_amd64.whl", hash = "sha256:707904027d7130ff3e59ea387dddceedb133cc742b00b3ffe696d567147a9c9e"},
+ {file = "gevent-23.9.1-cp311-cp311-macosx_11_0_universal2.whl", hash = "sha256:45792c45d60f6ce3d19651d7fde0bc13e01b56bb4db60d3f32ab7d9ec467374c"},
+ {file = "gevent-23.9.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4e24c2af9638d6c989caffc691a039d7c7022a31c0363da367c0d32ceb4a0648"},
+ {file = "gevent-23.9.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e1ead6863e596a8cc2a03e26a7a0981f84b6b3e956101135ff6d02df4d9a6b07"},
+ {file = "gevent-23.9.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:65883ac026731ac112184680d1f0f1e39fa6f4389fd1fc0bf46cc1388e2599f9"},
+ {file = "gevent-23.9.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bf7af500da05363e66f122896012acb6e101a552682f2352b618e541c941a011"},
+ {file = "gevent-23.9.1-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:c3e5d2fa532e4d3450595244de8ccf51f5721a05088813c1abd93ad274fe15e7"},
+ {file = "gevent-23.9.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:c84d34256c243b0a53d4335ef0bc76c735873986d478c53073861a92566a8d71"},
+ {file = "gevent-23.9.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:ada07076b380918829250201df1d016bdafb3acf352f35e5693b59dceee8dd2e"},
+ {file = "gevent-23.9.1-cp311-cp311-win_amd64.whl", hash = "sha256:921dda1c0b84e3d3b1778efa362d61ed29e2b215b90f81d498eb4d8eafcd0b7a"},
+ {file = "gevent-23.9.1-cp312-cp312-macosx_11_0_universal2.whl", hash = "sha256:ed7a048d3e526a5c1d55c44cb3bc06cfdc1947d06d45006cc4cf60dedc628904"},
+ {file = "gevent-23.9.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7c1abc6f25f475adc33e5fc2dbcc26a732608ac5375d0d306228738a9ae14d3b"},
+ {file = "gevent-23.9.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4368f341a5f51611411ec3fc62426f52ac3d6d42eaee9ed0f9eebe715c80184e"},
+ {file = "gevent-23.9.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:52b4abf28e837f1865a9bdeef58ff6afd07d1d888b70b6804557e7908032e599"},
+ {file = "gevent-23.9.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:52e9f12cd1cda96603ce6b113d934f1aafb873e2c13182cf8e86d2c5c41982ea"},
+ {file = "gevent-23.9.1-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:de350fde10efa87ea60d742901e1053eb2127ebd8b59a7d3b90597eb4e586599"},
+ {file = "gevent-23.9.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:fde6402c5432b835fbb7698f1c7f2809c8d6b2bd9d047ac1f5a7c1d5aa569303"},
+ {file = "gevent-23.9.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:dd6c32ab977ecf7c7b8c2611ed95fa4aaebd69b74bf08f4b4960ad516861517d"},
+ {file = "gevent-23.9.1-cp312-cp312-win_amd64.whl", hash = "sha256:455e5ee8103f722b503fa45dedb04f3ffdec978c1524647f8ba72b4f08490af1"},
+ {file = "gevent-23.9.1-cp38-cp38-macosx_11_0_universal2.whl", hash = "sha256:7ccf0fd378257cb77d91c116e15c99e533374a8153632c48a3ecae7f7f4f09fe"},
+ {file = "gevent-23.9.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d163d59f1be5a4c4efcdd13c2177baaf24aadf721fdf2e1af9ee54a998d160f5"},
+ {file = "gevent-23.9.1-cp38-cp38-manylinux_2_28_x86_64.whl", hash = "sha256:7532c17bc6c1cbac265e751b95000961715adef35a25d2b0b1813aa7263fb397"},
+ {file = "gevent-23.9.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:78eebaf5e73ff91d34df48f4e35581ab4c84e22dd5338ef32714264063c57507"},
+ {file = "gevent-23.9.1-cp38-cp38-win32.whl", hash = "sha256:f632487c87866094546a74eefbca2c74c1d03638b715b6feb12e80120960185a"},
+ {file = "gevent-23.9.1-cp38-cp38-win_amd64.whl", hash = "sha256:62d121344f7465e3739989ad6b91f53a6ca9110518231553fe5846dbe1b4518f"},
+ {file = "gevent-23.9.1-cp39-cp39-macosx_11_0_universal2.whl", hash = "sha256:bf456bd6b992eb0e1e869e2fd0caf817f0253e55ca7977fd0e72d0336a8c1c6a"},
+ {file = "gevent-23.9.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:43daf68496c03a35287b8b617f9f91e0e7c0d042aebcc060cadc3f049aadd653"},
+ {file = "gevent-23.9.1-cp39-cp39-manylinux_2_28_x86_64.whl", hash = "sha256:7c28e38dcde327c217fdafb9d5d17d3e772f636f35df15ffae2d933a5587addd"},
+ {file = "gevent-23.9.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:fae8d5b5b8fa2a8f63b39f5447168b02db10c888a3e387ed7af2bd1b8612e543"},
+ {file = "gevent-23.9.1-cp39-cp39-win32.whl", hash = "sha256:2c7b5c9912378e5f5ccf180d1fdb1e83f42b71823483066eddbe10ef1a2fcaa2"},
+ {file = "gevent-23.9.1-cp39-cp39-win_amd64.whl", hash = "sha256:a2898b7048771917d85a1d548fd378e8a7b2ca963db8e17c6d90c76b495e0e2b"},
+ {file = "gevent-23.9.1.tar.gz", hash = "sha256:72c002235390d46f94938a96920d8856d4ffd9ddf62a303a0d7c118894097e34"},
+]
+
+[package.dependencies]
+cffi = {version = ">=1.12.2", markers = "platform_python_implementation == \"CPython\" and sys_platform == \"win32\""}
+greenlet = {version = ">=2.0.0", markers = "platform_python_implementation == \"CPython\" and python_version < \"3.11\""}
+"zope.event" = "*"
+"zope.interface" = "*"
+
+[package.extras]
+dnspython = ["dnspython (>=1.16.0,<2.0)", "idna"]
+docs = ["furo", "repoze.sphinx.autointerface", "sphinx", "sphinxcontrib-programoutput", "zope.schema"]
+monitor = ["psutil (>=5.7.0)"]
+recommended = ["cffi (>=1.12.2)", "dnspython (>=1.16.0,<2.0)", "idna", "psutil (>=5.7.0)"]
+test = ["cffi (>=1.12.2)", "coverage (>=5.0)", "dnspython (>=1.16.0,<2.0)", "idna", "objgraph", "psutil (>=5.7.0)", "requests", "setuptools"]
+
+[[package]]
+name = "google-api-python-client"
+version = "1.7.11"
+description = "Google API Client Library for Python"
+optional = false
+python-versions = ">=2.7,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*"
+files = [
+ {file = "google-api-python-client-1.7.11.tar.gz", hash = "sha256:a8a88174f66d92aed7ebbd73744c2c319b4b1ce828e565f9ec721352d2e2fb8c"},
+ {file = "google_api_python_client-1.7.11-py2-none-any.whl", hash = "sha256:3121d55d106ef1a2756e8074239512055bd99eb44da417b3dd680f9a1385adec"},
+]
+
+[package.dependencies]
+google-auth = ">=1.4.1"
+google-auth-httplib2 = ">=0.0.3"
+httplib2 = ">=0.9.2,<1dev"
+six = ">=1.6.1,<2dev"
+uritemplate = ">=3.0.0,<4dev"
+
+[[package]]
+name = "google-auth"
+version = "2.26.1"
+description = "Google Authentication Library"
+optional = false
+python-versions = ">=3.7"
+files = [
+ {file = "google-auth-2.26.1.tar.gz", hash = "sha256:54385acca5c0fbdda510cd8585ba6f3fcb06eeecf8a6ecca39d3ee148b092590"},
+ {file = "google_auth-2.26.1-py2.py3-none-any.whl", hash = "sha256:2c8b55e3e564f298122a02ab7b97458ccfcc5617840beb5d0ac757ada92c9780"},
+]
+
+[package.dependencies]
+cachetools = ">=2.0.0,<6.0"
+pyasn1-modules = ">=0.2.1"
+rsa = ">=3.1.4,<5"
+
+[package.extras]
+aiohttp = ["aiohttp (>=3.6.2,<4.0.0.dev0)", "requests (>=2.20.0,<3.0.0.dev0)"]
+enterprise-cert = ["cryptography (==36.0.2)", "pyopenssl (==22.0.0)"]
+pyopenssl = ["cryptography (>=38.0.3)", "pyopenssl (>=20.0.0)"]
+reauth = ["pyu2f (>=0.1.5)"]
+requests = ["requests (>=2.20.0,<3.0.0.dev0)"]
+
+[[package]]
+name = "google-auth-httplib2"
+version = "0.2.0"
+description = "Google Authentication Library: httplib2 transport"
+optional = false
+python-versions = "*"
+files = [
+ {file = "google-auth-httplib2-0.2.0.tar.gz", hash = "sha256:38aa7badf48f974f1eb9861794e9c0cb2a0511a4ec0679b1f886d108f5640e05"},
+ {file = "google_auth_httplib2-0.2.0-py2.py3-none-any.whl", hash = "sha256:b65a0a2123300dd71281a7bf6e64d65a0759287df52729bdd1ae2e47dc311a3d"},
+]
+
+[package.dependencies]
+google-auth = "*"
+httplib2 = ">=0.19.0"
+
+[[package]]
+name = "google-auth-oauthlib"
+version = "1.2.0"
+description = "Google Authentication Library"
+optional = false
+python-versions = ">=3.6"
+files = [
+ {file = "google-auth-oauthlib-1.2.0.tar.gz", hash = "sha256:292d2d3783349f2b0734a0a0207b1e1e322ac193c2c09d8f7c613fb7cc501ea8"},
+ {file = "google_auth_oauthlib-1.2.0-py2.py3-none-any.whl", hash = "sha256:297c1ce4cb13a99b5834c74a1fe03252e1e499716718b190f56bcb9c4abc4faf"},
+]
+
+[package.dependencies]
+google-auth = ">=2.15.0"
+requests-oauthlib = ">=0.7.0"
+
+[package.extras]
+tool = ["click (>=6.0.0)"]
+
+[[package]]
+name = "greenlet"
+version = "2.0.2"
+description = "Lightweight in-process concurrent programming"
+optional = false
+python-versions = ">=2.7,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*"
+files = [
+ {file = "greenlet-2.0.2-cp27-cp27m-macosx_10_14_x86_64.whl", hash = "sha256:bdfea8c661e80d3c1c99ad7c3ff74e6e87184895bbaca6ee8cc61209f8b9b85d"},
+ {file = "greenlet-2.0.2-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:9d14b83fab60d5e8abe587d51c75b252bcc21683f24699ada8fb275d7712f5a9"},
+ {file = "greenlet-2.0.2-cp27-cp27m-win32.whl", hash = "sha256:6c3acb79b0bfd4fe733dff8bc62695283b57949ebcca05ae5c129eb606ff2d74"},
+ {file = "greenlet-2.0.2-cp27-cp27m-win_amd64.whl", hash = "sha256:283737e0da3f08bd637b5ad058507e578dd462db259f7f6e4c5c365ba4ee9343"},
+ {file = "greenlet-2.0.2-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:d27ec7509b9c18b6d73f2f5ede2622441de812e7b1a80bbd446cb0633bd3d5ae"},
+ {file = "greenlet-2.0.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:d967650d3f56af314b72df7089d96cda1083a7fc2da05b375d2bc48c82ab3f3c"},
+ {file = "greenlet-2.0.2-cp310-cp310-macosx_11_0_x86_64.whl", hash = "sha256:30bcf80dda7f15ac77ba5af2b961bdd9dbc77fd4ac6105cee85b0d0a5fcf74df"},
+ {file = "greenlet-2.0.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:26fbfce90728d82bc9e6c38ea4d038cba20b7faf8a0ca53a9c07b67318d46088"},
+ {file = "greenlet-2.0.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9190f09060ea4debddd24665d6804b995a9c122ef5917ab26e1566dcc712ceeb"},
+ {file = "greenlet-2.0.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d75209eed723105f9596807495d58d10b3470fa6732dd6756595e89925ce2470"},
+ {file = "greenlet-2.0.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:3a51c9751078733d88e013587b108f1b7a1fb106d402fb390740f002b6f6551a"},
+ {file = "greenlet-2.0.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:76ae285c8104046b3a7f06b42f29c7b73f77683df18c49ab5af7983994c2dd91"},
+ {file = "greenlet-2.0.2-cp310-cp310-win_amd64.whl", hash = "sha256:2d4686f195e32d36b4d7cf2d166857dbd0ee9f3d20ae349b6bf8afc8485b3645"},
+ {file = "greenlet-2.0.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:c4302695ad8027363e96311df24ee28978162cdcdd2006476c43970b384a244c"},
+ {file = "greenlet-2.0.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:d4606a527e30548153be1a9f155f4e283d109ffba663a15856089fb55f933e47"},
+ {file = "greenlet-2.0.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c48f54ef8e05f04d6eff74b8233f6063cb1ed960243eacc474ee73a2ea8573ca"},
+ {file = "greenlet-2.0.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a1846f1b999e78e13837c93c778dcfc3365902cfb8d1bdb7dd73ead37059f0d0"},
+ {file = "greenlet-2.0.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3a06ad5312349fec0ab944664b01d26f8d1f05009566339ac6f63f56589bc1a2"},
+ {file = "greenlet-2.0.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:eff4eb9b7eb3e4d0cae3d28c283dc16d9bed6b193c2e1ace3ed86ce48ea8df19"},
+ {file = "greenlet-2.0.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:5454276c07d27a740c5892f4907c86327b632127dd9abec42ee62e12427ff7e3"},
+ {file = "greenlet-2.0.2-cp311-cp311-win_amd64.whl", hash = "sha256:7cafd1208fdbe93b67c7086876f061f660cfddc44f404279c1585bbf3cdc64c5"},
+ {file = "greenlet-2.0.2-cp35-cp35m-macosx_10_14_x86_64.whl", hash = "sha256:910841381caba4f744a44bf81bfd573c94e10b3045ee00de0cbf436fe50673a6"},
+ {file = "greenlet-2.0.2-cp35-cp35m-manylinux2010_x86_64.whl", hash = "sha256:18a7f18b82b52ee85322d7a7874e676f34ab319b9f8cce5de06067384aa8ff43"},
+ {file = "greenlet-2.0.2-cp35-cp35m-win32.whl", hash = "sha256:03a8f4f3430c3b3ff8d10a2a86028c660355ab637cee9333d63d66b56f09d52a"},
+ {file = "greenlet-2.0.2-cp35-cp35m-win_amd64.whl", hash = "sha256:4b58adb399c4d61d912c4c331984d60eb66565175cdf4a34792cd9600f21b394"},
+ {file = "greenlet-2.0.2-cp36-cp36m-macosx_10_14_x86_64.whl", hash = "sha256:703f18f3fda276b9a916f0934d2fb6d989bf0b4fb5a64825260eb9bfd52d78f0"},
+ {file = "greenlet-2.0.2-cp36-cp36m-manylinux2010_x86_64.whl", hash = "sha256:32e5b64b148966d9cccc2c8d35a671409e45f195864560829f395a54226408d3"},
+ {file = "greenlet-2.0.2-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2dd11f291565a81d71dab10b7033395b7a3a5456e637cf997a6f33ebdf06f8db"},
+ {file = "greenlet-2.0.2-cp36-cp36m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e0f72c9ddb8cd28532185f54cc1453f2c16fb417a08b53a855c4e6a418edd099"},
+ {file = "greenlet-2.0.2-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cd021c754b162c0fb55ad5d6b9d960db667faad0fa2ff25bb6e1301b0b6e6a75"},
+ {file = "greenlet-2.0.2-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:3c9b12575734155d0c09d6c3e10dbd81665d5c18e1a7c6597df72fd05990c8cf"},
+ {file = "greenlet-2.0.2-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:b9ec052b06a0524f0e35bd8790686a1da006bd911dd1ef7d50b77bfbad74e292"},
+ {file = "greenlet-2.0.2-cp36-cp36m-win32.whl", hash = "sha256:dbfcfc0218093a19c252ca8eb9aee3d29cfdcb586df21049b9d777fd32c14fd9"},
+ {file = "greenlet-2.0.2-cp36-cp36m-win_amd64.whl", hash = "sha256:9f35ec95538f50292f6d8f2c9c9f8a3c6540bbfec21c9e5b4b751e0a7c20864f"},
+ {file = "greenlet-2.0.2-cp37-cp37m-macosx_10_15_x86_64.whl", hash = "sha256:d5508f0b173e6aa47273bdc0a0b5ba055b59662ba7c7ee5119528f466585526b"},
+ {file = "greenlet-2.0.2-cp37-cp37m-manylinux2010_x86_64.whl", hash = "sha256:f82d4d717d8ef19188687aa32b8363e96062911e63ba22a0cff7802a8e58e5f1"},
+ {file = "greenlet-2.0.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c9c59a2120b55788e800d82dfa99b9e156ff8f2227f07c5e3012a45a399620b7"},
+ {file = "greenlet-2.0.2-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2780572ec463d44c1d3ae850239508dbeb9fed38e294c68d19a24d925d9223ca"},
+ {file = "greenlet-2.0.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:937e9020b514ceedb9c830c55d5c9872abc90f4b5862f89c0887033ae33c6f73"},
+ {file = "greenlet-2.0.2-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:36abbf031e1c0f79dd5d596bfaf8e921c41df2bdf54ee1eed921ce1f52999a86"},
+ {file = "greenlet-2.0.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:18e98fb3de7dba1c0a852731c3070cf022d14f0d68b4c87a19cc1016f3bb8b33"},
+ {file = "greenlet-2.0.2-cp37-cp37m-win32.whl", hash = "sha256:3f6ea9bd35eb450837a3d80e77b517ea5bc56b4647f5502cd28de13675ee12f7"},
+ {file = "greenlet-2.0.2-cp37-cp37m-win_amd64.whl", hash = "sha256:7492e2b7bd7c9b9916388d9df23fa49d9b88ac0640db0a5b4ecc2b653bf451e3"},
+ {file = "greenlet-2.0.2-cp38-cp38-macosx_10_15_x86_64.whl", hash = "sha256:b864ba53912b6c3ab6bcb2beb19f19edd01a6bfcbdfe1f37ddd1778abfe75a30"},
+ {file = "greenlet-2.0.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:1087300cf9700bbf455b1b97e24db18f2f77b55302a68272c56209d5587c12d1"},
+ {file = "greenlet-2.0.2-cp38-cp38-manylinux2010_x86_64.whl", hash = "sha256:ba2956617f1c42598a308a84c6cf021a90ff3862eddafd20c3333d50f0edb45b"},
+ {file = "greenlet-2.0.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fc3a569657468b6f3fb60587e48356fe512c1754ca05a564f11366ac9e306526"},
+ {file = "greenlet-2.0.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8eab883b3b2a38cc1e050819ef06a7e6344d4a990d24d45bc6f2cf959045a45b"},
+ {file = "greenlet-2.0.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:acd2162a36d3de67ee896c43effcd5ee3de247eb00354db411feb025aa319857"},
+ {file = "greenlet-2.0.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:0bf60faf0bc2468089bdc5edd10555bab6e85152191df713e2ab1fcc86382b5a"},
+ {file = "greenlet-2.0.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:b0ef99cdbe2b682b9ccbb964743a6aca37905fda5e0452e5ee239b1654d37f2a"},
+ {file = "greenlet-2.0.2-cp38-cp38-win32.whl", hash = "sha256:b80f600eddddce72320dbbc8e3784d16bd3fb7b517e82476d8da921f27d4b249"},
+ {file = "greenlet-2.0.2-cp38-cp38-win_amd64.whl", hash = "sha256:4d2e11331fc0c02b6e84b0d28ece3a36e0548ee1a1ce9ddde03752d9b79bba40"},
+ {file = "greenlet-2.0.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:8512a0c38cfd4e66a858ddd1b17705587900dd760c6003998e9472b77b56d417"},
+ {file = "greenlet-2.0.2-cp39-cp39-macosx_11_0_x86_64.whl", hash = "sha256:88d9ab96491d38a5ab7c56dd7a3cc37d83336ecc564e4e8816dbed12e5aaefc8"},
+ {file = "greenlet-2.0.2-cp39-cp39-manylinux2010_x86_64.whl", hash = "sha256:561091a7be172ab497a3527602d467e2b3fbe75f9e783d8b8ce403fa414f71a6"},
+ {file = "greenlet-2.0.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:971ce5e14dc5e73715755d0ca2975ac88cfdaefcaab078a284fea6cfabf866df"},
+ {file = "greenlet-2.0.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:be4ed120b52ae4d974aa40215fcdfde9194d63541c7ded40ee12eb4dda57b76b"},
+ {file = "greenlet-2.0.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:94c817e84245513926588caf1152e3b559ff794d505555211ca041f032abbb6b"},
+ {file = "greenlet-2.0.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:1a819eef4b0e0b96bb0d98d797bef17dc1b4a10e8d7446be32d1da33e095dbb8"},
+ {file = "greenlet-2.0.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:7efde645ca1cc441d6dc4b48c0f7101e8d86b54c8530141b09fd31cef5149ec9"},
+ {file = "greenlet-2.0.2-cp39-cp39-win32.whl", hash = "sha256:ea9872c80c132f4663822dd2a08d404073a5a9b5ba6155bea72fb2a79d1093b5"},
+ {file = "greenlet-2.0.2-cp39-cp39-win_amd64.whl", hash = "sha256:db1a39669102a1d8d12b57de2bb7e2ec9066a6f2b3da35ae511ff93b01b5d564"},
+ {file = "greenlet-2.0.2.tar.gz", hash = "sha256:e7c8dc13af7db097bed64a051d2dd49e9f0af495c26995c00a9ee842690d34c0"},
+]
+
+[package.extras]
+docs = ["Sphinx", "docutils (<0.18)"]
+test = ["objgraph", "psutil"]
+
+[[package]]
+name = "grpcio"
+version = "1.60.0"
+description = "HTTP/2-based RPC framework"
+optional = false
+python-versions = ">=3.7"
+files = [
+ {file = "grpcio-1.60.0-cp310-cp310-linux_armv7l.whl", hash = "sha256:d020cfa595d1f8f5c6b343530cd3ca16ae5aefdd1e832b777f9f0eb105f5b139"},
+ {file = "grpcio-1.60.0-cp310-cp310-macosx_12_0_universal2.whl", hash = "sha256:b98f43fcdb16172dec5f4b49f2fece4b16a99fd284d81c6bbac1b3b69fcbe0ff"},
+ {file = "grpcio-1.60.0-cp310-cp310-manylinux_2_17_aarch64.whl", hash = "sha256:20e7a4f7ded59097c84059d28230907cd97130fa74f4a8bfd1d8e5ba18c81491"},
+ {file = "grpcio-1.60.0-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:452ca5b4afed30e7274445dd9b441a35ece656ec1600b77fff8c216fdf07df43"},
+ {file = "grpcio-1.60.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:43e636dc2ce9ece583b3e2ca41df5c983f4302eabc6d5f9cd04f0562ee8ec1ae"},
+ {file = "grpcio-1.60.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:6e306b97966369b889985a562ede9d99180def39ad42c8014628dd3cc343f508"},
+ {file = "grpcio-1.60.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:f897c3b127532e6befdcf961c415c97f320d45614daf84deba0a54e64ea2457b"},
+ {file = "grpcio-1.60.0-cp310-cp310-win32.whl", hash = "sha256:b87efe4a380887425bb15f220079aa8336276398dc33fce38c64d278164f963d"},
+ {file = "grpcio-1.60.0-cp310-cp310-win_amd64.whl", hash = "sha256:a9c7b71211f066908e518a2ef7a5e211670761651039f0d6a80d8d40054047df"},
+ {file = "grpcio-1.60.0-cp311-cp311-linux_armv7l.whl", hash = "sha256:fb464479934778d7cc5baf463d959d361954d6533ad34c3a4f1d267e86ee25fd"},
+ {file = "grpcio-1.60.0-cp311-cp311-macosx_10_10_universal2.whl", hash = "sha256:4b44d7e39964e808b071714666a812049765b26b3ea48c4434a3b317bac82f14"},
+ {file = "grpcio-1.60.0-cp311-cp311-manylinux_2_17_aarch64.whl", hash = "sha256:90bdd76b3f04bdb21de5398b8a7c629676c81dfac290f5f19883857e9371d28c"},
+ {file = "grpcio-1.60.0-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:91229d7203f1ef0ab420c9b53fe2ca5c1fbeb34f69b3bc1b5089466237a4a134"},
+ {file = "grpcio-1.60.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3b36a2c6d4920ba88fa98075fdd58ff94ebeb8acc1215ae07d01a418af4c0253"},
+ {file = "grpcio-1.60.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:297eef542156d6b15174a1231c2493ea9ea54af8d016b8ca7d5d9cc65cfcc444"},
+ {file = "grpcio-1.60.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:87c9224acba0ad8bacddf427a1c2772e17ce50b3042a789547af27099c5f751d"},
+ {file = "grpcio-1.60.0-cp311-cp311-win32.whl", hash = "sha256:95ae3e8e2c1b9bf671817f86f155c5da7d49a2289c5cf27a319458c3e025c320"},
+ {file = "grpcio-1.60.0-cp311-cp311-win_amd64.whl", hash = "sha256:467a7d31554892eed2aa6c2d47ded1079fc40ea0b9601d9f79204afa8902274b"},
+ {file = "grpcio-1.60.0-cp312-cp312-linux_armv7l.whl", hash = "sha256:a7152fa6e597c20cb97923407cf0934e14224af42c2b8d915f48bc3ad2d9ac18"},
+ {file = "grpcio-1.60.0-cp312-cp312-macosx_10_10_universal2.whl", hash = "sha256:7db16dd4ea1b05ada504f08d0dca1cd9b926bed3770f50e715d087c6f00ad748"},
+ {file = "grpcio-1.60.0-cp312-cp312-manylinux_2_17_aarch64.whl", hash = "sha256:b0571a5aef36ba9177e262dc88a9240c866d903a62799e44fd4aae3f9a2ec17e"},
+ {file = "grpcio-1.60.0-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6fd9584bf1bccdfff1512719316efa77be235469e1e3295dce64538c4773840b"},
+ {file = "grpcio-1.60.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d6a478581b1a1a8fdf3318ecb5f4d0cda41cacdffe2b527c23707c9c1b8fdb55"},
+ {file = "grpcio-1.60.0-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:77c8a317f0fd5a0a2be8ed5cbe5341537d5c00bb79b3bb27ba7c5378ba77dbca"},
+ {file = "grpcio-1.60.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:1c30bb23a41df95109db130a6cc1b974844300ae2e5d68dd4947aacba5985aa5"},
+ {file = "grpcio-1.60.0-cp312-cp312-win32.whl", hash = "sha256:2aef56e85901c2397bd557c5ba514f84de1f0ae5dd132f5d5fed042858115951"},
+ {file = "grpcio-1.60.0-cp312-cp312-win_amd64.whl", hash = "sha256:e381fe0c2aa6c03b056ad8f52f8efca7be29fb4d9ae2f8873520843b6039612a"},
+ {file = "grpcio-1.60.0-cp37-cp37m-linux_armv7l.whl", hash = "sha256:92f88ca1b956eb8427a11bb8b4a0c0b2b03377235fc5102cb05e533b8693a415"},
+ {file = "grpcio-1.60.0-cp37-cp37m-macosx_10_10_universal2.whl", hash = "sha256:e278eafb406f7e1b1b637c2cf51d3ad45883bb5bd1ca56bc05e4fc135dfdaa65"},
+ {file = "grpcio-1.60.0-cp37-cp37m-manylinux_2_17_aarch64.whl", hash = "sha256:a48edde788b99214613e440fce495bbe2b1e142a7f214cce9e0832146c41e324"},
+ {file = "grpcio-1.60.0-cp37-cp37m-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:de2ad69c9a094bf37c1102b5744c9aec6cf74d2b635558b779085d0263166454"},
+ {file = "grpcio-1.60.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:073f959c6f570797272f4ee9464a9997eaf1e98c27cb680225b82b53390d61e6"},
+ {file = "grpcio-1.60.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:c826f93050c73e7769806f92e601e0efdb83ec8d7c76ddf45d514fee54e8e619"},
+ {file = "grpcio-1.60.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:9e30be89a75ee66aec7f9e60086fadb37ff8c0ba49a022887c28c134341f7179"},
+ {file = "grpcio-1.60.0-cp37-cp37m-win_amd64.whl", hash = "sha256:b0fb2d4801546598ac5cd18e3ec79c1a9af8b8f2a86283c55a5337c5aeca4b1b"},
+ {file = "grpcio-1.60.0-cp38-cp38-linux_armv7l.whl", hash = "sha256:9073513ec380434eb8d21970e1ab3161041de121f4018bbed3146839451a6d8e"},
+ {file = "grpcio-1.60.0-cp38-cp38-macosx_10_10_universal2.whl", hash = "sha256:74d7d9fa97809c5b892449b28a65ec2bfa458a4735ddad46074f9f7d9550ad13"},
+ {file = "grpcio-1.60.0-cp38-cp38-manylinux_2_17_aarch64.whl", hash = "sha256:1434ca77d6fed4ea312901122dc8da6c4389738bf5788f43efb19a838ac03ead"},
+ {file = "grpcio-1.60.0-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e61e76020e0c332a98290323ecfec721c9544f5b739fab925b6e8cbe1944cf19"},
+ {file = "grpcio-1.60.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:675997222f2e2f22928fbba640824aebd43791116034f62006e19730715166c0"},
+ {file = "grpcio-1.60.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:5208a57eae445ae84a219dfd8b56e04313445d146873117b5fa75f3245bc1390"},
+ {file = "grpcio-1.60.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:428d699c8553c27e98f4d29fdc0f0edc50e9a8a7590bfd294d2edb0da7be3629"},
+ {file = "grpcio-1.60.0-cp38-cp38-win32.whl", hash = "sha256:83f2292ae292ed5a47cdcb9821039ca8e88902923198f2193f13959360c01860"},
+ {file = "grpcio-1.60.0-cp38-cp38-win_amd64.whl", hash = "sha256:705a68a973c4c76db5d369ed573fec3367d7d196673fa86614b33d8c8e9ebb08"},
+ {file = "grpcio-1.60.0-cp39-cp39-linux_armv7l.whl", hash = "sha256:c193109ca4070cdcaa6eff00fdb5a56233dc7610216d58fb81638f89f02e4968"},
+ {file = "grpcio-1.60.0-cp39-cp39-macosx_10_10_universal2.whl", hash = "sha256:676e4a44e740deaba0f4d95ba1d8c5c89a2fcc43d02c39f69450b1fa19d39590"},
+ {file = "grpcio-1.60.0-cp39-cp39-manylinux_2_17_aarch64.whl", hash = "sha256:5ff21e000ff2f658430bde5288cb1ac440ff15c0d7d18b5fb222f941b46cb0d2"},
+ {file = "grpcio-1.60.0-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4c86343cf9ff7b2514dd229bdd88ebba760bd8973dac192ae687ff75e39ebfab"},
+ {file = "grpcio-1.60.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0fd3b3968ffe7643144580f260f04d39d869fcc2cddb745deef078b09fd2b328"},
+ {file = "grpcio-1.60.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:30943b9530fe3620e3b195c03130396cd0ee3a0d10a66c1bee715d1819001eaf"},
+ {file = "grpcio-1.60.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:b10241250cb77657ab315270b064a6c7f1add58af94befa20687e7c8d8603ae6"},
+ {file = "grpcio-1.60.0-cp39-cp39-win32.whl", hash = "sha256:79a050889eb8d57a93ed21d9585bb63fca881666fc709f5d9f7f9372f5e7fd03"},
+ {file = "grpcio-1.60.0-cp39-cp39-win_amd64.whl", hash = "sha256:8a97a681e82bc11a42d4372fe57898d270a2707f36c45c6676e49ce0d5c41353"},
+ {file = "grpcio-1.60.0.tar.gz", hash = "sha256:2199165a1affb666aa24adf0c97436686d0a61bc5fc113c037701fb7c7fceb96"},
+]
+
+[package.extras]
+protobuf = ["grpcio-tools (>=1.60.0)"]
+
+[[package]]
+name = "grpcio-tools"
+version = "1.48.2"
+description = "Protobuf code generator for gRPC"
+optional = false
+python-versions = ">=3.6"
+files = [
+ {file = "grpcio-tools-1.48.2.tar.gz", hash = "sha256:8902a035708555cddbd61b5467cea127484362decc52de03f061a1a520fe90cd"},
+ {file = "grpcio_tools-1.48.2-cp310-cp310-linux_armv7l.whl", hash = "sha256:92acc3e10ba2b0dcb90a88ae9fe1cc0ffba6868545207e4ff20ca95284f8e3c9"},
+ {file = "grpcio_tools-1.48.2-cp310-cp310-macosx_10_10_x86_64.whl", hash = "sha256:e5bb396d63495667d4df42e506eed9d74fc9a51c99c173c04395fe7604c848f1"},
+ {file = "grpcio_tools-1.48.2-cp310-cp310-manylinux_2_17_aarch64.whl", hash = "sha256:84a84d601a238572d049d3108e04fe4c206536e81076d56e623bd525a1b38def"},
+ {file = "grpcio_tools-1.48.2-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:70564521e86a0de35ea9ac6daecff10cb46860aec469af65869974807ce8e98b"},
+ {file = "grpcio_tools-1.48.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bdbbe63f6190187de5946891941629912ac8196701ed2253fa91624a397822ec"},
+ {file = "grpcio_tools-1.48.2-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:ae56f133b05b7e5d780ef7e032dd762adad7f3dc8f64adb43ff5bfabd659f435"},
+ {file = "grpcio_tools-1.48.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:f0feb4f2b777fa6377e977faa89c26359d4f31953de15e035505b92f41aa6906"},
+ {file = "grpcio_tools-1.48.2-cp310-cp310-win32.whl", hash = "sha256:80f450272316ca0924545f488c8492649ca3aeb7044d4bf59c426dcdee527f7c"},
+ {file = "grpcio_tools-1.48.2-cp310-cp310-win_amd64.whl", hash = "sha256:21ff50e321736eba22210bf9b94e05391a9ac345f26e7df16333dc75d63e74fb"},
+ {file = "grpcio_tools-1.48.2-cp36-cp36m-linux_armv7l.whl", hash = "sha256:d598ccde6338b2cfbb3124f34c95f03394209013f9b1ed4a5360a736853b1c27"},
+ {file = "grpcio_tools-1.48.2-cp36-cp36m-macosx_10_10_x86_64.whl", hash = "sha256:a43d26714933f23de93ea0bf9c86c66a6ede709b8ca32e357f9e2181703e64ae"},
+ {file = "grpcio_tools-1.48.2-cp36-cp36m-manylinux_2_17_aarch64.whl", hash = "sha256:55fdebc73fb580717656b1bafa4f8eca448726a7aa22726a6c0a7895d2f0f088"},
+ {file = "grpcio_tools-1.48.2-cp36-cp36m-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8588819b22d0de3aa1951e1991cc3e4b9aa105eecf6e3e24eb0a2fc8ab958b3e"},
+ {file = "grpcio_tools-1.48.2-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9771d4d317dca029dfaca7ec9282d8afe731c18bc536ece37fd39b8a974cc331"},
+ {file = "grpcio_tools-1.48.2-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:d886a9e052a038642b3af5d18e6f2085d1656d9788e202dc23258cf3a751e7ca"},
+ {file = "grpcio_tools-1.48.2-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:d77e8b1613876e0d8fd17709509d4ceba13492816426bd156f7e88a4c47e7158"},
+ {file = "grpcio_tools-1.48.2-cp36-cp36m-win32.whl", hash = "sha256:dcaaecdd5e847de5c1d533ea91522bf56c9e6b2dc98cdc0d45f0a1c26e846ea2"},
+ {file = "grpcio_tools-1.48.2-cp36-cp36m-win_amd64.whl", hash = "sha256:0119aabd9ceedfdf41b56b9fdc8284dd85a7f589d087f2694d743f346a368556"},
+ {file = "grpcio_tools-1.48.2-cp37-cp37m-linux_armv7l.whl", hash = "sha256:189be2a9b672300ca6845d94016bdacc052fdbe9d1ae9e85344425efae2ff8ef"},
+ {file = "grpcio_tools-1.48.2-cp37-cp37m-macosx_10_10_x86_64.whl", hash = "sha256:9443f5c30bac449237c3cf99da125f8d6e6c01e17972bc683ee73b75dea95573"},
+ {file = "grpcio_tools-1.48.2-cp37-cp37m-manylinux_2_17_aarch64.whl", hash = "sha256:e0403e095b343431195db1305248b50019ad55d3dd310254431af87e14ef83a2"},
+ {file = "grpcio_tools-1.48.2-cp37-cp37m-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5410d6b601d1404835e34466bd8aee37213489b36ee1aad2276366e265ff29d4"},
+ {file = "grpcio_tools-1.48.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:51be91b7c7056ff9ee48b1eccd4a2840b0126230803a5e09dfc082a5b16a91c1"},
+ {file = "grpcio_tools-1.48.2-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:516eedd5eb7af6326050bc2cfceb3a977b9cc1144f283c43cc4956905285c912"},
+ {file = "grpcio_tools-1.48.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:d18599ab572b2f15a8f3db49503272d1bb4fcabb4b4d1214ef03aca1816b20a0"},
+ {file = "grpcio_tools-1.48.2-cp37-cp37m-win32.whl", hash = "sha256:d18ef2adc05a8ef9e58ac46357f6d4ce7e43e077c7eda0a4425773461f9d0e6e"},
+ {file = "grpcio_tools-1.48.2-cp37-cp37m-win_amd64.whl", hash = "sha256:6d9753944e5a6b6b78b76ce9d2ae0fe3f748008c1849deb7fadcb64489d6553b"},
+ {file = "grpcio_tools-1.48.2-cp38-cp38-linux_armv7l.whl", hash = "sha256:3c8749dca04a8d302862ceeb1dfbdd071ee13b281395975f24405a347e5baa57"},
+ {file = "grpcio_tools-1.48.2-cp38-cp38-macosx_10_10_x86_64.whl", hash = "sha256:7307dd2408b82ea545ae63502ec03036b025f449568556ea9a056e06129a7a4e"},
+ {file = "grpcio_tools-1.48.2-cp38-cp38-manylinux_2_17_aarch64.whl", hash = "sha256:072234859f6069dc43a6be8ad6b7d682f4ba1dc2e2db2ebf5c75f62eee0f6dfb"},
+ {file = "grpcio_tools-1.48.2-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6cc298fbfe584de8876a85355efbcf796dfbcfac5948c9560f5df82e79336e2a"},
+ {file = "grpcio_tools-1.48.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f75973a42c710999acd419968bc79f00327e03e855bbe82c6529e003e49af660"},
+ {file = "grpcio_tools-1.48.2-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:f766050e491d0b3203b6b85638015f543816a2eb7d089fc04e86e00f6de0e31d"},
+ {file = "grpcio_tools-1.48.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:8e0d74403484eb77e8df2566a64b8b0b484b5c87903678c381634dd72f252d5e"},
+ {file = "grpcio_tools-1.48.2-cp38-cp38-win32.whl", hash = "sha256:cb75bac0cd43858cb759ef103fe68f8c540cb58b63dda127e710228fec3007b8"},
+ {file = "grpcio_tools-1.48.2-cp38-cp38-win_amd64.whl", hash = "sha256:cabc8b0905cedbc3b2b7b2856334fa35cce3d4bc79ae241cacd8cca8940a5c85"},
+ {file = "grpcio_tools-1.48.2-cp39-cp39-linux_armv7l.whl", hash = "sha256:e712a6d00606ad19abdeae852a7e521d6f6d0dcea843708fecf3a38be16a851e"},
+ {file = "grpcio_tools-1.48.2-cp39-cp39-macosx_10_10_x86_64.whl", hash = "sha256:e7e7668f89fd598c5469bb58e16bfd12b511d9947ccc75aec94da31f62bc3758"},
+ {file = "grpcio_tools-1.48.2-cp39-cp39-manylinux_2_17_aarch64.whl", hash = "sha256:a415fbec67d4ff7efe88794cbe00cf548d0f0a5484cceffe0a0c89d47694c491"},
+ {file = "grpcio_tools-1.48.2-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d96e96ae7361aa51c9cd9c73b677b51f691f98df6086860fcc3c45852d96b0b0"},
+ {file = "grpcio_tools-1.48.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e20d7885a40e68a2bda92908acbabcdf3c14dd386c3845de73ba139e9df1f132"},
+ {file = "grpcio_tools-1.48.2-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:8a5614251c46da07549e24f417cf989710250385e9d80deeafc53a0ee7df6325"},
+ {file = "grpcio_tools-1.48.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:ace0035766fe01a1b096aa050be9f0a9f98402317e7aeff8bfe55349be32a407"},
+ {file = "grpcio_tools-1.48.2-cp39-cp39-win32.whl", hash = "sha256:4fa4300b1be59b046492ed3c5fdb59760bc6433f44c08f50de900f9552ec7461"},
+ {file = "grpcio_tools-1.48.2-cp39-cp39-win_amd64.whl", hash = "sha256:0fb6c1c1e56eb26b224adc028a4204b6ad0f8b292efa28067dff273bbc8b27c4"},
+]
+
+[package.dependencies]
+grpcio = ">=1.48.2"
+protobuf = ">=3.12.0,<4.0dev"
+setuptools = "*"
+
+[[package]]
+name = "gspread"
+version = "5.11.2"
+description = "Google Spreadsheets Python API"
+optional = false
+python-versions = ">=3.7"
+files = [
+ {file = "gspread-5.11.2-py3-none-any.whl", hash = "sha256:525a9d3ef712d5747867d32b61f5d7aa035ead0835b56cd1ae2a6d310eaef077"},
+ {file = "gspread-5.11.2.tar.gz", hash = "sha256:fdc477cbda48bc9ea77eb8a4bf737985bfdba44f04677e4d791eb70bcbae2b95"},
+]
+
+[package.dependencies]
+google-auth = ">=1.12.0"
+google-auth-oauthlib = ">=0.4.1"
+
+[[package]]
+name = "gunicorn"
+version = "22.0.0"
+description = "WSGI HTTP Server for UNIX"
+optional = false
+python-versions = ">=3.7"
+files = [
+ {file = "gunicorn-22.0.0-py3-none-any.whl", hash = "sha256:350679f91b24062c86e386e198a15438d53a7a8207235a78ba1b53df4c4378d9"},
+ {file = "gunicorn-22.0.0.tar.gz", hash = "sha256:4a0b436239ff76fb33f11c07a16482c521a7e09c1ce3cc293c2330afe01bec63"},
+]
+
+[package.dependencies]
+packaging = "*"
+
+[package.extras]
+eventlet = ["eventlet (>=0.24.1,!=0.36.0)"]
+gevent = ["gevent (>=1.4.0)"]
+setproctitle = ["setproctitle"]
+testing = ["coverage", "eventlet", "gevent", "pytest", "pytest-cov"]
+tornado = ["tornado (>=0.2)"]
+
+[[package]]
+name = "h11"
+version = "0.14.0"
+description = "A pure-Python, bring-your-own-I/O implementation of HTTP/1.1"
+optional = false
+python-versions = ">=3.7"
+files = [
+ {file = "h11-0.14.0-py3-none-any.whl", hash = "sha256:e3fe4ac4b851c468cc8363d500db52c2ead036020723024a109d37346efaa761"},
+ {file = "h11-0.14.0.tar.gz", hash = "sha256:8f19fbbe99e72420ff35c00b27a34cb9937e902a8b810e2c88300c6f0a3b699d"},
+]
+
+[[package]]
+name = "httpcore"
+version = "0.16.3"
+description = "A minimal low-level HTTP client."
+optional = false
+python-versions = ">=3.7"
+files = [
+ {file = "httpcore-0.16.3-py3-none-any.whl", hash = "sha256:da1fb708784a938aa084bde4feb8317056c55037247c787bd7e19eb2c2949dc0"},
+ {file = "httpcore-0.16.3.tar.gz", hash = "sha256:c5d6f04e2fc530f39e0c077e6a30caa53f1451096120f1f38b954afd0b17c0cb"},
+]
+
+[package.dependencies]
+anyio = ">=3.0,<5.0"
+certifi = "*"
+h11 = ">=0.13,<0.15"
+sniffio = "==1.*"
+
+[package.extras]
+http2 = ["h2 (>=3,<5)"]
+socks = ["socksio (==1.*)"]
+
+[[package]]
+name = "httplib2"
+version = "0.19.0"
+description = "A comprehensive HTTP client library."
+optional = false
+python-versions = "*"
+files = [
+ {file = "httplib2-0.19.0-py3-none-any.whl", hash = "sha256:749c32603f9bf16c1277f59531d502e8f1c2ca19901ae653b49c4ed698f0820e"},
+ {file = "httplib2-0.19.0.tar.gz", hash = "sha256:e0d428dad43c72dbce7d163b7753ffc7a39c097e6788ef10f4198db69b92f08e"},
+]
+
+[package.dependencies]
+pyparsing = ">=2.4.2,<3"
+
+[[package]]
+name = "httpx"
+version = "0.23.3"
+description = "The next generation HTTP client."
+optional = false
+python-versions = ">=3.7"
+files = [
+ {file = "httpx-0.23.3-py3-none-any.whl", hash = "sha256:a211fcce9b1254ea24f0cd6af9869b3d29aba40154e947d2a07bb499b3e310d6"},
+ {file = "httpx-0.23.3.tar.gz", hash = "sha256:9818458eb565bb54898ccb9b8b251a28785dd4a55afbc23d0eb410754fe7d0f9"},
+]
+
+[package.dependencies]
+certifi = "*"
+httpcore = ">=0.15.0,<0.17.0"
+rfc3986 = {version = ">=1.3,<2", extras = ["idna2008"]}
+sniffio = "*"
+
+[package.extras]
+brotli = ["brotli", "brotlicffi"]
+cli = ["click (==8.*)", "pygments (==2.*)", "rich (>=10,<13)"]
+http2 = ["h2 (>=3,<5)"]
+socks = ["socksio (==1.*)"]
+
+[[package]]
+name = "identify"
+version = "2.5.33"
+description = "File identification library for Python"
+optional = false
+python-versions = ">=3.8"
+files = [
+ {file = "identify-2.5.33-py2.py3-none-any.whl", hash = "sha256:d40ce5fcd762817627670da8a7d8d8e65f24342d14539c59488dc603bf662e34"},
+ {file = "identify-2.5.33.tar.gz", hash = "sha256:161558f9fe4559e1557e1bff323e8631f6a0e4837f7497767c1782832f16b62d"},
+]
+
+[package.extras]
+license = ["ukkonen"]
+
+[[package]]
+name = "idna"
+version = "3.7"
+description = "Internationalized Domain Names in Applications (IDNA)"
+optional = false
+python-versions = ">=3.5"
+files = [
+ {file = "idna-3.7-py3-none-any.whl", hash = "sha256:82fee1fc78add43492d3a1898bfa6d8a904cc97d8427f683ed8e798d07761aa0"},
+ {file = "idna-3.7.tar.gz", hash = "sha256:028ff3aadf0609c1fd278d8ea3089299412a7a8b9bd005dd08b9f8285bcb5cfc"},
+]
+
+[[package]]
+name = "importlib-metadata"
+version = "7.0.1"
+description = "Read metadata from Python packages"
+optional = false
+python-versions = ">=3.8"
+files = [
+ {file = "importlib_metadata-7.0.1-py3-none-any.whl", hash = "sha256:4805911c3a4ec7c3966410053e9ec6a1fecd629117df5adee56dfc9432a1081e"},
+ {file = "importlib_metadata-7.0.1.tar.gz", hash = "sha256:f238736bb06590ae52ac1fab06a3a9ef1d8dce2b7a35b5ab329371d6c8f5d2cc"},
+]
+
+[package.dependencies]
+zipp = ">=0.5"
+
+[package.extras]
+docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (<7.2.5)", "sphinx (>=3.5)", "sphinx-lint"]
+perf = ["ipython"]
+testing = ["flufl.flake8", "importlib-resources (>=1.3)", "packaging", "pyfakefs", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-mypy (>=0.9.1)", "pytest-perf (>=0.9.2)", "pytest-ruff"]
+
+[[package]]
+name = "importlib-resources"
+version = "6.1.1"
+description = "Read resources from Python packages"
+optional = false
+python-versions = ">=3.8"
+files = [
+ {file = "importlib_resources-6.1.1-py3-none-any.whl", hash = "sha256:e8bf90d8213b486f428c9c39714b920041cb02c184686a3dee24905aaa8105d6"},
+ {file = "importlib_resources-6.1.1.tar.gz", hash = "sha256:3893a00122eafde6894c59914446a512f728a0c1a45f9bb9b63721b6bacf0b4a"},
+]
+
+[package.dependencies]
+zipp = {version = ">=3.1.0", markers = "python_version < \"3.10\""}
+
+[package.extras]
+docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (<7.2.5)", "sphinx (>=3.5)", "sphinx-lint"]
+testing = ["pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-mypy (>=0.9.1)", "pytest-ruff", "zipp (>=3.17)"]
+
+[[package]]
+name = "impyla"
+version = "0.16.0"
+description = "Python client for the Impala distributed query engine"
+optional = false
+python-versions = "*"
+files = [
+ {file = "impyla-0.16.0.tar.gz", hash = "sha256:df46d6aae12cb3657293c739e2b61efead4ebf8186e813e7ea0d20e3527a3099"},
+]
+
+[package.dependencies]
+bitarray = "*"
+six = "*"
+thrift = ">=0.9.3"
+thriftpy2 = {version = ">=0.4.0,<0.5.0", markers = "python_version >= \"3.0\""}
+
+[package.extras]
+kerberos = ["thrift_sasl (==0.2.1)"]
+
+[[package]]
+name = "influxdb"
+version = "5.2.3"
+description = "InfluxDB client"
+optional = false
+python-versions = "*"
+files = [
+ {file = "influxdb-5.2.3-py2.py3-none-any.whl", hash = "sha256:270ec1ec9cf1927a38cf5ec808e76f364482977577eb8c335f6aed5fcdc4cb25"},
+ {file = "influxdb-5.2.3.tar.gz", hash = "sha256:30276c7e04bf7659424c733b239ba2f0804d7a1f3c59ec5dd3f88c56176c8d36"},
+]
+
+[package.dependencies]
+python-dateutil = ">=2.6.0"
+pytz = "*"
+requests = ">=2.17.0"
+six = ">=1.10.0"
+
+[package.extras]
+test = ["mock", "nose", "nose-cov", "requests-mock"]
+
+[[package]]
+name = "influxdb-client"
+version = "1.38.0"
+description = "InfluxDB 2.0 Python client library"
+optional = false
+python-versions = ">=3.7"
+files = [
+ {file = "influxdb_client-1.38.0-py3-none-any.whl", hash = "sha256:7d04c06b833800be3350c6cb8f19f01f3f4ab33a77a24969568a141e4e132358"},
+ {file = "influxdb_client-1.38.0.tar.gz", hash = "sha256:88ee8c1beb6b3b1359f4117d51704d5da5ac70e598b9fe786823e36ac86175a8"},
+]
+
+[package.dependencies]
+certifi = ">=14.05.14"
+python-dateutil = ">=2.5.3"
+reactivex = ">=4.0.4"
+setuptools = ">=21.0.0"
+urllib3 = ">=1.26.0"
+
+[package.extras]
+async = ["aiocsv (>=1.2.2)", "aiohttp (>=3.8.1)"]
+ciso = ["ciso8601 (>=2.1.1)"]
+extra = ["numpy", "pandas (>=0.25.3)"]
+test = ["aioresponses (>=0.7.3)", "coverage (>=4.0.3)", "flake8 (>=5.0.3)", "httpretty (==1.0.5)", "jinja2 (==3.1.2)", "nose (>=1.3.7)", "pluggy (>=0.3.1)", "psutil (>=5.6.3)", "py (>=1.4.31)", "pytest (>=5.0.0)", "pytest-cov (>=3.0.0)", "pytest-timeout (>=2.1.0)", "randomize (>=0.13)", "sphinx (==1.8.5)", "sphinx-rtd-theme"]
+
+[[package]]
+name = "iniconfig"
+version = "2.0.0"
+description = "brain-dead simple config-ini parsing"
+optional = false
+python-versions = ">=3.7"
+files = [
+ {file = "iniconfig-2.0.0-py3-none-any.whl", hash = "sha256:b6a85871a79d2e3b22d2d1b94ac2824226a63c6b741c88f7ae975f18b6778374"},
+ {file = "iniconfig-2.0.0.tar.gz", hash = "sha256:2d91e135bf72d31a410b17c16da610a82cb55f6b0477d1a902134b24a455b8b3"},
+]
+
+[[package]]
+name = "isodate"
+version = "0.6.1"
+description = "An ISO 8601 date/time/duration parser and formatter"
+optional = false
+python-versions = "*"
+files = [
+ {file = "isodate-0.6.1-py2.py3-none-any.whl", hash = "sha256:0751eece944162659049d35f4f549ed815792b38793f07cf73381c1c87cbed96"},
+ {file = "isodate-0.6.1.tar.gz", hash = "sha256:48c5881de7e8b0a0d648cb024c8062dc84e7b840ed81e864c7614fd3c127bde9"},
+]
+
+[package.dependencies]
+six = "*"
+
+[[package]]
+name = "itsdangerous"
+version = "2.1.2"
+description = "Safely pass data to untrusted environments and back."
+optional = false
+python-versions = ">=3.7"
+files = [
+ {file = "itsdangerous-2.1.2-py3-none-any.whl", hash = "sha256:2c2349112351b88699d8d4b6b075022c0808887cb7ad10069318a8b0bc88db44"},
+ {file = "itsdangerous-2.1.2.tar.gz", hash = "sha256:5dbbc68b317e5e42f327f9021763545dc3fc3bfe22e6deb96aaf1fc38874156a"},
+]
+
+[[package]]
+name = "jedi"
+version = "0.19.1"
+description = "An autocompletion tool for Python that can be used for text editors."
+optional = false
+python-versions = ">=3.6"
+files = [
+ {file = "jedi-0.19.1-py2.py3-none-any.whl", hash = "sha256:e983c654fe5c02867aef4cdfce5a2fbb4a50adc0af145f70504238f18ef5e7e0"},
+ {file = "jedi-0.19.1.tar.gz", hash = "sha256:cf0496f3651bc65d7174ac1b7d043eff454892c708a87d1b683e57b569927ffd"},
+]
+
+[package.dependencies]
+parso = ">=0.8.3,<0.9.0"
+
+[package.extras]
+docs = ["Jinja2 (==2.11.3)", "MarkupSafe (==1.1.1)", "Pygments (==2.8.1)", "alabaster (==0.7.12)", "babel (==2.9.1)", "chardet (==4.0.0)", "commonmark (==0.8.1)", "docutils (==0.17.1)", "future (==0.18.2)", "idna (==2.10)", "imagesize (==1.2.0)", "mock (==1.0.1)", "packaging (==20.9)", "pyparsing (==2.4.7)", "pytz (==2021.1)", "readthedocs-sphinx-ext (==2.1.4)", "recommonmark (==0.5.0)", "requests (==2.25.1)", "six (==1.15.0)", "snowballstemmer (==2.1.0)", "sphinx (==1.8.5)", "sphinx-rtd-theme (==0.4.3)", "sphinxcontrib-serializinghtml (==1.1.4)", "sphinxcontrib-websupport (==1.2.4)", "urllib3 (==1.26.4)"]
+qa = ["flake8 (==5.0.4)", "mypy (==0.971)", "types-setuptools (==67.2.0.1)"]
+testing = ["Django", "attrs", "colorama", "docopt", "pytest (<7.0.0)"]
+
+[[package]]
+name = "jinja2"
+version = "3.1.4"
+description = "A very fast and expressive template engine."
+optional = false
+python-versions = ">=3.7"
+files = [
+ {file = "jinja2-3.1.4-py3-none-any.whl", hash = "sha256:bc5dd2abb727a5319567b7a813e6a2e7318c39f4f487cfe6c89c6f9c7d25197d"},
+ {file = "jinja2-3.1.4.tar.gz", hash = "sha256:4a3aee7acbbe7303aede8e9648d13b8bf88a429282aa6122a993f0ac800cb369"},
+]
+
+[package.dependencies]
+MarkupSafe = ">=2.0"
+
+[package.extras]
+i18n = ["Babel (>=2.7)"]
+
+[[package]]
+name = "jmespath"
+version = "1.0.1"
+description = "JSON Matching Expressions"
+optional = false
+python-versions = ">=3.7"
+files = [
+ {file = "jmespath-1.0.1-py3-none-any.whl", hash = "sha256:02e2e4cc71b5bcab88332eebf907519190dd9e6e82107fa7f83b1003a6252980"},
+ {file = "jmespath-1.0.1.tar.gz", hash = "sha256:90261b206d6defd58fdd5e85f478bf633a2901798906be2ad389150c5c60edbe"},
+]
+
+[[package]]
+name = "jsonschema"
+version = "3.1.1"
+description = "An implementation of JSON Schema validation for Python"
+optional = false
+python-versions = "*"
+files = [
+ {file = "jsonschema-3.1.1-py2.py3-none-any.whl", hash = "sha256:94c0a13b4a0616458b42529091624e66700a17f847453e52279e35509a5b7631"},
+ {file = "jsonschema-3.1.1.tar.gz", hash = "sha256:2fa0684276b6333ff3c0b1b27081f4b2305f0a36cf702a23db50edb141893c3f"},
+]
+
+[package.dependencies]
+attrs = ">=17.4.0"
+importlib-metadata = "*"
+pyrsistent = ">=0.14.0"
+setuptools = "*"
+six = ">=1.11.0"
+
+[package.extras]
+format = ["idna", "jsonpointer (>1.13)", "rfc3987", "strict-rfc3339", "webcolors"]
+
+[[package]]
+name = "jwcrypto"
+version = "1.5.6"
+description = "Implementation of JOSE Web standards"
+optional = false
+python-versions = ">= 3.8"
+files = [
+ {file = "jwcrypto-1.5.6-py3-none-any.whl", hash = "sha256:150d2b0ebbdb8f40b77f543fb44ffd2baeff48788be71f67f03566692fd55789"},
+ {file = "jwcrypto-1.5.6.tar.gz", hash = "sha256:771a87762a0c081ae6166958a954f80848820b2ab066937dc8b8379d65b1b039"},
+]
+
+[package.dependencies]
+cryptography = ">=3.4"
+typing-extensions = ">=4.5.0"
+
+[[package]]
+name = "ldap3"
+version = "2.9.1"
+description = "A strictly RFC 4510 conforming LDAP V3 pure Python client library"
+optional = false
+python-versions = "*"
+files = [
+ {file = "ldap3-2.9.1-py2.py3-none-any.whl", hash = "sha256:5869596fc4948797020d3f03b7939da938778a0f9e2009f7a072ccf92b8e8d70"},
+ {file = "ldap3-2.9.1.tar.gz", hash = "sha256:f3e7fc4718e3f09dda568b57100095e0ce58633bcabbed8667ce3f8fbaa4229f"},
+]
+
+[package.dependencies]
+pyasn1 = ">=0.4.6"
+
+[[package]]
+name = "limits"
+version = "3.7.0"
+description = "Rate limiting utilities"
+optional = false
+python-versions = ">=3.7"
+files = [
+ {file = "limits-3.7.0-py3-none-any.whl", hash = "sha256:c528817b7fc15f3e86ad091ba3e40231f6430a91b753db864767684cda8a7f2e"},
+ {file = "limits-3.7.0.tar.gz", hash = "sha256:124c6a04d2f4b20990fb1de019eec9474d6c1346c70d8fd0561609b86998b64a"},
+]
+
+[package.dependencies]
+deprecated = ">=1.2"
+importlib-resources = ">=1.3"
+packaging = ">=21,<24"
+typing-extensions = "*"
+
+[package.extras]
+all = ["aetcd", "coredis (>=3.4.0,<5)", "emcache (>=0.6.1)", "emcache (>=1)", "etcd3", "motor (>=3,<4)", "pymemcache (>3,<5.0.0)", "pymongo (>4.1,<5)", "redis (>3,!=4.5.2,!=4.5.3,<6.0.0)", "redis (>=4.2.0,!=4.5.2,!=4.5.3)"]
+async-etcd = ["aetcd"]
+async-memcached = ["emcache (>=0.6.1)", "emcache (>=1)"]
+async-mongodb = ["motor (>=3,<4)"]
+async-redis = ["coredis (>=3.4.0,<5)"]
+etcd = ["etcd3"]
+memcached = ["pymemcache (>3,<5.0.0)"]
+mongodb = ["pymongo (>4.1,<5)"]
+redis = ["redis (>3,!=4.5.2,!=4.5.3,<6.0.0)"]
+rediscluster = ["redis (>=4.2.0,!=4.5.2,!=4.5.3)"]
+
+[[package]]
+name = "mako"
+version = "1.3.0"
+description = "A super-fast templating language that borrows the best ideas from the existing templating languages."
+optional = false
+python-versions = ">=3.8"
+files = [
+ {file = "Mako-1.3.0-py3-none-any.whl", hash = "sha256:57d4e997349f1a92035aa25c17ace371a4213f2ca42f99bee9a602500cfd54d9"},
+ {file = "Mako-1.3.0.tar.gz", hash = "sha256:e3a9d388fd00e87043edbe8792f45880ac0114e9c4adc69f6e9bfb2c55e3b11b"},
+]
+
+[package.dependencies]
+MarkupSafe = ">=0.9.2"
+
+[package.extras]
+babel = ["Babel"]
+lingua = ["lingua"]
+testing = ["pytest"]
+
+[[package]]
+name = "markdown-it-py"
+version = "3.0.0"
+description = "Python port of markdown-it. Markdown parsing, done right!"
+optional = false
+python-versions = ">=3.8"
+files = [
+ {file = "markdown-it-py-3.0.0.tar.gz", hash = "sha256:e3f60a94fa066dc52ec76661e37c851cb232d92f9886b15cb560aaada2df8feb"},
+ {file = "markdown_it_py-3.0.0-py3-none-any.whl", hash = "sha256:355216845c60bd96232cd8d8c40e8f9765cc86f46880e43a8fd22dc1a1a8cab1"},
+]
+
+[package.dependencies]
+mdurl = ">=0.1,<1.0"
+
+[package.extras]
+benchmarking = ["psutil", "pytest", "pytest-benchmark"]
+code-style = ["pre-commit (>=3.0,<4.0)"]
+compare = ["commonmark (>=0.9,<1.0)", "markdown (>=3.4,<4.0)", "mistletoe (>=1.0,<2.0)", "mistune (>=2.0,<3.0)", "panflute (>=2.3,<3.0)"]
+linkify = ["linkify-it-py (>=1,<3)"]
+plugins = ["mdit-py-plugins"]
+profiling = ["gprof2dot"]
+rtd = ["jupyter_sphinx", "mdit-py-plugins", "myst-parser", "pyyaml", "sphinx", "sphinx-copybutton", "sphinx-design", "sphinx_book_theme"]
+testing = ["coverage", "pytest", "pytest-cov", "pytest-regressions"]
+
+[[package]]
+name = "markupsafe"
+version = "2.1.1"
+description = "Safely add untrusted strings to HTML/XML markup."
+optional = false
+python-versions = ">=3.7"
+files = [
+ {file = "MarkupSafe-2.1.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:86b1f75c4e7c2ac2ccdaec2b9022845dbb81880ca318bb7a0a01fbf7813e3812"},
+ {file = "MarkupSafe-2.1.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:f121a1420d4e173a5d96e47e9a0c0dcff965afdf1626d28de1460815f7c4ee7a"},
+ {file = "MarkupSafe-2.1.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a49907dd8420c5685cfa064a1335b6754b74541bbb3706c259c02ed65b644b3e"},
+ {file = "MarkupSafe-2.1.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:10c1bfff05d95783da83491be968e8fe789263689c02724e0c691933c52994f5"},
+ {file = "MarkupSafe-2.1.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b7bd98b796e2b6553da7225aeb61f447f80a1ca64f41d83612e6139ca5213aa4"},
+ {file = "MarkupSafe-2.1.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:b09bf97215625a311f669476f44b8b318b075847b49316d3e28c08e41a7a573f"},
+ {file = "MarkupSafe-2.1.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:694deca8d702d5db21ec83983ce0bb4b26a578e71fbdbd4fdcd387daa90e4d5e"},
+ {file = "MarkupSafe-2.1.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:efc1913fd2ca4f334418481c7e595c00aad186563bbc1ec76067848c7ca0a933"},
+ {file = "MarkupSafe-2.1.1-cp310-cp310-win32.whl", hash = "sha256:4a33dea2b688b3190ee12bd7cfa29d39c9ed176bda40bfa11099a3ce5d3a7ac6"},
+ {file = "MarkupSafe-2.1.1-cp310-cp310-win_amd64.whl", hash = "sha256:dda30ba7e87fbbb7eab1ec9f58678558fd9a6b8b853530e176eabd064da81417"},
+ {file = "MarkupSafe-2.1.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:671cd1187ed5e62818414afe79ed29da836dde67166a9fac6d435873c44fdd02"},
+ {file = "MarkupSafe-2.1.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3799351e2336dc91ea70b034983ee71cf2f9533cdff7c14c90ea126bfd95d65a"},
+ {file = "MarkupSafe-2.1.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e72591e9ecd94d7feb70c1cbd7be7b3ebea3f548870aa91e2732960fa4d57a37"},
+ {file = "MarkupSafe-2.1.1-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6fbf47b5d3728c6aea2abb0589b5d30459e369baa772e0f37a0320185e87c980"},
+ {file = "MarkupSafe-2.1.1-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:d5ee4f386140395a2c818d149221149c54849dfcfcb9f1debfe07a8b8bd63f9a"},
+ {file = "MarkupSafe-2.1.1-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:bcb3ed405ed3222f9904899563d6fc492ff75cce56cba05e32eff40e6acbeaa3"},
+ {file = "MarkupSafe-2.1.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:e1c0b87e09fa55a220f058d1d49d3fb8df88fbfab58558f1198e08c1e1de842a"},
+ {file = "MarkupSafe-2.1.1-cp37-cp37m-win32.whl", hash = "sha256:8dc1c72a69aa7e082593c4a203dcf94ddb74bb5c8a731e4e1eb68d031e8498ff"},
+ {file = "MarkupSafe-2.1.1-cp37-cp37m-win_amd64.whl", hash = "sha256:97a68e6ada378df82bc9f16b800ab77cbf4b2fada0081794318520138c088e4a"},
+ {file = "MarkupSafe-2.1.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:e8c843bbcda3a2f1e3c2ab25913c80a3c5376cd00c6e8c4a86a89a28c8dc5452"},
+ {file = "MarkupSafe-2.1.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:0212a68688482dc52b2d45013df70d169f542b7394fc744c02a57374a4207003"},
+ {file = "MarkupSafe-2.1.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8e576a51ad59e4bfaac456023a78f6b5e6e7651dcd383bcc3e18d06f9b55d6d1"},
+ {file = "MarkupSafe-2.1.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4b9fe39a2ccc108a4accc2676e77da025ce383c108593d65cc909add5c3bd601"},
+ {file = "MarkupSafe-2.1.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:96e37a3dc86e80bf81758c152fe66dbf60ed5eca3d26305edf01892257049925"},
+ {file = "MarkupSafe-2.1.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:6d0072fea50feec76a4c418096652f2c3238eaa014b2f94aeb1d56a66b41403f"},
+ {file = "MarkupSafe-2.1.1-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:089cf3dbf0cd6c100f02945abeb18484bd1ee57a079aefd52cffd17fba910b88"},
+ {file = "MarkupSafe-2.1.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:6a074d34ee7a5ce3effbc526b7083ec9731bb3cbf921bbe1d3005d4d2bdb3a63"},
+ {file = "MarkupSafe-2.1.1-cp38-cp38-win32.whl", hash = "sha256:421be9fbf0ffe9ffd7a378aafebbf6f4602d564d34be190fc19a193232fd12b1"},
+ {file = "MarkupSafe-2.1.1-cp38-cp38-win_amd64.whl", hash = "sha256:fc7b548b17d238737688817ab67deebb30e8073c95749d55538ed473130ec0c7"},
+ {file = "MarkupSafe-2.1.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:e04e26803c9c3851c931eac40c695602c6295b8d432cbe78609649ad9bd2da8a"},
+ {file = "MarkupSafe-2.1.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:b87db4360013327109564f0e591bd2a3b318547bcef31b468a92ee504d07ae4f"},
+ {file = "MarkupSafe-2.1.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:99a2a507ed3ac881b975a2976d59f38c19386d128e7a9a18b7df6fff1fd4c1d6"},
+ {file = "MarkupSafe-2.1.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:56442863ed2b06d19c37f94d999035e15ee982988920e12a5b4ba29b62ad1f77"},
+ {file = "MarkupSafe-2.1.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3ce11ee3f23f79dbd06fb3d63e2f6af7b12db1d46932fe7bd8afa259a5996603"},
+ {file = "MarkupSafe-2.1.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:33b74d289bd2f5e527beadcaa3f401e0df0a89927c1559c8566c066fa4248ab7"},
+ {file = "MarkupSafe-2.1.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:43093fb83d8343aac0b1baa75516da6092f58f41200907ef92448ecab8825135"},
+ {file = "MarkupSafe-2.1.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:8e3dcf21f367459434c18e71b2a9532d96547aef8a871872a5bd69a715c15f96"},
+ {file = "MarkupSafe-2.1.1-cp39-cp39-win32.whl", hash = "sha256:d4306c36ca495956b6d568d276ac11fdd9c30a36f1b6eb928070dc5360b22e1c"},
+ {file = "MarkupSafe-2.1.1-cp39-cp39-win_amd64.whl", hash = "sha256:46d00d6cfecdde84d40e572d63735ef81423ad31184100411e6e3388d405e247"},
+ {file = "MarkupSafe-2.1.1.tar.gz", hash = "sha256:7f91197cc9e48f989d12e4e6fbc46495c446636dfc81b9ccf50bb0ec74b91d4b"},
+]
+
+[[package]]
+name = "marshmallow"
+version = "3.20.2"
+description = "A lightweight library for converting complex datatypes to and from native Python datatypes."
+optional = false
+python-versions = ">=3.8"
+files = [
+ {file = "marshmallow-3.20.2-py3-none-any.whl", hash = "sha256:c21d4b98fee747c130e6bc8f45c4b3199ea66bc00c12ee1f639f0aeca034d5e9"},
+ {file = "marshmallow-3.20.2.tar.gz", hash = "sha256:4c1daff273513dc5eb24b219a8035559dc573c8f322558ef85f5438ddd1236dd"},
+]
+
+[package.dependencies]
+packaging = ">=17.0"
+
+[package.extras]
+dev = ["pre-commit (>=2.4,<4.0)", "pytest", "pytz", "simplejson", "tox"]
+docs = ["alabaster (==0.7.15)", "autodocsumm (==0.2.12)", "sphinx (==7.2.6)", "sphinx-issues (==3.0.1)", "sphinx-version-warning (==1.1.2)"]
+lint = ["pre-commit (>=2.4,<4.0)"]
+tests = ["pytest", "pytz", "simplejson"]
+
+[[package]]
+name = "maxminddb"
+version = "2.5.2"
+description = "Reader for the MaxMind DB format"
+optional = false
+python-versions = ">=3.8"
+files = [
+ {file = "maxminddb-2.5.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:f5682963a5817066db50f219c33aaa7eb969888211a289a444c42b5dfa0c0f78"},
+ {file = "maxminddb-2.5.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3fe6bb1b5ea132fcd9fd7b16c80247f0ba667018d5f9f98cd645b297e3b02fbf"},
+ {file = "maxminddb-2.5.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:955a3ec4b161e872cc615b7a09ae9770049e9794e7b3832e3d78905a65c5049d"},
+ {file = "maxminddb-2.5.2-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:29d63e7711e5f95c7c190010e57dca9e262aee8ac300aaf75c3f7ede0b5a5863"},
+ {file = "maxminddb-2.5.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:08a540ec3661f6ca40499c86028e96dca5780e9d471b485dc797859b0b22dd22"},
+ {file = "maxminddb-2.5.2-cp310-cp310-win32.whl", hash = "sha256:17fdb691c389a0e956410d5baef9ad082a0aa67dd6aa231d193499e71a104c19"},
+ {file = "maxminddb-2.5.2-cp310-cp310-win_amd64.whl", hash = "sha256:d71b48d3dff9150a44e949b28fa5e7251a7a6895a3a77e200ce08410f096f12f"},
+ {file = "maxminddb-2.5.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:1409a045eb04cebb297221eab1020c4f05434d02c0961410f6996ef474482998"},
+ {file = "maxminddb-2.5.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d839c480e4b93bb37bb1cc2777d77e6b2127c006e60b56f748f10571d8b0e471"},
+ {file = "maxminddb-2.5.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:bca70905515fe50684974a9afaa7db4a4e9fbfdebcb0c2cde9db8e048e0d8145"},
+ {file = "maxminddb-2.5.2-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:67f97cd0c6aac39a51294b04a1e922532125285c24b18a58e2a9c92c7691fa9f"},
+ {file = "maxminddb-2.5.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:1a3fab6bea6cc59444e6bad2a4fbf91228f6f51dcb29d09ed091930a475bd8cb"},
+ {file = "maxminddb-2.5.2-cp311-cp311-win32.whl", hash = "sha256:a99e3125528ea31e807f80e8c5b65118dc5cc122d0a435f1691a3cc1df55840c"},
+ {file = "maxminddb-2.5.2-cp311-cp311-win_amd64.whl", hash = "sha256:b6adf63695fa5e3d2549f7c2c9d82c6d252edd5c6ba67074637d2cb944143673"},
+ {file = "maxminddb-2.5.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:ed504ca9f3c42e8e71bdbe21f5b818139a1448ac15d7bb6ce12cf41e3b7e2067"},
+ {file = "maxminddb-2.5.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5a5053231228d7cbf57d98a741b3cbee9efa9e689348dbb56c414e5a4c7f6f1c"},
+ {file = "maxminddb-2.5.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e7e8688342bab592647313cd2054779bcd35ad85933424ceae9f07e3a9779986"},
+ {file = "maxminddb-2.5.2-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:335ee3140b41d4e751c14f8fae297aa064c7d3f184c9fbb2790336123187c440"},
+ {file = "maxminddb-2.5.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:b0203fa2731da45e5461f6e8a0768e85bba8e02137a1598b3fcadf7cbfe8e6f2"},
+ {file = "maxminddb-2.5.2-cp312-cp312-win32.whl", hash = "sha256:8b89129de70e1629f200df9dfda4e4f477c26b05c29e0836604a00209c9466d5"},
+ {file = "maxminddb-2.5.2-cp312-cp312-win_amd64.whl", hash = "sha256:099f4e27feec4bb9658034a3eb853e746721fc15709030bee4f2f889f4a34185"},
+ {file = "maxminddb-2.5.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:19d8d1e9bbc5281fb4c8112d541d2bd350fd8b5ddfbb43a6951e46df7cd27b9d"},
+ {file = "maxminddb-2.5.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:94183a78628cad257183a88ce12a3bb9ffbfe0544bd0c1aafc1f9dc55629dd1b"},
+ {file = "maxminddb-2.5.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:17de49660372dcccaa23958eccdd1c2464f92f594d027045ad76788db14a5da4"},
+ {file = "maxminddb-2.5.2-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:ae05c4f87b1dd9a21d430c52451eef5f3bd5af609d093408db91fe0dc4d8d7d1"},
+ {file = "maxminddb-2.5.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:2cb718908b9dffa10e02361094158ae68ded5a82c750de89737437999a81bafe"},
+ {file = "maxminddb-2.5.2-cp38-cp38-win32.whl", hash = "sha256:e0faa0c4c458eb0eb2f267daa7b106baef72c3c7ebcbece00b9e974fc8321412"},
+ {file = "maxminddb-2.5.2-cp38-cp38-win_amd64.whl", hash = "sha256:bac5a29fdc5df9222f7baecbcc4a88b309a66a7d147b34160940c0850ee4b9c5"},
+ {file = "maxminddb-2.5.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:c204f53ef7c1d77e9fb0dba415dbb56419f2b08ccaca66cd772e29b3a793c3e7"},
+ {file = "maxminddb-2.5.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ae98508a200db6f7ae5985a53039aba8eef7ed71d34b0a0e9c9145c3e6139fc3"},
+ {file = "maxminddb-2.5.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3e9198d25e252b27d4e9526d5fcd4b78341c23153363a94f1246de5afcd39f6d"},
+ {file = "maxminddb-2.5.2-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:b85b008f8e2cf3abfabdc24041549c51c97ea9a8bc46eeeadac8cec7acf9fbf0"},
+ {file = "maxminddb-2.5.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:6f50210506e9818162ef6706d3127efb0575dfe2cc98a7236ca2011f1cc3effe"},
+ {file = "maxminddb-2.5.2-cp39-cp39-win32.whl", hash = "sha256:2bba43d370a57785f5ef61c10d0b4bf8de58d431da3c4c2ed78bb2ff3d07edbf"},
+ {file = "maxminddb-2.5.2-cp39-cp39-win_amd64.whl", hash = "sha256:2e01b09480b97d2ebe6765618fb12a0f52caa17368d6cf1f42481d6740428de7"},
+ {file = "maxminddb-2.5.2-pp310-pypy310_pp73-macosx_10_9_x86_64.whl", hash = "sha256:dd47d13376eaee2e8d1a1fb55d3d6ccdcc995bc931699967f7d5670ec6a454a3"},
+ {file = "maxminddb-2.5.2-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:abd626efaba4f0bc867462337f846796da0bb97b82125dbdbc63067947e353b0"},
+ {file = "maxminddb-2.5.2-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3ddbe547d83a2e28e81d9f59fd9708d3044ffb2398ee0f8df2e2a2e9cdea6646"},
+ {file = "maxminddb-2.5.2-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:22184fa2514c15f5b39e4e2522f4f73d00afcf5eb7102c473f9376f3c3a03b81"},
+ {file = "maxminddb-2.5.2-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:5cb6702fbcc5b209ac3cffacd9cf0a5155feabbeb6fdcf497038be7cb6e52da6"},
+ {file = "maxminddb-2.5.2-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c0c3ebfc0af00445089629faffa4c5a1fcc42a1ca5d7dffc42bba314fde20c6d"},
+ {file = "maxminddb-2.5.2-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:461dcf0a4f67aa1c9faea6d52c4060d39559bf68e99a514cf8c1e01af383f90b"},
+ {file = "maxminddb-2.5.2-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:e012e889639aab411f5483990188da51c968377f665dcb90584971dbf314d50a"},
+ {file = "maxminddb-2.5.2-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:20596e452d03071db37a72c8ef9236126c04ed342864f68db0adf0d1bc9f642e"},
+ {file = "maxminddb-2.5.2-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2ec51b66774b102824c9a3dd4916356283f6a61db1868d4ebcb98bf26486718e"},
+ {file = "maxminddb-2.5.2-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6fda0dd512f345cc92492f96c61a0df47efc2e2064c15e8053ab2114b362d64d"},
+ {file = "maxminddb-2.5.2-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:862fcfe226ebda29a537cdce678dc8dc71ca6540ad2483099f80c6a1ee4cdbdd"},
+ {file = "maxminddb-2.5.2.tar.gz", hash = "sha256:b3c33e4fc7821ee6c9f40837116e16ab6175863d4a64eee024c5bec686690a87"},
+]
+
+[package.dependencies]
+setuptools = ">=68.2.2"
+
+[[package]]
+name = "maxminddb-geolite2"
+version = "2018.703"
+description = "Provides access to the geolite2 database. This product includes GeoLite2 data created by MaxMind, available from http://www.maxmind.com/"
+optional = false
+python-versions = "*"
+files = [
+ {file = "maxminddb-geolite2-2018.703.tar.gz", hash = "sha256:2bd118c5567f3a8323d6c5da23a6e6d52cfc09cd9987b54eb712cf6001a96e03"},
+]
+
+[package.dependencies]
+maxminddb = "*"
+
+[[package]]
+name = "mdurl"
+version = "0.1.2"
+description = "Markdown URL utilities"
+optional = false
+python-versions = ">=3.7"
+files = [
+ {file = "mdurl-0.1.2-py3-none-any.whl", hash = "sha256:84008a41e51615a49fc9966191ff91509e3c40b939176e643fd50a5c2196b8f8"},
+ {file = "mdurl-0.1.2.tar.gz", hash = "sha256:bb413d29f5eea38f31dd4754dd7377d4465116fb207585f97bf925588687c1ba"},
+]
+
+[[package]]
+name = "memsql"
+version = "3.2.0"
+description = "Useful utilities and plugins for MemSQL integration."
+optional = false
+python-versions = "*"
+files = [
+ {file = "memsql-3.2.0.tar.gz", hash = "sha256:d986c979d066e243bb35a16c030358838b2c155b9f0dee046f7dac1e99c291d6"},
+]
+
+[package.dependencies]
+mysqlclient = ">=1.4,<3.0"
+python-dateutil = "<3.0"
+simplejson = "*"
+wraptor = "*"
+
+[[package]]
+name = "mock"
+version = "5.0.2"
+description = "Rolling backport of unittest.mock for all Pythons"
+optional = false
+python-versions = ">=3.6"
+files = [
+ {file = "mock-5.0.2-py3-none-any.whl", hash = "sha256:0e0bc5ba78b8db3667ad636d964eb963dc97a59f04c6f6214c5f0e4a8f726c56"},
+ {file = "mock-5.0.2.tar.gz", hash = "sha256:06f18d7d65b44428202b145a9a36e99c2ee00d1eb992df0caf881d4664377891"},
+]
+
+[package.extras]
+build = ["blurb", "twine", "wheel"]
+docs = ["sphinx"]
+test = ["pytest", "pytest-cov"]
+
+[[package]]
+name = "msgpack"
+version = "1.0.7"
+description = "MessagePack serializer"
+optional = false
+python-versions = ">=3.8"
+files = [
+ {file = "msgpack-1.0.7-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:04ad6069c86e531682f9e1e71b71c1c3937d6014a7c3e9edd2aa81ad58842862"},
+ {file = "msgpack-1.0.7-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:cca1b62fe70d761a282496b96a5e51c44c213e410a964bdffe0928e611368329"},
+ {file = "msgpack-1.0.7-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:e50ebce52f41370707f1e21a59514e3375e3edd6e1832f5e5235237db933c98b"},
+ {file = "msgpack-1.0.7-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4a7b4f35de6a304b5533c238bee86b670b75b03d31b7797929caa7a624b5dda6"},
+ {file = "msgpack-1.0.7-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:28efb066cde83c479dfe5a48141a53bc7e5f13f785b92ddde336c716663039ee"},
+ {file = "msgpack-1.0.7-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4cb14ce54d9b857be9591ac364cb08dc2d6a5c4318c1182cb1d02274029d590d"},
+ {file = "msgpack-1.0.7-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:b573a43ef7c368ba4ea06050a957c2a7550f729c31f11dd616d2ac4aba99888d"},
+ {file = "msgpack-1.0.7-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:ccf9a39706b604d884d2cb1e27fe973bc55f2890c52f38df742bc1d79ab9f5e1"},
+ {file = "msgpack-1.0.7-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:cb70766519500281815dfd7a87d3a178acf7ce95390544b8c90587d76b227681"},
+ {file = "msgpack-1.0.7-cp310-cp310-win32.whl", hash = "sha256:b610ff0f24e9f11c9ae653c67ff8cc03c075131401b3e5ef4b82570d1728f8a9"},
+ {file = "msgpack-1.0.7-cp310-cp310-win_amd64.whl", hash = "sha256:a40821a89dc373d6427e2b44b572efc36a2778d3f543299e2f24eb1a5de65415"},
+ {file = "msgpack-1.0.7-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:576eb384292b139821c41995523654ad82d1916da6a60cff129c715a6223ea84"},
+ {file = "msgpack-1.0.7-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:730076207cb816138cf1af7f7237b208340a2c5e749707457d70705715c93b93"},
+ {file = "msgpack-1.0.7-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:85765fdf4b27eb5086f05ac0491090fc76f4f2b28e09d9350c31aac25a5aaff8"},
+ {file = "msgpack-1.0.7-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3476fae43db72bd11f29a5147ae2f3cb22e2f1a91d575ef130d2bf49afd21c46"},
+ {file = "msgpack-1.0.7-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6d4c80667de2e36970ebf74f42d1088cc9ee7ef5f4e8c35eee1b40eafd33ca5b"},
+ {file = "msgpack-1.0.7-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5b0bf0effb196ed76b7ad883848143427a73c355ae8e569fa538365064188b8e"},
+ {file = "msgpack-1.0.7-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:f9a7c509542db4eceed3dcf21ee5267ab565a83555c9b88a8109dcecc4709002"},
+ {file = "msgpack-1.0.7-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:84b0daf226913133f899ea9b30618722d45feffa67e4fe867b0b5ae83a34060c"},
+ {file = "msgpack-1.0.7-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:ec79ff6159dffcc30853b2ad612ed572af86c92b5168aa3fc01a67b0fa40665e"},
+ {file = "msgpack-1.0.7-cp311-cp311-win32.whl", hash = "sha256:3e7bf4442b310ff154b7bb9d81eb2c016b7d597e364f97d72b1acc3817a0fdc1"},
+ {file = "msgpack-1.0.7-cp311-cp311-win_amd64.whl", hash = "sha256:3f0c8c6dfa6605ab8ff0611995ee30d4f9fcff89966cf562733b4008a3d60d82"},
+ {file = "msgpack-1.0.7-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:f0936e08e0003f66bfd97e74ee530427707297b0d0361247e9b4f59ab78ddc8b"},
+ {file = "msgpack-1.0.7-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:98bbd754a422a0b123c66a4c341de0474cad4a5c10c164ceed6ea090f3563db4"},
+ {file = "msgpack-1.0.7-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:b291f0ee7961a597cbbcc77709374087fa2a9afe7bdb6a40dbbd9b127e79afee"},
+ {file = "msgpack-1.0.7-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ebbbba226f0a108a7366bf4b59bf0f30a12fd5e75100c630267d94d7f0ad20e5"},
+ {file = "msgpack-1.0.7-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1e2d69948e4132813b8d1131f29f9101bc2c915f26089a6d632001a5c1349672"},
+ {file = "msgpack-1.0.7-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:bdf38ba2d393c7911ae989c3bbba510ebbcdf4ecbdbfec36272abe350c454075"},
+ {file = "msgpack-1.0.7-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:993584fc821c58d5993521bfdcd31a4adf025c7d745bbd4d12ccfecf695af5ba"},
+ {file = "msgpack-1.0.7-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:52700dc63a4676669b341ba33520f4d6e43d3ca58d422e22ba66d1736b0a6e4c"},
+ {file = "msgpack-1.0.7-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:e45ae4927759289c30ccba8d9fdce62bb414977ba158286b5ddaf8df2cddb5c5"},
+ {file = "msgpack-1.0.7-cp312-cp312-win32.whl", hash = "sha256:27dcd6f46a21c18fa5e5deed92a43d4554e3df8d8ca5a47bf0615d6a5f39dbc9"},
+ {file = "msgpack-1.0.7-cp312-cp312-win_amd64.whl", hash = "sha256:7687e22a31e976a0e7fc99c2f4d11ca45eff652a81eb8c8085e9609298916dcf"},
+ {file = "msgpack-1.0.7-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:5b6ccc0c85916998d788b295765ea0e9cb9aac7e4a8ed71d12e7d8ac31c23c95"},
+ {file = "msgpack-1.0.7-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:235a31ec7db685f5c82233bddf9858748b89b8119bf4538d514536c485c15fe0"},
+ {file = "msgpack-1.0.7-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:cab3db8bab4b7e635c1c97270d7a4b2a90c070b33cbc00c99ef3f9be03d3e1f7"},
+ {file = "msgpack-1.0.7-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0bfdd914e55e0d2c9e1526de210f6fe8ffe9705f2b1dfcc4aecc92a4cb4b533d"},
+ {file = "msgpack-1.0.7-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:36e17c4592231a7dbd2ed09027823ab295d2791b3b1efb2aee874b10548b7524"},
+ {file = "msgpack-1.0.7-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:38949d30b11ae5f95c3c91917ee7a6b239f5ec276f271f28638dec9156f82cfc"},
+ {file = "msgpack-1.0.7-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:ff1d0899f104f3921d94579a5638847f783c9b04f2d5f229392ca77fba5b82fc"},
+ {file = "msgpack-1.0.7-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:dc43f1ec66eb8440567186ae2f8c447d91e0372d793dfe8c222aec857b81a8cf"},
+ {file = "msgpack-1.0.7-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:dd632777ff3beaaf629f1ab4396caf7ba0bdd075d948a69460d13d44357aca4c"},
+ {file = "msgpack-1.0.7-cp38-cp38-win32.whl", hash = "sha256:4e71bc4416de195d6e9b4ee93ad3f2f6b2ce11d042b4d7a7ee00bbe0358bd0c2"},
+ {file = "msgpack-1.0.7-cp38-cp38-win_amd64.whl", hash = "sha256:8f5b234f567cf76ee489502ceb7165c2a5cecec081db2b37e35332b537f8157c"},
+ {file = "msgpack-1.0.7-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:bfef2bb6ef068827bbd021017a107194956918ab43ce4d6dc945ffa13efbc25f"},
+ {file = "msgpack-1.0.7-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:484ae3240666ad34cfa31eea7b8c6cd2f1fdaae21d73ce2974211df099a95d81"},
+ {file = "msgpack-1.0.7-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:3967e4ad1aa9da62fd53e346ed17d7b2e922cba5ab93bdd46febcac39be636fc"},
+ {file = "msgpack-1.0.7-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8dd178c4c80706546702c59529ffc005681bd6dc2ea234c450661b205445a34d"},
+ {file = "msgpack-1.0.7-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f6ffbc252eb0d229aeb2f9ad051200668fc3a9aaa8994e49f0cb2ffe2b7867e7"},
+ {file = "msgpack-1.0.7-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:822ea70dc4018c7e6223f13affd1c5c30c0f5c12ac1f96cd8e9949acddb48a61"},
+ {file = "msgpack-1.0.7-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:384d779f0d6f1b110eae74cb0659d9aa6ff35aaf547b3955abf2ab4c901c4819"},
+ {file = "msgpack-1.0.7-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:f64e376cd20d3f030190e8c32e1c64582eba56ac6dc7d5b0b49a9d44021b52fd"},
+ {file = "msgpack-1.0.7-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:5ed82f5a7af3697b1c4786053736f24a0efd0a1b8a130d4c7bfee4b9ded0f08f"},
+ {file = "msgpack-1.0.7-cp39-cp39-win32.whl", hash = "sha256:f26a07a6e877c76a88e3cecac8531908d980d3d5067ff69213653649ec0f60ad"},
+ {file = "msgpack-1.0.7-cp39-cp39-win_amd64.whl", hash = "sha256:1dc93e8e4653bdb5910aed79f11e165c85732067614f180f70534f056da97db3"},
+ {file = "msgpack-1.0.7.tar.gz", hash = "sha256:572efc93db7a4d27e404501975ca6d2d9775705c2d922390d878fcf768d92c87"},
+]
+
+[[package]]
+name = "mysql-connector"
+version = "2.2.9"
+description = "MySQL driver written in Python"
+optional = false
+python-versions = "*"
+files = [
+ {file = "mysql-connector-2.2.9.tar.gz", hash = "sha256:1733e6ce52a049243de3264f1fbc22a852cb35458c4ad739ba88189285efdf32"},
+]
+
+[[package]]
+name = "mysqlclient"
+version = "2.1.1"
+description = "Python interface to MySQL"
+optional = false
+python-versions = ">=3.5"
+files = [
+ {file = "mysqlclient-2.1.1-cp310-cp310-win_amd64.whl", hash = "sha256:c1ed71bd6244993b526113cca3df66428609f90e4652f37eb51c33496d478b37"},
+ {file = "mysqlclient-2.1.1-cp311-cp311-win_amd64.whl", hash = "sha256:c812b67e90082a840efb82a8978369e6e69fc62ce1bda4ca8f3084a9d862308b"},
+ {file = "mysqlclient-2.1.1-cp38-cp38-win_amd64.whl", hash = "sha256:0d1cd3a5a4d28c222fa199002810e8146cffd821410b67851af4cc80aeccd97c"},
+ {file = "mysqlclient-2.1.1-cp39-cp39-win_amd64.whl", hash = "sha256:b355c8b5a7d58f2e909acdbb050858390ee1b0e13672ae759e5e784110022994"},
+ {file = "mysqlclient-2.1.1-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:996924f3483fd36a34a5812210c69e71dea5a3d5978d01199b78b7f6d485c855"},
+ {file = "mysqlclient-2.1.1-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:dea88c8d3f5a5d9293dfe7f087c16dd350ceb175f2f6631c9cf4caf3e19b7a96"},
+ {file = "mysqlclient-2.1.1.tar.gz", hash = "sha256:828757e419fb11dd6c5ed2576ec92c3efaa93a0f7c39e263586d1ee779c3d782"},
+]
+
+[[package]]
+name = "ndg-httpsclient"
+version = "0.5.1"
+description = "Provides enhanced HTTPS support for httplib and urllib2 using PyOpenSSL"
+optional = false
+python-versions = ">=2.7,<3.0.dev0 || >=3.4.dev0"
+files = [
+ {file = "ndg_httpsclient-0.5.1-py2-none-any.whl", hash = "sha256:d2c7225f6a1c6cf698af4ebc962da70178a99bcde24ee6d1961c4f3338130d57"},
+ {file = "ndg_httpsclient-0.5.1-py3-none-any.whl", hash = "sha256:dd174c11d971b6244a891f7be2b32ca9853d3797a72edb34fa5d7b07d8fff7d4"},
+ {file = "ndg_httpsclient-0.5.1.tar.gz", hash = "sha256:d72faed0376ab039736c2ba12e30695e2788c4aa569c9c3e3d72131de2592210"},
+]
+
+[package.dependencies]
+pyasn1 = ">=0.1.1"
+PyOpenSSL = "*"
+
+[[package]]
+name = "netifaces"
+version = "0.11.0"
+description = "Portable network interface information."
+optional = false
+python-versions = "*"
+files = [
+ {file = "netifaces-0.11.0-cp27-cp27m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:eb4813b77d5df99903af4757ce980a98c4d702bbcb81f32a0b305a1537bdf0b1"},
+ {file = "netifaces-0.11.0-cp27-cp27m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:5f9ca13babe4d845e400921973f6165a4c2f9f3379c7abfc7478160e25d196a4"},
+ {file = "netifaces-0.11.0-cp27-cp27m-win32.whl", hash = "sha256:7dbb71ea26d304e78ccccf6faccef71bb27ea35e259fb883cfd7fd7b4f17ecb1"},
+ {file = "netifaces-0.11.0-cp27-cp27m-win_amd64.whl", hash = "sha256:0f6133ac02521270d9f7c490f0c8c60638ff4aec8338efeff10a1b51506abe85"},
+ {file = "netifaces-0.11.0-cp27-cp27mu-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:08e3f102a59f9eaef70948340aeb6c89bd09734e0dca0f3b82720305729f63ea"},
+ {file = "netifaces-0.11.0-cp27-cp27mu-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:c03fb2d4ef4e393f2e6ffc6376410a22a3544f164b336b3a355226653e5efd89"},
+ {file = "netifaces-0.11.0-cp34-cp34m-win32.whl", hash = "sha256:73ff21559675150d31deea8f1f8d7e9a9a7e4688732a94d71327082f517fc6b4"},
+ {file = "netifaces-0.11.0-cp35-cp35m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:815eafdf8b8f2e61370afc6add6194bd5a7252ae44c667e96c4c1ecf418811e4"},
+ {file = "netifaces-0.11.0-cp35-cp35m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:50721858c935a76b83dd0dd1ab472cad0a3ef540a1408057624604002fcfb45b"},
+ {file = "netifaces-0.11.0-cp35-cp35m-win32.whl", hash = "sha256:c9a3a47cd3aaeb71e93e681d9816c56406ed755b9442e981b07e3618fb71d2ac"},
+ {file = "netifaces-0.11.0-cp36-cp36m-macosx_10_15_x86_64.whl", hash = "sha256:aab1dbfdc55086c789f0eb37affccf47b895b98d490738b81f3b2360100426be"},
+ {file = "netifaces-0.11.0-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:c37a1ca83825bc6f54dddf5277e9c65dec2f1b4d0ba44b8fd42bc30c91aa6ea1"},
+ {file = "netifaces-0.11.0-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:28f4bf3a1361ab3ed93c5ef360c8b7d4a4ae060176a3529e72e5e4ffc4afd8b0"},
+ {file = "netifaces-0.11.0-cp36-cp36m-win32.whl", hash = "sha256:2650beee182fed66617e18474b943e72e52f10a24dc8cac1db36c41ee9c041b7"},
+ {file = "netifaces-0.11.0-cp36-cp36m-win_amd64.whl", hash = "sha256:cb925e1ca024d6f9b4f9b01d83215fd00fe69d095d0255ff3f64bffda74025c8"},
+ {file = "netifaces-0.11.0-cp37-cp37m-macosx_10_15_x86_64.whl", hash = "sha256:84e4d2e6973eccc52778735befc01638498781ce0e39aa2044ccfd2385c03246"},
+ {file = "netifaces-0.11.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:18917fbbdcb2d4f897153c5ddbb56b31fa6dd7c3fa9608b7e3c3a663df8206b5"},
+ {file = "netifaces-0.11.0-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:48324183af7f1bc44f5f197f3dad54a809ad1ef0c78baee2c88f16a5de02c4c9"},
+ {file = "netifaces-0.11.0-cp37-cp37m-win32.whl", hash = "sha256:8f7da24eab0d4184715d96208b38d373fd15c37b0dafb74756c638bd619ba150"},
+ {file = "netifaces-0.11.0-cp37-cp37m-win_amd64.whl", hash = "sha256:2479bb4bb50968089a7c045f24d120f37026d7e802ec134c4490eae994c729b5"},
+ {file = "netifaces-0.11.0-cp38-cp38-macosx_10_15_x86_64.whl", hash = "sha256:3ecb3f37c31d5d51d2a4d935cfa81c9bc956687c6f5237021b36d6fdc2815b2c"},
+ {file = "netifaces-0.11.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:96c0fe9696398253f93482c84814f0e7290eee0bfec11563bd07d80d701280c3"},
+ {file = "netifaces-0.11.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:c92ff9ac7c2282009fe0dcb67ee3cd17978cffbe0c8f4b471c00fe4325c9b4d4"},
+ {file = "netifaces-0.11.0-cp38-cp38-win32.whl", hash = "sha256:d07b01c51b0b6ceb0f09fc48ec58debd99d2c8430b09e56651addeaf5de48048"},
+ {file = "netifaces-0.11.0-cp38-cp38-win_amd64.whl", hash = "sha256:469fc61034f3daf095e02f9f1bbac07927b826c76b745207287bc594884cfd05"},
+ {file = "netifaces-0.11.0-cp39-cp39-macosx_10_15_x86_64.whl", hash = "sha256:5be83986100ed1fdfa78f11ccff9e4757297735ac17391b95e17e74335c2047d"},
+ {file = "netifaces-0.11.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:54ff6624eb95b8a07e79aa8817288659af174e954cca24cdb0daeeddfc03c4ff"},
+ {file = "netifaces-0.11.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:841aa21110a20dc1621e3dd9f922c64ca64dd1eb213c47267a2c324d823f6c8f"},
+ {file = "netifaces-0.11.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:e76c7f351e0444721e85f975ae92718e21c1f361bda946d60a214061de1f00a1"},
+ {file = "netifaces-0.11.0.tar.gz", hash = "sha256:043a79146eb2907edf439899f262b3dfe41717d34124298ed281139a8b93ca32"},
+]
+
+[[package]]
+name = "nodeenv"
+version = "1.8.0"
+description = "Node.js virtual environment builder"
+optional = false
+python-versions = ">=2.7,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*"
+files = [
+ {file = "nodeenv-1.8.0-py2.py3-none-any.whl", hash = "sha256:df865724bb3c3adc86b3876fa209771517b0cfe596beff01a92700e0e8be4cec"},
+ {file = "nodeenv-1.8.0.tar.gz", hash = "sha256:d51e0c37e64fbf47d017feac3145cdbb58836d7eee8c6f6d3b6880c5456227d2"},
+]
+
+[package.dependencies]
+setuptools = "*"
+
+[[package]]
+name = "numpy"
+version = "1.24.4"
+description = "Fundamental package for array computing in Python"
+optional = false
+python-versions = ">=3.8"
+files = [
+ {file = "numpy-1.24.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:c0bfb52d2169d58c1cdb8cc1f16989101639b34c7d3ce60ed70b19c63eba0b64"},
+ {file = "numpy-1.24.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:ed094d4f0c177b1b8e7aa9cba7d6ceed51c0e569a5318ac0ca9a090680a6a1b1"},
+ {file = "numpy-1.24.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:79fc682a374c4a8ed08b331bef9c5f582585d1048fa6d80bc6c35bc384eee9b4"},
+ {file = "numpy-1.24.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7ffe43c74893dbf38c2b0a1f5428760a1a9c98285553c89e12d70a96a7f3a4d6"},
+ {file = "numpy-1.24.4-cp310-cp310-win32.whl", hash = "sha256:4c21decb6ea94057331e111a5bed9a79d335658c27ce2adb580fb4d54f2ad9bc"},
+ {file = "numpy-1.24.4-cp310-cp310-win_amd64.whl", hash = "sha256:b4bea75e47d9586d31e892a7401f76e909712a0fd510f58f5337bea9572c571e"},
+ {file = "numpy-1.24.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:f136bab9c2cfd8da131132c2cf6cc27331dd6fae65f95f69dcd4ae3c3639c810"},
+ {file = "numpy-1.24.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:e2926dac25b313635e4d6cf4dc4e51c8c0ebfed60b801c799ffc4c32bf3d1254"},
+ {file = "numpy-1.24.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:222e40d0e2548690405b0b3c7b21d1169117391c2e82c378467ef9ab4c8f0da7"},
+ {file = "numpy-1.24.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7215847ce88a85ce39baf9e89070cb860c98fdddacbaa6c0da3ffb31b3350bd5"},
+ {file = "numpy-1.24.4-cp311-cp311-win32.whl", hash = "sha256:4979217d7de511a8d57f4b4b5b2b965f707768440c17cb70fbf254c4b225238d"},
+ {file = "numpy-1.24.4-cp311-cp311-win_amd64.whl", hash = "sha256:b7b1fc9864d7d39e28f41d089bfd6353cb5f27ecd9905348c24187a768c79694"},
+ {file = "numpy-1.24.4-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1452241c290f3e2a312c137a9999cdbf63f78864d63c79039bda65ee86943f61"},
+ {file = "numpy-1.24.4-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:04640dab83f7c6c85abf9cd729c5b65f1ebd0ccf9de90b270cd61935eef0197f"},
+ {file = "numpy-1.24.4-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a5425b114831d1e77e4b5d812b69d11d962e104095a5b9c3b641a218abcc050e"},
+ {file = "numpy-1.24.4-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dd80e219fd4c71fc3699fc1dadac5dcf4fd882bfc6f7ec53d30fa197b8ee22dc"},
+ {file = "numpy-1.24.4-cp38-cp38-win32.whl", hash = "sha256:4602244f345453db537be5314d3983dbf5834a9701b7723ec28923e2889e0bb2"},
+ {file = "numpy-1.24.4-cp38-cp38-win_amd64.whl", hash = "sha256:692f2e0f55794943c5bfff12b3f56f99af76f902fc47487bdfe97856de51a706"},
+ {file = "numpy-1.24.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:2541312fbf09977f3b3ad449c4e5f4bb55d0dbf79226d7724211acc905049400"},
+ {file = "numpy-1.24.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:9667575fb6d13c95f1b36aca12c5ee3356bf001b714fc354eb5465ce1609e62f"},
+ {file = "numpy-1.24.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f3a86ed21e4f87050382c7bc96571755193c4c1392490744ac73d660e8f564a9"},
+ {file = "numpy-1.24.4-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d11efb4dbecbdf22508d55e48d9c8384db795e1b7b51ea735289ff96613ff74d"},
+ {file = "numpy-1.24.4-cp39-cp39-win32.whl", hash = "sha256:6620c0acd41dbcb368610bb2f4d83145674040025e5536954782467100aa8835"},
+ {file = "numpy-1.24.4-cp39-cp39-win_amd64.whl", hash = "sha256:befe2bf740fd8373cf56149a5c23a0f601e82869598d41f8e188a0e9869926f8"},
+ {file = "numpy-1.24.4-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:31f13e25b4e304632a4619d0e0777662c2ffea99fcae2029556b17d8ff958aef"},
+ {file = "numpy-1.24.4-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:95f7ac6540e95bc440ad77f56e520da5bf877f87dca58bd095288dce8940532a"},
+ {file = "numpy-1.24.4-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:e98f220aa76ca2a977fe435f5b04d7b3470c0a2e6312907b37ba6068f26787f2"},
+ {file = "numpy-1.24.4.tar.gz", hash = "sha256:80f5e3a4e498641401868df4208b74581206afbee7cf7b8329daae82676d9463"},
+]
+
+[[package]]
+name = "numpy"
+version = "1.26.3"
+description = "Fundamental package for array computing in Python"
+optional = false
+python-versions = ">=3.9"
+files = [
+ {file = "numpy-1.26.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:806dd64230dbbfaca8a27faa64e2f414bf1c6622ab78cc4264f7f5f028fee3bf"},
+ {file = "numpy-1.26.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:02f98011ba4ab17f46f80f7f8f1c291ee7d855fcef0a5a98db80767a468c85cd"},
+ {file = "numpy-1.26.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6d45b3ec2faed4baca41c76617fcdcfa4f684ff7a151ce6fc78ad3b6e85af0a6"},
+ {file = "numpy-1.26.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bdd2b45bf079d9ad90377048e2747a0c82351989a2165821f0c96831b4a2a54b"},
+ {file = "numpy-1.26.3-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:211ddd1e94817ed2d175b60b6374120244a4dd2287f4ece45d49228b4d529178"},
+ {file = "numpy-1.26.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:b1240f767f69d7c4c8a29adde2310b871153df9b26b5cb2b54a561ac85146485"},
+ {file = "numpy-1.26.3-cp310-cp310-win32.whl", hash = "sha256:21a9484e75ad018974a2fdaa216524d64ed4212e418e0a551a2d83403b0531d3"},
+ {file = "numpy-1.26.3-cp310-cp310-win_amd64.whl", hash = "sha256:9e1591f6ae98bcfac2a4bbf9221c0b92ab49762228f38287f6eeb5f3f55905ce"},
+ {file = "numpy-1.26.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:b831295e5472954104ecb46cd98c08b98b49c69fdb7040483aff799a755a7374"},
+ {file = "numpy-1.26.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:9e87562b91f68dd8b1c39149d0323b42e0082db7ddb8e934ab4c292094d575d6"},
+ {file = "numpy-1.26.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8c66d6fec467e8c0f975818c1796d25c53521124b7cfb760114be0abad53a0a2"},
+ {file = "numpy-1.26.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f25e2811a9c932e43943a2615e65fc487a0b6b49218899e62e426e7f0a57eeda"},
+ {file = "numpy-1.26.3-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:af36e0aa45e25c9f57bf684b1175e59ea05d9a7d3e8e87b7ae1a1da246f2767e"},
+ {file = "numpy-1.26.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:51c7f1b344f302067b02e0f5b5d2daa9ed4a721cf49f070280ac202738ea7f00"},
+ {file = "numpy-1.26.3-cp311-cp311-win32.whl", hash = "sha256:7ca4f24341df071877849eb2034948459ce3a07915c2734f1abb4018d9c49d7b"},
+ {file = "numpy-1.26.3-cp311-cp311-win_amd64.whl", hash = "sha256:39763aee6dfdd4878032361b30b2b12593fb445ddb66bbac802e2113eb8a6ac4"},
+ {file = "numpy-1.26.3-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:a7081fd19a6d573e1a05e600c82a1c421011db7935ed0d5c483e9dd96b99cf13"},
+ {file = "numpy-1.26.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:12c70ac274b32bc00c7f61b515126c9205323703abb99cd41836e8125ea0043e"},
+ {file = "numpy-1.26.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7f784e13e598e9594750b2ef6729bcd5a47f6cfe4a12cca13def35e06d8163e3"},
+ {file = "numpy-1.26.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5f24750ef94d56ce6e33e4019a8a4d68cfdb1ef661a52cdaee628a56d2437419"},
+ {file = "numpy-1.26.3-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:77810ef29e0fb1d289d225cabb9ee6cf4d11978a00bb99f7f8ec2132a84e0166"},
+ {file = "numpy-1.26.3-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8ed07a90f5450d99dad60d3799f9c03c6566709bd53b497eb9ccad9a55867f36"},
+ {file = "numpy-1.26.3-cp312-cp312-win32.whl", hash = "sha256:f73497e8c38295aaa4741bdfa4fda1a5aedda5473074369eca10626835445511"},
+ {file = "numpy-1.26.3-cp312-cp312-win_amd64.whl", hash = "sha256:da4b0c6c699a0ad73c810736303f7fbae483bcb012e38d7eb06a5e3b432c981b"},
+ {file = "numpy-1.26.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:1666f634cb3c80ccbd77ec97bc17337718f56d6658acf5d3b906ca03e90ce87f"},
+ {file = "numpy-1.26.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:18c3319a7d39b2c6a9e3bb75aab2304ab79a811ac0168a671a62e6346c29b03f"},
+ {file = "numpy-1.26.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0b7e807d6888da0db6e7e75838444d62495e2b588b99e90dd80c3459594e857b"},
+ {file = "numpy-1.26.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b4d362e17bcb0011738c2d83e0a65ea8ce627057b2fdda37678f4374a382a137"},
+ {file = "numpy-1.26.3-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:b8c275f0ae90069496068c714387b4a0eba5d531aace269559ff2b43655edd58"},
+ {file = "numpy-1.26.3-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:cc0743f0302b94f397a4a65a660d4cd24267439eb16493fb3caad2e4389bccbb"},
+ {file = "numpy-1.26.3-cp39-cp39-win32.whl", hash = "sha256:9bc6d1a7f8cedd519c4b7b1156d98e051b726bf160715b769106661d567b3f03"},
+ {file = "numpy-1.26.3-cp39-cp39-win_amd64.whl", hash = "sha256:867e3644e208c8922a3be26fc6bbf112a035f50f0a86497f98f228c50c607bb2"},
+ {file = "numpy-1.26.3-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:3c67423b3703f8fbd90f5adaa37f85b5794d3366948efe9a5190a5f3a83fc34e"},
+ {file = "numpy-1.26.3-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:46f47ee566d98849323f01b349d58f2557f02167ee301e5e28809a8c0e27a2d0"},
+ {file = "numpy-1.26.3-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:a8474703bffc65ca15853d5fd4d06b18138ae90c17c8d12169968e998e448bb5"},
+ {file = "numpy-1.26.3.tar.gz", hash = "sha256:697df43e2b6310ecc9d95f05d5ef20eacc09c7c4ecc9da3f235d39e71b7da1e4"},
+]
+
+[[package]]
+name = "nzalchemy"
+version = "11.0.2"
+description = "Netezza Dialect for SQLAlchemy"
+optional = false
+python-versions = "*"
+files = [
+ {file = "nzalchemy-11.0.2-py3-none-any.whl", hash = "sha256:b7a7909bdab2a511f46d205598ec2b7d09cea1042f2a04e8351048c7a2811348"},
+ {file = "nzalchemy-11.0.2.tar.gz", hash = "sha256:4e05a5a8df984d43b8d301f7b19fb4f4408bac9eb0421205adb1ab29827de50e"},
+]
+
+[package.dependencies]
+nzpy = "*"
+SQLAlchemy = "<=1.3.24"
+
+[[package]]
+name = "nzpy"
+version = "1.15"
+description = "IBM Netezza python driver"
+optional = false
+python-versions = ">=3.5"
+files = [
+ {file = "nzpy-1.15-py3-none-any.whl", hash = "sha256:ca0290046cb9117032ae022fc64d11a383f753f410f0459657419f7ff1769820"},
+ {file = "nzpy-1.15.tar.gz", hash = "sha256:6e492b7e7644a5f6f14d542372b4bfed58e8fcb8c79c075e3bfd27a1f7805ba6"},
+]
+
+[package.dependencies]
+scramp = "1.1.0"
+
+[[package]]
+name = "oauth2client"
+version = "4.1.3"
+description = "OAuth 2.0 client library"
+optional = false
+python-versions = "*"
+files = [
+ {file = "oauth2client-4.1.3-py2.py3-none-any.whl", hash = "sha256:b8a81cc5d60e2d364f0b1b98f958dbd472887acaf1a5b05e21c28c31a2d6d3ac"},
+ {file = "oauth2client-4.1.3.tar.gz", hash = "sha256:d486741e451287f69568a4d26d70d9acd73a2bbfa275746c535b4209891cccc6"},
+]
+
+[package.dependencies]
+httplib2 = ">=0.9.1"
+pyasn1 = ">=0.1.7"
+pyasn1-modules = ">=0.0.5"
+rsa = ">=3.1.4"
+six = ">=1.6.1"
+
+[[package]]
+name = "oauthlib"
+version = "3.2.2"
+description = "A generic, spec-compliant, thorough implementation of the OAuth request-signing logic"
+optional = false
+python-versions = ">=3.6"
+files = [
+ {file = "oauthlib-3.2.2-py3-none-any.whl", hash = "sha256:8139f29aac13e25d502680e9e19963e83f16838d48a0d71c287fe40e7067fbca"},
+ {file = "oauthlib-3.2.2.tar.gz", hash = "sha256:9859c40929662bec5d64f34d01c99e093149682a3f38915dc0655d5a633dd918"},
+]
+
+[package.extras]
+rsa = ["cryptography (>=3.0.0)"]
+signals = ["blinker (>=1.4.0)"]
+signedtoken = ["cryptography (>=3.0.0)", "pyjwt (>=2.0.0,<3)"]
+
+[[package]]
+name = "openpyxl"
+version = "3.0.7"
+description = "A Python library to read/write Excel 2010 xlsx/xlsm files"
+optional = false
+python-versions = ">=3.6,"
+files = [
+ {file = "openpyxl-3.0.7-py2.py3-none-any.whl", hash = "sha256:46af4eaf201a89b610fcca177eed957635f88770a5462fb6aae4a2a52b0ff516"},
+ {file = "openpyxl-3.0.7.tar.gz", hash = "sha256:6456a3b472e1ef0facb1129f3c6ef00713cebf62e736cd7a75bcc3247432f251"},
+]
+
+[package.dependencies]
+et-xmlfile = "*"
+
+[[package]]
+name = "oracledb"
+version = "2.1.2"
+description = "Python interface to Oracle Database"
+optional = false
+python-versions = ">=3.7"
+files = [
+ {file = "oracledb-2.1.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:4ffaba9504c638c29129b484cf547accf750bd0f86df1ca6194646a4d2540691"},
+ {file = "oracledb-2.1.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:71d98deb1e3a500920f5460d457925f0c8cef8d037881fdbd16df1c4734453dd"},
+ {file = "oracledb-2.1.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bde2df672704fbe12ab0653f6e808b1ed62de28c6864b17fc3a1fcac9c1fd472"},
+ {file = "oracledb-2.1.2-cp310-cp310-win32.whl", hash = "sha256:3b3798a1220fc8736a37b9280d0ae4cdf263bb203fc6e2b3a82c33f9a2010702"},
+ {file = "oracledb-2.1.2-cp310-cp310-win_amd64.whl", hash = "sha256:92620efd5eb0d23b252d75f2f2ff1deadf25f44546903e3283760cb276d524ed"},
+ {file = "oracledb-2.1.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:b913a164e1830d0e955b88d97c5e4da4d2402f8a8b0d38febb6ad5a8ef9e4743"},
+ {file = "oracledb-2.1.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c53827344c6d001f492aee0a3acb6c1b6c0f3030c2f5dc8cb86dc4f0bb4dd1ab"},
+ {file = "oracledb-2.1.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:50225074841d5f9b281d620c012ced4b0946ff5a941c8b639be7babda5190709"},
+ {file = "oracledb-2.1.2-cp311-cp311-win32.whl", hash = "sha256:a043b4df2919411b787bcd24ffa4286249a11d05d29bb20bb076d108c3c6f777"},
+ {file = "oracledb-2.1.2-cp311-cp311-win_amd64.whl", hash = "sha256:9edce208c26ee018e43b75323888743031be3e9f0c0e4221abf037129c12d949"},
+ {file = "oracledb-2.1.2-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:08aa313b801dda950918168d3962ba59a617adce143e0c2bf1ee9b847695faaa"},
+ {file = "oracledb-2.1.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:de5c932b04d3bcdd22c71c0e5c5e1d16b6a3a2fc68dc472ee3a12e677461354c"},
+ {file = "oracledb-2.1.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1d590caf39b1901bcba394fcda9815438faff0afaf374025f89ef5d65993d0a4"},
+ {file = "oracledb-2.1.2-cp312-cp312-win32.whl", hash = "sha256:1e3ffdfe76c97d1ca13a3fecf239c96d3889015bb5b775dc22b947108044b01e"},
+ {file = "oracledb-2.1.2-cp312-cp312-win_amd64.whl", hash = "sha256:8c1eaf8c74bb6de5772de768f2f3f5eb935ab935c633d3a012ddff7e691a2073"},
+ {file = "oracledb-2.1.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:e2ee06e154e08cc5e4037855d74dc6e37dc054c91a7a1a372bb60d4442e2ed3d"},
+ {file = "oracledb-2.1.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1a21d84aaf5dddab0cfa8ab7c23272c0295a5c796f212a4ce8a6b499643663dd"},
+ {file = "oracledb-2.1.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b337f7cf30753c3a32302fbc25ca80d7ff5049dd9333e681236a674a90c21caf"},
+ {file = "oracledb-2.1.2-cp37-cp37m-win32.whl", hash = "sha256:b5d936763a9b26d32c4e460dbb346c2a962fcc98e6df33dd2d81fdc2eb26f1e4"},
+ {file = "oracledb-2.1.2-cp37-cp37m-win_amd64.whl", hash = "sha256:0ea32b87b7202811d85082f10bf7789747ce45f195be4199c5611e7d76a79e78"},
+ {file = "oracledb-2.1.2-cp38-cp38-macosx_11_0_universal2.whl", hash = "sha256:f94b22da87e051e3a8620d2b04d99e1cc9d9abb4da6736d6ae0ca436ba03fb86"},
+ {file = "oracledb-2.1.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:391034ee66717dba514e765263d08d18a2aa7badde373f82599b89e46fa3720a"},
+ {file = "oracledb-2.1.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1a2d9891244b9b94465e30af8cc79380bbb41081c5dc0511cbc94cc250e9e26d"},
+ {file = "oracledb-2.1.2-cp38-cp38-win32.whl", hash = "sha256:9a9a6e0bf61952c2c82614b98fe896d2cda17d81ffca4527556e6607b10e3365"},
+ {file = "oracledb-2.1.2-cp38-cp38-win_amd64.whl", hash = "sha256:d9a6447589f203ca846526c99a667537b099d54ddeff09d24f9da59bdcc8f98b"},
+ {file = "oracledb-2.1.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:8eb688dd1f8ea2038d17bc84fb651aa1e994b155d3cb8b8387df70ab2a7b4c4c"},
+ {file = "oracledb-2.1.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0f22c31b894bb085a33d70e174c9bcd0abafc630c2c941ff0d630ee3852f1aa6"},
+ {file = "oracledb-2.1.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e5bc03520b8bd4dbf2ac4d937d298a85a7208ffbeec738eea92ad7bb00e7134a"},
+ {file = "oracledb-2.1.2-cp39-cp39-win32.whl", hash = "sha256:5d4f6bd1036d7edbb96d8d31f0ca53696a013c00ac82fc19ac0ca374d2265b2c"},
+ {file = "oracledb-2.1.2-cp39-cp39-win_amd64.whl", hash = "sha256:69bde9770392c1c859b1e1d767dbb9ca4c57e3f2946ca90c779d9402a7e96111"},
+ {file = "oracledb-2.1.2.tar.gz", hash = "sha256:3054bcc295d7378834ba7a5aceb865985e954915f9b07a843ea84c3824c6a0b2"},
+]
+
+[package.dependencies]
+cryptography = ">=3.2.1"
+
+[[package]]
+name = "ordered-set"
+version = "4.1.0"
+description = "An OrderedSet is a custom MutableSet that remembers its order, so that every"
+optional = false
+python-versions = ">=3.7"
+files = [
+ {file = "ordered-set-4.1.0.tar.gz", hash = "sha256:694a8e44c87657c59292ede72891eb91d34131f6531463aab3009191c77364a8"},
+ {file = "ordered_set-4.1.0-py3-none-any.whl", hash = "sha256:046e1132c71fcf3330438a539928932caf51ddbc582496833e23de611de14562"},
+]
+
+[package.extras]
+dev = ["black", "mypy", "pytest"]
+
+[[package]]
+name = "packaging"
+version = "23.2"
+description = "Core utilities for Python packages"
+optional = false
+python-versions = ">=3.7"
+files = [
+ {file = "packaging-23.2-py3-none-any.whl", hash = "sha256:8c491190033a9af7e1d931d0b5dacc2ef47509b34dd0de67ed209b5203fc88c7"},
+ {file = "packaging-23.2.tar.gz", hash = "sha256:048fb0e9405036518eaaf48a55953c750c11e1a1b68e0dd1a9d62ed0c092cfc5"},
+]
+
+[[package]]
+name = "pandas"
+version = "1.3.4"
+description = "Powerful data structures for data analysis, time series, and statistics"
+optional = false
+python-versions = ">=3.7.1"
+files = [
+ {file = "pandas-1.3.4-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:9707bdc1ea9639c886b4d3be6e2a45812c1ac0c2080f94c31b71c9fa35556f9b"},
+ {file = "pandas-1.3.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c2f44425594ae85e119459bb5abb0748d76ef01d9c08583a667e3339e134218e"},
+ {file = "pandas-1.3.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:372d72a3d8a5f2dbaf566a5fa5fa7f230842ac80f29a931fb4b071502cf86b9a"},
+ {file = "pandas-1.3.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d99d2350adb7b6c3f7f8f0e5dfb7d34ff8dd4bc0a53e62c445b7e43e163fce63"},
+ {file = "pandas-1.3.4-cp310-cp310-win_amd64.whl", hash = "sha256:4acc28364863127bca1029fb72228e6f473bb50c32e77155e80b410e2068eeac"},
+ {file = "pandas-1.3.4-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:c2646458e1dce44df9f71a01dc65f7e8fa4307f29e5c0f2f92c97f47a5bf22f5"},
+ {file = "pandas-1.3.4-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5298a733e5bfbb761181fd4672c36d0c627320eb999c59c65156c6a90c7e1b4f"},
+ {file = "pandas-1.3.4-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:22808afb8f96e2269dcc5b846decacb2f526dd0b47baebc63d913bf847317c8f"},
+ {file = "pandas-1.3.4-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b528e126c13816a4374e56b7b18bfe91f7a7f6576d1aadba5dee6a87a7f479ae"},
+ {file = "pandas-1.3.4-cp37-cp37m-win32.whl", hash = "sha256:fe48e4925455c964db914b958f6e7032d285848b7538a5e1b19aeb26ffaea3ec"},
+ {file = "pandas-1.3.4-cp37-cp37m-win_amd64.whl", hash = "sha256:eaca36a80acaacb8183930e2e5ad7f71539a66805d6204ea88736570b2876a7b"},
+ {file = "pandas-1.3.4-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:42493f8ae67918bf129869abea8204df899902287a7f5eaf596c8e54e0ac7ff4"},
+ {file = "pandas-1.3.4-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a388960f979665b447f0847626e40f99af8cf191bce9dc571d716433130cb3a7"},
+ {file = "pandas-1.3.4-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5ba0aac1397e1d7b654fccf263a4798a9e84ef749866060d19e577e927d66e1b"},
+ {file = "pandas-1.3.4-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f567e972dce3bbc3a8076e0b675273b4a9e8576ac629149cf8286ee13c259ae5"},
+ {file = "pandas-1.3.4-cp38-cp38-win32.whl", hash = "sha256:c1aa4de4919358c5ef119f6377bc5964b3a7023c23e845d9db7d9016fa0c5b1c"},
+ {file = "pandas-1.3.4-cp38-cp38-win_amd64.whl", hash = "sha256:dd324f8ee05925ee85de0ea3f0d66e1362e8c80799eb4eb04927d32335a3e44a"},
+ {file = "pandas-1.3.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:d47750cf07dee6b55d8423471be70d627314277976ff2edd1381f02d52dbadf9"},
+ {file = "pandas-1.3.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2d1dc09c0013d8faa7474574d61b575f9af6257ab95c93dcf33a14fd8d2c1bab"},
+ {file = "pandas-1.3.4-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:10e10a2527db79af6e830c3d5842a4d60383b162885270f8cffc15abca4ba4a9"},
+ {file = "pandas-1.3.4-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:35c77609acd2e4d517da41bae0c11c70d31c87aae8dd1aabd2670906c6d2c143"},
+ {file = "pandas-1.3.4-cp39-cp39-win32.whl", hash = "sha256:003ba92db58b71a5f8add604a17a059f3068ef4e8c0c365b088468d0d64935fd"},
+ {file = "pandas-1.3.4-cp39-cp39-win_amd64.whl", hash = "sha256:a51528192755f7429c5bcc9e80832c517340317c861318fea9cea081b57c9afd"},
+ {file = "pandas-1.3.4.tar.gz", hash = "sha256:a2aa18d3f0b7d538e21932f637fbfe8518d085238b429e4790a35e1e44a96ffc"},
+]
+
+[package.dependencies]
+numpy = [
+ {version = ">=1.17.3", markers = "(platform_machine != \"aarch64\" and platform_machine != \"arm64\") and python_version < \"3.10\""},
+ {version = ">=1.19.2", markers = "platform_machine == \"aarch64\" and python_version < \"3.10\""},
+ {version = ">=1.20.0", markers = "platform_machine == \"arm64\" and python_version < \"3.10\""},
+ {version = ">=1.21.0", markers = "python_version >= \"3.10\""},
+]
+python-dateutil = ">=2.7.3"
+pytz = ">=2017.3"
+
+[package.extras]
+test = ["hypothesis (>=3.58)", "pytest (>=6.0)", "pytest-xdist"]
+
+[[package]]
+name = "paramiko"
+version = "3.4.0"
+description = "SSH2 protocol library"
+optional = false
+python-versions = ">=3.6"
+files = [
+ {file = "paramiko-3.4.0-py3-none-any.whl", hash = "sha256:43f0b51115a896f9c00f59618023484cb3a14b98bbceab43394a39c6739b7ee7"},
+ {file = "paramiko-3.4.0.tar.gz", hash = "sha256:aac08f26a31dc4dffd92821527d1682d99d52f9ef6851968114a8728f3c274d3"},
+]
+
+[package.dependencies]
+bcrypt = ">=3.2"
+cryptography = ">=3.3"
+pynacl = ">=1.5"
+
+[package.extras]
+all = ["gssapi (>=1.4.1)", "invoke (>=2.0)", "pyasn1 (>=0.1.7)", "pywin32 (>=2.1.8)"]
+gssapi = ["gssapi (>=1.4.1)", "pyasn1 (>=0.1.7)", "pywin32 (>=2.1.8)"]
+invoke = ["invoke (>=2.0)"]
+
+[[package]]
+name = "parsedatetime"
+version = "2.4"
+description = "Parse human-readable date/time text."
+optional = false
+python-versions = "*"
+files = [
+ {file = "parsedatetime-2.4-py2-none-any.whl", hash = "sha256:9ee3529454bf35c40a77115f5a596771e59e1aee8c53306f346c461b8e913094"},
+ {file = "parsedatetime-2.4.tar.gz", hash = "sha256:3d817c58fb9570d1eec1dd46fa9448cd644eeed4fb612684b02dfda3a79cb84b"},
+]
+
+[package.dependencies]
+future = "*"
+
+[[package]]
+name = "parso"
+version = "0.8.3"
+description = "A Python Parser"
+optional = false
+python-versions = ">=3.6"
+files = [
+ {file = "parso-0.8.3-py2.py3-none-any.whl", hash = "sha256:c001d4636cd3aecdaf33cbb40aebb59b094be2a74c556778ef5576c175e19e75"},
+ {file = "parso-0.8.3.tar.gz", hash = "sha256:8c07be290bb59f03588915921e29e8a50002acaf2cdc5fa0e0114f91709fafa0"},
+]
+
+[package.extras]
+qa = ["flake8 (==3.8.3)", "mypy (==0.782)"]
+testing = ["docopt", "pytest (<6.0.0)"]
+
+[[package]]
+name = "passlib"
+version = "1.7.3"
+description = "comprehensive password hashing framework supporting over 30 schemes"
+optional = false
+python-versions = "*"
+files = [
+ {file = "passlib-1.7.3-py2.py3-none-any.whl", hash = "sha256:a203263a2dbb97f3103603c780dce7a7253722951c717b424ed8cd7c72587ae1"},
+ {file = "passlib-1.7.3.tar.gz", hash = "sha256:0fe8b86a900b2885fed00cf5e96f040c7abd61496d65dec4c814e462f8499d8a"},
+]
+
+[package.extras]
+argon2 = ["argon2-cffi (>=18.2.0)"]
+bcrypt = ["bcrypt (>=3.1.0)"]
+build-docs = ["cloud-sptheme (>=1.10.1)", "sphinx (>=1.6)", "sphinxcontrib-fulltoc (>=1.2.0)"]
+totp = ["cryptography"]
+
+[[package]]
+name = "phoenixdb"
+version = "0.7"
+description = "Phoenix database adapter for Python"
+optional = false
+python-versions = "*"
+files = [
+ {file = "phoenixdb-0.7.tar.gz", hash = "sha256:0a51c72b1f70d74d9c06772a97257c77f9540a252f780c31592b5dc36ff5c892"},
+]
+
+[package.dependencies]
+protobuf = ">=3.0.0"
+
+[[package]]
+name = "pinotdb"
+version = "5.1.2"
+description = "Python DB-API and SQLAlchemy dialect for Pinot."
+optional = false
+python-versions = ">=3.7,<4"
+files = [
+ {file = "pinotdb-5.1.2-py3-none-any.whl", hash = "sha256:80026d57b2a6e3ae0c0645bd93fb9166d472756d65bbb5a00bbd4f1c93ef8cf1"},
+ {file = "pinotdb-5.1.2.tar.gz", hash = "sha256:70bfa0456aecc8e1029161def20dd8897b084414a888f926014324df152a40bb"},
+]
+
+[package.dependencies]
+ciso8601 = ">=2.1.3,<3.0.0"
+httpx = ">=0.23.0,<0.24.0"
+
+[package.extras]
+sqlalchemy = ["requests (>=2.25.0,<3.0.0)", "sqlalchemy (>=1.4,<2)"]
+
+[[package]]
+name = "platformdirs"
+version = "3.11.0"
+description = "A small Python package for determining appropriate platform-specific dirs, e.g. a \"user data dir\"."
+optional = false
+python-versions = ">=3.7"
+files = [
+ {file = "platformdirs-3.11.0-py3-none-any.whl", hash = "sha256:e9d171d00af68be50e9202731309c4e658fd8bc76f55c11c7dd760d023bda68e"},
+ {file = "platformdirs-3.11.0.tar.gz", hash = "sha256:cf8ee52a3afdb965072dcc652433e0c7e3e40cf5ea1477cd4b3b1d2eb75495b3"},
+]
+
+[package.extras]
+docs = ["furo (>=2023.7.26)", "proselint (>=0.13)", "sphinx (>=7.1.1)", "sphinx-autodoc-typehints (>=1.24)"]
+test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=7.4)", "pytest-cov (>=4.1)", "pytest-mock (>=3.11.1)"]
+
+[[package]]
+name = "pluggy"
+version = "1.3.0"
+description = "plugin and hook calling mechanisms for python"
+optional = false
+python-versions = ">=3.8"
+files = [
+ {file = "pluggy-1.3.0-py3-none-any.whl", hash = "sha256:d89c696a773f8bd377d18e5ecda92b7a3793cbe66c87060a6fb58c7b6e1061f7"},
+ {file = "pluggy-1.3.0.tar.gz", hash = "sha256:cf61ae8f126ac6f7c451172cf30e3e43d3ca77615509771b3a984a0730651e12"},
+]
+
+[package.extras]
+dev = ["pre-commit", "tox"]
+testing = ["pytest", "pytest-benchmark"]
+
+[[package]]
+name = "ply"
+version = "3.11"
+description = "Python Lex & Yacc"
+optional = false
+python-versions = "*"
+files = [
+ {file = "ply-3.11-py2.py3-none-any.whl", hash = "sha256:096f9b8350b65ebd2fd1346b12452efe5b9607f7482813ffca50c22722a807ce"},
+ {file = "ply-3.11.tar.gz", hash = "sha256:00c7c1aaa88358b9c765b6d3000c6eec0ba42abca5351b095321aef446081da3"},
+]
+
+[[package]]
+name = "pre-commit"
+version = "3.3.3"
+description = "A framework for managing and maintaining multi-language pre-commit hooks."
+optional = false
+python-versions = ">=3.8"
+files = [
+ {file = "pre_commit-3.3.3-py2.py3-none-any.whl", hash = "sha256:10badb65d6a38caff29703362271d7dca483d01da88f9d7e05d0b97171c136cb"},
+ {file = "pre_commit-3.3.3.tar.gz", hash = "sha256:a2256f489cd913d575c145132ae196fe335da32d91a8294b7afe6622335dd023"},
+]
+
+[package.dependencies]
+cfgv = ">=2.0.0"
+identify = ">=1.0.0"
+nodeenv = ">=0.11.1"
+pyyaml = ">=5.1"
+virtualenv = ">=20.10.0"
+
+[[package]]
+name = "prompt-toolkit"
+version = "3.0.43"
+description = "Library for building powerful interactive command lines in Python"
+optional = false
+python-versions = ">=3.7.0"
+files = [
+ {file = "prompt_toolkit-3.0.43-py3-none-any.whl", hash = "sha256:a11a29cb3bf0a28a387fe5122cdb649816a957cd9261dcedf8c9f1fef33eacf6"},
+ {file = "prompt_toolkit-3.0.43.tar.gz", hash = "sha256:3527b7af26106cbc65a040bcc84839a3566ec1b051bb0bfe953631e704b0ff7d"},
+]
+
+[package.dependencies]
+wcwidth = "*"
+
+[[package]]
+name = "protobuf"
+version = "3.20.2"
+description = "Protocol Buffers"
+optional = false
+python-versions = ">=3.7"
+files = [
+ {file = "protobuf-3.20.2-cp310-cp310-manylinux2014_aarch64.whl", hash = "sha256:09e25909c4297d71d97612f04f41cea8fa8510096864f2835ad2f3b3df5a5559"},
+ {file = "protobuf-3.20.2-cp310-cp310-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:e8fbc522303e09036c752a0afcc5c0603e917222d8bedc02813fd73b4b4ed804"},
+ {file = "protobuf-3.20.2-cp310-cp310-win32.whl", hash = "sha256:84a1544252a933ef07bb0b5ef13afe7c36232a774affa673fc3636f7cee1db6c"},
+ {file = "protobuf-3.20.2-cp310-cp310-win_amd64.whl", hash = "sha256:2c0b040d0b5d5d207936ca2d02f00f765906622c07d3fa19c23a16a8ca71873f"},
+ {file = "protobuf-3.20.2-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:3cb608e5a0eb61b8e00fe641d9f0282cd0eedb603be372f91f163cbfbca0ded0"},
+ {file = "protobuf-3.20.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:84fe5953b18a383fd4495d375fe16e1e55e0a3afe7b4f7b4d01a3a0649fcda9d"},
+ {file = "protobuf-3.20.2-cp37-cp37m-manylinux2014_aarch64.whl", hash = "sha256:384164994727f274cc34b8abd41a9e7e0562801361ee77437099ff6dfedd024b"},
+ {file = "protobuf-3.20.2-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:e39cf61bb8582bda88cdfebc0db163b774e7e03364bbf9ce1ead13863e81e359"},
+ {file = "protobuf-3.20.2-cp37-cp37m-win32.whl", hash = "sha256:18e34a10ae10d458b027d7638a599c964b030c1739ebd035a1dfc0e22baa3bfe"},
+ {file = "protobuf-3.20.2-cp37-cp37m-win_amd64.whl", hash = "sha256:8228e56a865c27163d5d1d1771d94b98194aa6917bcfb6ce139cbfa8e3c27334"},
+ {file = "protobuf-3.20.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:03d76b7bd42ac4a6e109742a4edf81ffe26ffd87c5993126d894fe48a120396a"},
+ {file = "protobuf-3.20.2-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:f52dabc96ca99ebd2169dadbe018824ebda08a795c7684a0b7d203a290f3adb0"},
+ {file = "protobuf-3.20.2-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:f34464ab1207114e73bba0794d1257c150a2b89b7a9faf504e00af7c9fd58978"},
+ {file = "protobuf-3.20.2-cp38-cp38-win32.whl", hash = "sha256:5d9402bf27d11e37801d1743eada54372f986a372ec9679673bfcc5c60441151"},
+ {file = "protobuf-3.20.2-cp38-cp38-win_amd64.whl", hash = "sha256:9c673c8bfdf52f903081816b9e0e612186684f4eb4c17eeb729133022d6032e3"},
+ {file = "protobuf-3.20.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:291fb4307094bf5ccc29f424b42268640e00d5240bf0d9b86bf3079f7576474d"},
+ {file = "protobuf-3.20.2-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:b4fdb29c5a7406e3f7ef176b2a7079baa68b5b854f364c21abe327bbeec01cdb"},
+ {file = "protobuf-3.20.2-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:7a5037af4e76c975b88c3becdf53922b5ffa3f2cddf657574a4920a3b33b80f3"},
+ {file = "protobuf-3.20.2-cp39-cp39-win32.whl", hash = "sha256:a9e5ae5a8e8985c67e8944c23035a0dff2c26b0f5070b2f55b217a1c33bbe8b1"},
+ {file = "protobuf-3.20.2-cp39-cp39-win_amd64.whl", hash = "sha256:c184485e0dfba4dfd451c3bd348c2e685d6523543a0f91b9fd4ae90eb09e8422"},
+ {file = "protobuf-3.20.2-py2.py3-none-any.whl", hash = "sha256:c9cdf251c582c16fd6a9f5e95836c90828d51b0069ad22f463761d27c6c19019"},
+ {file = "protobuf-3.20.2.tar.gz", hash = "sha256:712dca319eee507a1e7df3591e639a2b112a2f4a62d40fe7832a16fd19151750"},
+]
+
+[[package]]
+name = "psutil"
+version = "5.9.7"
+description = "Cross-platform lib for process and system monitoring in Python."
+optional = false
+python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*"
+files = [
+ {file = "psutil-5.9.7-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:0bd41bf2d1463dfa535942b2a8f0e958acf6607ac0be52265ab31f7923bcd5e6"},
+ {file = "psutil-5.9.7-cp27-cp27m-manylinux2010_i686.whl", hash = "sha256:5794944462509e49d4d458f4dbfb92c47539e7d8d15c796f141f474010084056"},
+ {file = "psutil-5.9.7-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:fe361f743cb3389b8efda21980d93eb55c1f1e3898269bc9a2a1d0bb7b1f6508"},
+ {file = "psutil-5.9.7-cp27-cp27mu-manylinux2010_i686.whl", hash = "sha256:e469990e28f1ad738f65a42dcfc17adaed9d0f325d55047593cb9033a0ab63df"},
+ {file = "psutil-5.9.7-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:3c4747a3e2ead1589e647e64aad601981f01b68f9398ddf94d01e3dc0d1e57c7"},
+ {file = "psutil-5.9.7-cp27-none-win32.whl", hash = "sha256:1d4bc4a0148fdd7fd8f38e0498639ae128e64538faa507df25a20f8f7fb2341c"},
+ {file = "psutil-5.9.7-cp27-none-win_amd64.whl", hash = "sha256:4c03362e280d06bbbfcd52f29acd79c733e0af33d707c54255d21029b8b32ba6"},
+ {file = "psutil-5.9.7-cp36-abi3-macosx_10_9_x86_64.whl", hash = "sha256:ea36cc62e69a13ec52b2f625c27527f6e4479bca2b340b7a452af55b34fcbe2e"},
+ {file = "psutil-5.9.7-cp36-abi3-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1132704b876e58d277168cd729d64750633d5ff0183acf5b3c986b8466cd0284"},
+ {file = "psutil-5.9.7-cp36-abi3-manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fe8b7f07948f1304497ce4f4684881250cd859b16d06a1dc4d7941eeb6233bfe"},
+ {file = "psutil-5.9.7-cp36-cp36m-win32.whl", hash = "sha256:b27f8fdb190c8c03914f908a4555159327d7481dac2f01008d483137ef3311a9"},
+ {file = "psutil-5.9.7-cp36-cp36m-win_amd64.whl", hash = "sha256:44969859757f4d8f2a9bd5b76eba8c3099a2c8cf3992ff62144061e39ba8568e"},
+ {file = "psutil-5.9.7-cp37-abi3-win32.whl", hash = "sha256:c727ca5a9b2dd5193b8644b9f0c883d54f1248310023b5ad3e92036c5e2ada68"},
+ {file = "psutil-5.9.7-cp37-abi3-win_amd64.whl", hash = "sha256:f37f87e4d73b79e6c5e749440c3113b81d1ee7d26f21c19c47371ddea834f414"},
+ {file = "psutil-5.9.7-cp38-abi3-macosx_11_0_arm64.whl", hash = "sha256:032f4f2c909818c86cea4fe2cc407f1c0f0cde8e6c6d702b28b8ce0c0d143340"},
+ {file = "psutil-5.9.7.tar.gz", hash = "sha256:3f02134e82cfb5d089fddf20bb2e03fd5cd52395321d1c8458a9e58500ff417c"},
+]
+
+[package.extras]
+test = ["enum34", "ipaddress", "mock", "pywin32", "wmi"]
+
+[[package]]
+name = "psycopg2-binary"
+version = "2.9.6"
+description = "psycopg2 - Python-PostgreSQL Database Adapter"
+optional = false
+python-versions = ">=3.6"
+files = [
+ {file = "psycopg2-binary-2.9.6.tar.gz", hash = "sha256:1f64dcfb8f6e0c014c7f55e51c9759f024f70ea572fbdef123f85318c297947c"},
+ {file = "psycopg2_binary-2.9.6-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d26e0342183c762de3276cca7a530d574d4e25121ca7d6e4a98e4f05cb8e4df7"},
+ {file = "psycopg2_binary-2.9.6-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c48d8f2db17f27d41fb0e2ecd703ea41984ee19362cbce52c097963b3a1b4365"},
+ {file = "psycopg2_binary-2.9.6-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ffe9dc0a884a8848075e576c1de0290d85a533a9f6e9c4e564f19adf8f6e54a7"},
+ {file = "psycopg2_binary-2.9.6-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8a76e027f87753f9bd1ab5f7c9cb8c7628d1077ef927f5e2446477153a602f2c"},
+ {file = "psycopg2_binary-2.9.6-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:6460c7a99fc939b849431f1e73e013d54aa54293f30f1109019c56a0b2b2ec2f"},
+ {file = "psycopg2_binary-2.9.6-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ae102a98c547ee2288637af07393dd33f440c25e5cd79556b04e3fca13325e5f"},
+ {file = "psycopg2_binary-2.9.6-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:9972aad21f965599ed0106f65334230ce826e5ae69fda7cbd688d24fa922415e"},
+ {file = "psycopg2_binary-2.9.6-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:7a40c00dbe17c0af5bdd55aafd6ff6679f94a9be9513a4c7e071baf3d7d22a70"},
+ {file = "psycopg2_binary-2.9.6-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:cacbdc5839bdff804dfebc058fe25684cae322987f7a38b0168bc1b2df703fb1"},
+ {file = "psycopg2_binary-2.9.6-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:7f0438fa20fb6c7e202863e0d5ab02c246d35efb1d164e052f2f3bfe2b152bd0"},
+ {file = "psycopg2_binary-2.9.6-cp310-cp310-win32.whl", hash = "sha256:b6c8288bb8a84b47e07013bb4850f50538aa913d487579e1921724631d02ea1b"},
+ {file = "psycopg2_binary-2.9.6-cp310-cp310-win_amd64.whl", hash = "sha256:61b047a0537bbc3afae10f134dc6393823882eb263088c271331602b672e52e9"},
+ {file = "psycopg2_binary-2.9.6-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:964b4dfb7c1c1965ac4c1978b0f755cc4bd698e8aa2b7667c575fb5f04ebe06b"},
+ {file = "psycopg2_binary-2.9.6-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:afe64e9b8ea66866a771996f6ff14447e8082ea26e675a295ad3bdbffdd72afb"},
+ {file = "psycopg2_binary-2.9.6-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:15e2ee79e7cf29582ef770de7dab3d286431b01c3bb598f8e05e09601b890081"},
+ {file = "psycopg2_binary-2.9.6-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:dfa74c903a3c1f0d9b1c7e7b53ed2d929a4910e272add6700c38f365a6002820"},
+ {file = "psycopg2_binary-2.9.6-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b83456c2d4979e08ff56180a76429263ea254c3f6552cd14ada95cff1dec9bb8"},
+ {file = "psycopg2_binary-2.9.6-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0645376d399bfd64da57148694d78e1f431b1e1ee1054872a5713125681cf1be"},
+ {file = "psycopg2_binary-2.9.6-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:e99e34c82309dd78959ba3c1590975b5d3c862d6f279f843d47d26ff89d7d7e1"},
+ {file = "psycopg2_binary-2.9.6-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:4ea29fc3ad9d91162c52b578f211ff1c931d8a38e1f58e684c45aa470adf19e2"},
+ {file = "psycopg2_binary-2.9.6-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:4ac30da8b4f57187dbf449294d23b808f8f53cad6b1fc3623fa8a6c11d176dd0"},
+ {file = "psycopg2_binary-2.9.6-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e78e6e2a00c223e164c417628572a90093c031ed724492c763721c2e0bc2a8df"},
+ {file = "psycopg2_binary-2.9.6-cp311-cp311-win32.whl", hash = "sha256:1876843d8e31c89c399e31b97d4b9725a3575bb9c2af92038464231ec40f9edb"},
+ {file = "psycopg2_binary-2.9.6-cp311-cp311-win_amd64.whl", hash = "sha256:b4b24f75d16a89cc6b4cdff0eb6a910a966ecd476d1e73f7ce5985ff1328e9a6"},
+ {file = "psycopg2_binary-2.9.6-cp36-cp36m-win32.whl", hash = "sha256:498807b927ca2510baea1b05cc91d7da4718a0f53cb766c154c417a39f1820a0"},
+ {file = "psycopg2_binary-2.9.6-cp36-cp36m-win_amd64.whl", hash = "sha256:0d236c2825fa656a2d98bbb0e52370a2e852e5a0ec45fc4f402977313329174d"},
+ {file = "psycopg2_binary-2.9.6-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:34b9ccdf210cbbb1303c7c4db2905fa0319391bd5904d32689e6dd5c963d2ea8"},
+ {file = "psycopg2_binary-2.9.6-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:84d2222e61f313c4848ff05353653bf5f5cf6ce34df540e4274516880d9c3763"},
+ {file = "psycopg2_binary-2.9.6-cp37-cp37m-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:30637a20623e2a2eacc420059be11527f4458ef54352d870b8181a4c3020ae6b"},
+ {file = "psycopg2_binary-2.9.6-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8122cfc7cae0da9a3077216528b8bb3629c43b25053284cc868744bfe71eb141"},
+ {file = "psycopg2_binary-2.9.6-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:38601cbbfe600362c43714482f43b7c110b20cb0f8172422c616b09b85a750c5"},
+ {file = "psycopg2_binary-2.9.6-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:c7e62ab8b332147a7593a385d4f368874d5fe4ad4e341770d4983442d89603e3"},
+ {file = "psycopg2_binary-2.9.6-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:2ab652e729ff4ad76d400df2624d223d6e265ef81bb8aa17fbd63607878ecbee"},
+ {file = "psycopg2_binary-2.9.6-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:c83a74b68270028dc8ee74d38ecfaf9c90eed23c8959fca95bd703d25b82c88e"},
+ {file = "psycopg2_binary-2.9.6-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:d4e6036decf4b72d6425d5b29bbd3e8f0ff1059cda7ac7b96d6ac5ed34ffbacd"},
+ {file = "psycopg2_binary-2.9.6-cp37-cp37m-win32.whl", hash = "sha256:a8c28fd40a4226b4a84bdf2d2b5b37d2c7bd49486b5adcc200e8c7ec991dfa7e"},
+ {file = "psycopg2_binary-2.9.6-cp37-cp37m-win_amd64.whl", hash = "sha256:51537e3d299be0db9137b321dfb6a5022caaab275775680e0c3d281feefaca6b"},
+ {file = "psycopg2_binary-2.9.6-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:cf4499e0a83b7b7edcb8dabecbd8501d0d3a5ef66457200f77bde3d210d5debb"},
+ {file = "psycopg2_binary-2.9.6-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:7e13a5a2c01151f1208d5207e42f33ba86d561b7a89fca67c700b9486a06d0e2"},
+ {file = "psycopg2_binary-2.9.6-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0e0f754d27fddcfd74006455b6e04e6705d6c31a612ec69ddc040a5468e44b4e"},
+ {file = "psycopg2_binary-2.9.6-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d57c3fd55d9058645d26ae37d76e61156a27722097229d32a9e73ed54819982a"},
+ {file = "psycopg2_binary-2.9.6-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:71f14375d6f73b62800530b581aed3ada394039877818b2d5f7fc77e3bb6894d"},
+ {file = "psycopg2_binary-2.9.6-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:441cc2f8869a4f0f4bb408475e5ae0ee1f3b55b33f350406150277f7f35384fc"},
+ {file = "psycopg2_binary-2.9.6-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:65bee1e49fa6f9cf327ce0e01c4c10f39165ee76d35c846ade7cb0ec6683e303"},
+ {file = "psycopg2_binary-2.9.6-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:af335bac6b666cc6aea16f11d486c3b794029d9df029967f9938a4bed59b6a19"},
+ {file = "psycopg2_binary-2.9.6-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:cfec476887aa231b8548ece2e06d28edc87c1397ebd83922299af2e051cf2827"},
+ {file = "psycopg2_binary-2.9.6-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:65c07febd1936d63bfde78948b76cd4c2a411572a44ac50719ead41947d0f26b"},
+ {file = "psycopg2_binary-2.9.6-cp38-cp38-win32.whl", hash = "sha256:4dfb4be774c4436a4526d0c554af0cc2e02082c38303852a36f6456ece7b3503"},
+ {file = "psycopg2_binary-2.9.6-cp38-cp38-win_amd64.whl", hash = "sha256:02c6e3cf3439e213e4ee930308dc122d6fb4d4bea9aef4a12535fbd605d1a2fe"},
+ {file = "psycopg2_binary-2.9.6-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:e9182eb20f41417ea1dd8e8f7888c4d7c6e805f8a7c98c1081778a3da2bee3e4"},
+ {file = "psycopg2_binary-2.9.6-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:8a6979cf527e2603d349a91060f428bcb135aea2be3201dff794813256c274f1"},
+ {file = "psycopg2_binary-2.9.6-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8338a271cb71d8da40b023a35d9c1e919eba6cbd8fa20a54b748a332c355d896"},
+ {file = "psycopg2_binary-2.9.6-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e3ed340d2b858d6e6fb5083f87c09996506af483227735de6964a6100b4e6a54"},
+ {file = "psycopg2_binary-2.9.6-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f81e65376e52f03422e1fb475c9514185669943798ed019ac50410fb4c4df232"},
+ {file = "psycopg2_binary-2.9.6-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bfb13af3c5dd3a9588000910178de17010ebcccd37b4f9794b00595e3a8ddad3"},
+ {file = "psycopg2_binary-2.9.6-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:4c727b597c6444a16e9119386b59388f8a424223302d0c06c676ec8b4bc1f963"},
+ {file = "psycopg2_binary-2.9.6-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:4d67fbdaf177da06374473ef6f7ed8cc0a9dc640b01abfe9e8a2ccb1b1402c1f"},
+ {file = "psycopg2_binary-2.9.6-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:0892ef645c2fabb0c75ec32d79f4252542d0caec1d5d949630e7d242ca4681a3"},
+ {file = "psycopg2_binary-2.9.6-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:02c0f3757a4300cf379eb49f543fb7ac527fb00144d39246ee40e1df684ab514"},
+ {file = "psycopg2_binary-2.9.6-cp39-cp39-win32.whl", hash = "sha256:c3dba7dab16709a33a847e5cd756767271697041fbe3fe97c215b1fc1f5c9848"},
+ {file = "psycopg2_binary-2.9.6-cp39-cp39-win_amd64.whl", hash = "sha256:f6a88f384335bb27812293fdb11ac6aee2ca3f51d3c7820fe03de0a304ab6249"},
+]
+
+[[package]]
+name = "ptpython"
+version = "3.0.23"
+description = "Python REPL build on top of prompt_toolkit"
+optional = false
+python-versions = ">=3.7"
+files = [
+ {file = "ptpython-3.0.23-py2.py3-none-any.whl", hash = "sha256:51069503684169b21e1980734a9ba2e104643b7e6a50d3ca0e5669ea70d9e21c"},
+ {file = "ptpython-3.0.23.tar.gz", hash = "sha256:9fc9bec2cc51bc4000c1224d8c56241ce8a406b3d49ec8dc266f78cd3cd04ba4"},
+]
+
+[package.dependencies]
+appdirs = "*"
+jedi = ">=0.16.0"
+prompt-toolkit = ">=3.0.28,<3.1.0"
+pygments = "*"
+
+[package.extras]
+all = ["black"]
+ptipython = ["ipython"]
+
+[[package]]
+name = "ptvsd"
+version = "4.3.2"
+description = "Remote debugging server for Python support in Visual Studio and Visual Studio Code"
+optional = false
+python-versions = ">=2.7,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*"
+files = [
+ {file = "ptvsd-4.3.2-cp27-cp27m-macosx_10_13_x86_64.whl", hash = "sha256:22b699369a18ff28d4d1aa6a452739e50c7b7790cb16c6312d766e023c12fe27"},
+ {file = "ptvsd-4.3.2-cp27-cp27m-manylinux1_i686.whl", hash = "sha256:3f839fe91d9ddca0d6a3a0afd6a1c824be1768498a737ab9333d084c5c3f3591"},
+ {file = "ptvsd-4.3.2-cp27-cp27m-manylinux1_x86_64.whl", hash = "sha256:70260b4591c07bff95566d49b6a5dc3051d8558035c43c847bad9a954def46bb"},
+ {file = "ptvsd-4.3.2-cp27-cp27mu-manylinux1_i686.whl", hash = "sha256:d2662ec37ee049c0f8f2f9a378abeb7e570d9215c19eaf0a6d7189464195009f"},
+ {file = "ptvsd-4.3.2-cp27-cp27mu-manylinux1_x86_64.whl", hash = "sha256:d9337ebba4d099698982e090b203e85670086c4b29cf1185b2e45cd353a8053e"},
+ {file = "ptvsd-4.3.2-cp34-cp34m-macosx_10_13_x86_64.whl", hash = "sha256:cf09fd4d90c4c42ddd9bf853290f1a80bc2128993a3923bd3b96b68cc1acd03f"},
+ {file = "ptvsd-4.3.2-cp34-cp34m-manylinux1_i686.whl", hash = "sha256:ccc5c533135305709461f545feed5061c608714db38fa0f58e3f848a127b7fde"},
+ {file = "ptvsd-4.3.2-cp34-cp34m-manylinux1_x86_64.whl", hash = "sha256:de5234bec74c47da668e1a1a21bcc9821af0cbb28b5153df78cd5abc744b29a2"},
+ {file = "ptvsd-4.3.2-cp35-cp35m-macosx_10_13_x86_64.whl", hash = "sha256:c893fb9d1c2ef8f980cc00ced3fd90356f86d9f59b58ee97e0e7e622b8860f76"},
+ {file = "ptvsd-4.3.2-cp35-cp35m-manylinux1_i686.whl", hash = "sha256:2bbc121bce3608501998afbe742f02b80e7d26b8fecd38f78b903f22f52a81d9"},
+ {file = "ptvsd-4.3.2-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:fad06de012a78f277318d0c308dd3d7cc1f67167f3b2e1e2f7c6caf04c03440c"},
+ {file = "ptvsd-4.3.2-cp35-cp35m-win32.whl", hash = "sha256:92d26aa7c8f7ffe41cb4b50a00846027027fa17acdf2d9dd8c24de77b25166c6"},
+ {file = "ptvsd-4.3.2-cp35-cp35m-win_amd64.whl", hash = "sha256:eda10ecd43daacc180a6fbe524992be76a877c3559e2b78016b4ada8fec10273"},
+ {file = "ptvsd-4.3.2-cp36-cp36m-macosx_10_13_x86_64.whl", hash = "sha256:c01204e3f025c3f7252c79c1a8a028246d29e3ef339e1a01ddf652999f47bdea"},
+ {file = "ptvsd-4.3.2-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:c97c71835dde7e67fc7b06398bee1c012559a0784ebda9cf8acaf176c7ae766c"},
+ {file = "ptvsd-4.3.2-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:612948a045fcf9c8931cd306972902440278f34de7ca684b49d4caeec9f1ec62"},
+ {file = "ptvsd-4.3.2-cp36-cp36m-win32.whl", hash = "sha256:72d114baa5737baf29c8068d1ccdd93cbb332d2030601c888eed0e3761b588d7"},
+ {file = "ptvsd-4.3.2-cp36-cp36m-win_amd64.whl", hash = "sha256:58508485a1609a495dd45829bd6d219303cf9edef5ca1f01a9ed8ffaa87f390c"},
+ {file = "ptvsd-4.3.2-cp37-cp37m-macosx_10_13_x86_64.whl", hash = "sha256:20f48ffed42a6beb879c250d82662e175ad59cc46a29c95c6a4472ae413199c5"},
+ {file = "ptvsd-4.3.2-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:b9970e3dc987eb2a6001af6c9d2f726dd6455cfc6d47e0f51925cbdee7ea2157"},
+ {file = "ptvsd-4.3.2-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:1d3d82ecc82186d099992a748556e6e54037f5c5e4d3fc9bba3e2302354be0d4"},
+ {file = "ptvsd-4.3.2-cp37-cp37m-win32.whl", hash = "sha256:10745fbb788001959b4de405198d8bd5243611a88fb5a2e2c6800245bc0ddd74"},
+ {file = "ptvsd-4.3.2-cp37-cp37m-win_amd64.whl", hash = "sha256:90cbd082e7a9089664888d0d94aca760202f080133fca8f3fe65c48ed6b9e39d"},
+ {file = "ptvsd-4.3.2-py2.py3-none-any.whl", hash = "sha256:459137736068bb02515040b2ed2738169cb30d69a38e0fd5dffcba255f41e68d"},
+ {file = "ptvsd-4.3.2.zip", hash = "sha256:3b05c06018fdbce5943c50fb0baac695b5c11326f9e21a5266c854306bda28ab"},
+]
+
+[[package]]
+name = "pure-sasl"
+version = "0.6.2"
+description = "Pure Python client SASL implementation"
+optional = false
+python-versions = "*"
+files = [
+ {file = "pure-sasl-0.6.2.tar.gz", hash = "sha256:53c1355f5da95e2b85b2cc9a6af435518edc20c81193faa0eea65fdc835138f4"},
+ {file = "pure_sasl-0.6.2-py2-none-any.whl", hash = "sha256:edb33b1a46eb3c602c0166de0442c0fb41f5ac2bfccbde4775183b105ad89ab2"},
+]
+
+[package.extras]
+gssapi = ["kerberos (>=1.3.0)"]
+
+[[package]]
+name = "pyasn1"
+version = "0.5.1"
+description = "Pure-Python implementation of ASN.1 types and DER/BER/CER codecs (X.208)"
+optional = false
+python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,>=2.7"
+files = [
+ {file = "pyasn1-0.5.1-py2.py3-none-any.whl", hash = "sha256:4439847c58d40b1d0a573d07e3856e95333f1976294494c325775aeca506eb58"},
+ {file = "pyasn1-0.5.1.tar.gz", hash = "sha256:6d391a96e59b23130a5cfa74d6fd7f388dbbe26cc8f1edf39fdddf08d9d6676c"},
+]
+
+[[package]]
+name = "pyasn1-modules"
+version = "0.3.0"
+description = "A collection of ASN.1-based protocols modules"
+optional = false
+python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,>=2.7"
+files = [
+ {file = "pyasn1_modules-0.3.0-py2.py3-none-any.whl", hash = "sha256:d3ccd6ed470d9ffbc716be08bd90efbd44d0734bc9303818f7336070984a162d"},
+ {file = "pyasn1_modules-0.3.0.tar.gz", hash = "sha256:5bd01446b736eb9d31512a30d46c1ac3395d676c6f3cafa4c03eb54b9925631c"},
+]
+
+[package.dependencies]
+pyasn1 = ">=0.4.6,<0.6.0"
+
+[[package]]
+name = "pyathena"
+version = "1.11.5"
+description = "Python DB API 2.0 (PEP 249) client for Amazon Athena"
+optional = false
+python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*"
+files = [
+ {file = "PyAthena-1.11.5-py2.py3-none-any.whl", hash = "sha256:8cc5d40236993fe5241bb625e78d0a0a149e629b74569a9636b49168448a7ac8"},
+ {file = "PyAthena-1.11.5.tar.gz", hash = "sha256:86c0f4d10528de44fcd63222506949b010dff36ad57116e4c1274c1cfa9477d0"},
+]
+
+[package.dependencies]
+boto3 = ">=1.4.4"
+botocore = ">=1.5.52"
+future = "*"
+tenacity = ">=4.1.0"
+
+[package.extras]
+pandas = ["pandas (>=0.24.0)", "pyarrow (>=0.15.0)"]
+sqlalchemy = ["sqlalchemy (>=1.0.0,<2.0.0)"]
+
+[[package]]
+name = "pycparser"
+version = "2.21"
+description = "C parser in Python"
+optional = false
+python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*"
+files = [
+ {file = "pycparser-2.21-py2.py3-none-any.whl", hash = "sha256:8ee45429555515e1f6b185e78100aea234072576aa43ab53aefcae078162fca9"},
+ {file = "pycparser-2.21.tar.gz", hash = "sha256:e644fdec12f7872f86c58ff790da456218b10f863970249516d60a5eaca77206"},
+]
+
+[[package]]
+name = "pycryptodome"
+version = "3.20.0"
+description = "Cryptographic library for Python"
+optional = false
+python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*"
+files = [
+ {file = "pycryptodome-3.20.0-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:f0e6d631bae3f231d3634f91ae4da7a960f7ff87f2865b2d2b831af1dfb04e9a"},
+ {file = "pycryptodome-3.20.0-cp27-cp27m-manylinux2010_i686.whl", hash = "sha256:baee115a9ba6c5d2709a1e88ffe62b73ecc044852a925dcb67713a288c4ec70f"},
+ {file = "pycryptodome-3.20.0-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:417a276aaa9cb3be91f9014e9d18d10e840a7a9b9a9be64a42f553c5b50b4d1d"},
+ {file = "pycryptodome-3.20.0-cp27-cp27m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2a1250b7ea809f752b68e3e6f3fd946b5939a52eaeea18c73bdab53e9ba3c2dd"},
+ {file = "pycryptodome-3.20.0-cp27-cp27m-musllinux_1_1_aarch64.whl", hash = "sha256:d5954acfe9e00bc83ed9f5cb082ed22c592fbbef86dc48b907238be64ead5c33"},
+ {file = "pycryptodome-3.20.0-cp27-cp27m-win32.whl", hash = "sha256:06d6de87c19f967f03b4cf9b34e538ef46e99a337e9a61a77dbe44b2cbcf0690"},
+ {file = "pycryptodome-3.20.0-cp27-cp27m-win_amd64.whl", hash = "sha256:ec0bb1188c1d13426039af8ffcb4dbe3aad1d7680c35a62d8eaf2a529b5d3d4f"},
+ {file = "pycryptodome-3.20.0-cp27-cp27mu-manylinux2010_i686.whl", hash = "sha256:5601c934c498cd267640b57569e73793cb9a83506f7c73a8ec57a516f5b0b091"},
+ {file = "pycryptodome-3.20.0-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:d29daa681517f4bc318cd8a23af87e1f2a7bad2fe361e8aa29c77d652a065de4"},
+ {file = "pycryptodome-3.20.0-cp27-cp27mu-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3427d9e5310af6680678f4cce149f54e0bb4af60101c7f2c16fdf878b39ccccc"},
+ {file = "pycryptodome-3.20.0-cp27-cp27mu-musllinux_1_1_aarch64.whl", hash = "sha256:3cd3ef3aee1079ae44afaeee13393cf68b1058f70576b11439483e34f93cf818"},
+ {file = "pycryptodome-3.20.0-cp35-abi3-macosx_10_9_universal2.whl", hash = "sha256:ac1c7c0624a862f2e53438a15c9259d1655325fc2ec4392e66dc46cdae24d044"},
+ {file = "pycryptodome-3.20.0-cp35-abi3-macosx_10_9_x86_64.whl", hash = "sha256:76658f0d942051d12a9bd08ca1b6b34fd762a8ee4240984f7c06ddfb55eaf15a"},
+ {file = "pycryptodome-3.20.0-cp35-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f35d6cee81fa145333137009d9c8ba90951d7d77b67c79cbe5f03c7eb74d8fe2"},
+ {file = "pycryptodome-3.20.0-cp35-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:76cb39afede7055127e35a444c1c041d2e8d2f1f9c121ecef573757ba4cd2c3c"},
+ {file = "pycryptodome-3.20.0-cp35-abi3-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:49a4c4dc60b78ec41d2afa392491d788c2e06edf48580fbfb0dd0f828af49d25"},
+ {file = "pycryptodome-3.20.0-cp35-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:fb3b87461fa35afa19c971b0a2b7456a7b1db7b4eba9a8424666104925b78128"},
+ {file = "pycryptodome-3.20.0-cp35-abi3-musllinux_1_1_i686.whl", hash = "sha256:acc2614e2e5346a4a4eab6e199203034924313626f9620b7b4b38e9ad74b7e0c"},
+ {file = "pycryptodome-3.20.0-cp35-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:210ba1b647837bfc42dd5a813cdecb5b86193ae11a3f5d972b9a0ae2c7e9e4b4"},
+ {file = "pycryptodome-3.20.0-cp35-abi3-win32.whl", hash = "sha256:8d6b98d0d83d21fb757a182d52940d028564efe8147baa9ce0f38d057104ae72"},
+ {file = "pycryptodome-3.20.0-cp35-abi3-win_amd64.whl", hash = "sha256:9b3ae153c89a480a0ec402e23db8d8d84a3833b65fa4b15b81b83be9d637aab9"},
+ {file = "pycryptodome-3.20.0-pp27-pypy_73-manylinux2010_x86_64.whl", hash = "sha256:4401564ebf37dfde45d096974c7a159b52eeabd9969135f0426907db367a652a"},
+ {file = "pycryptodome-3.20.0-pp27-pypy_73-win32.whl", hash = "sha256:ec1f93feb3bb93380ab0ebf8b859e8e5678c0f010d2d78367cf6bc30bfeb148e"},
+ {file = "pycryptodome-3.20.0-pp310-pypy310_pp73-macosx_10_9_x86_64.whl", hash = "sha256:acae12b9ede49f38eb0ef76fdec2df2e94aad85ae46ec85be3648a57f0a7db04"},
+ {file = "pycryptodome-3.20.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f47888542a0633baff535a04726948e876bf1ed880fddb7c10a736fa99146ab3"},
+ {file = "pycryptodome-3.20.0-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6e0e4a987d38cfc2e71b4a1b591bae4891eeabe5fa0f56154f576e26287bfdea"},
+ {file = "pycryptodome-3.20.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:c18b381553638414b38705f07d1ef0a7cf301bc78a5f9bc17a957eb19446834b"},
+ {file = "pycryptodome-3.20.0-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:a60fedd2b37b4cb11ccb5d0399efe26db9e0dd149016c1cc6c8161974ceac2d6"},
+ {file = "pycryptodome-3.20.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:405002eafad114a2f9a930f5db65feef7b53c4784495dd8758069b89baf68eab"},
+ {file = "pycryptodome-3.20.0-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2ab6ab0cb755154ad14e507d1df72de9897e99fd2d4922851a276ccc14f4f1a5"},
+ {file = "pycryptodome-3.20.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:acf6e43fa75aca2d33e93409f2dafe386fe051818ee79ee8a3e21de9caa2ac9e"},
+ {file = "pycryptodome-3.20.0.tar.gz", hash = "sha256:09609209ed7de61c2b560cc5c8c4fbf892f8b15b1faf7e4cbffac97db1fffda7"},
+]
+
+[[package]]
+name = "pydgraph"
+version = "2.0.2"
+description = "Official Dgraph client implementation for Python"
+optional = false
+python-versions = "*"
+files = [
+ {file = "pydgraph-2.0.2-py2.py3-none-any.whl", hash = "sha256:b4c91156aef5ef0899bee52224dab7ad8cf8ae63a82d16011d372f18f91090f7"},
+ {file = "pydgraph-2.0.2.tar.gz", hash = "sha256:38bfa37b785bf5bcf53f58a02ce8417d40c83723c6b2cdbf428437fbc517ce61"},
+]
+
+[package.dependencies]
+grpcio = ">=1.18.0"
+protobuf = ">=3.6.1"
+
+[[package]]
+name = "pydruid"
+version = "0.5.7"
+description = "A Python connector for Druid."
+optional = false
+python-versions = "*"
+files = [
+ {file = "pydruid-0.5.7.tar.gz", hash = "sha256:88d4fa608c96969dc8f8e03cc0cfc4a38c4c3039b4f899db0af896ab0fa6ff23"},
+]
+
+[package.dependencies]
+requests = "*"
+six = ">=1.9.0"
+
+[package.extras]
+async = ["tornado"]
+cli = ["prompt_toolkit (<2.0.0)", "pygments", "tabulate"]
+pandas = ["pandas"]
+sqlalchemy = ["sqlalchemy"]
+
+[[package]]
+name = "pyexasol"
+version = "0.12.0"
+description = "Exasol python driver with extra features"
+optional = false
+python-versions = ">=3.6"
+files = [
+ {file = "pyexasol-0.12.0-py3-none-any.whl", hash = "sha256:5b5d2907a570c321a89c2e5bd3c9bcea46b46cea76abf6adc4d92f4929f63fd4"},
+ {file = "pyexasol-0.12.0.tar.gz", hash = "sha256:e15f4eccf7bdef7a4063a8280e625d8913cb2f6e4971088c67f3a33fadbffdd9"},
+]
+
+[package.dependencies]
+rsa = "*"
+websocket-client = ">=0.47.0"
+
+[package.extras]
+encrypt = ["pyopenssl (>=17.5.0)"]
+examples = ["pproxy", "psutil"]
+pandas = ["pandas (>=0.22,!=0.23.1)"]
+rapidjson = ["python-rapidjson"]
+ujson = ["ujson"]
+
+[[package]]
+name = "pygments"
+version = "2.17.2"
+description = "Pygments is a syntax highlighting package written in Python."
+optional = false
+python-versions = ">=3.7"
+files = [
+ {file = "pygments-2.17.2-py3-none-any.whl", hash = "sha256:b27c2826c47d0f3219f29554824c30c5e8945175d888647acd804ddd04af846c"},
+ {file = "pygments-2.17.2.tar.gz", hash = "sha256:da46cec9fd2de5be3a8a784f434e4c4ab670b4ff54d605c4c2717e9d49c4c367"},
+]
+
+[package.extras]
+plugins = ["importlib-metadata"]
+windows-terminal = ["colorama (>=0.4.6)"]
+
+[[package]]
+name = "pyhive"
+version = "0.6.1"
+description = "Python interface to Hive"
+optional = false
+python-versions = "*"
+files = [
+ {file = "PyHive-0.6.1.tar.gz", hash = "sha256:a5f2b2f8bcd85a8cd80ab64ff8fbfe1c09515d266650a56f789a8d89ad66d7f4"},
+]
+
+[package.dependencies]
+future = "*"
+python-dateutil = "*"
+
+[package.extras]
+hive = ["sasl (>=0.2.1)", "thrift (>=0.10.0)", "thrift_sasl (>=0.1.0)"]
+presto = ["requests (>=1.0.0)"]
+sqlalchemy = ["sqlalchemy (>=0.8.7)"]
+
+[[package]]
+name = "pyignite"
+version = "0.6.1"
+description = "Apache Ignite binary client Python API"
+optional = false
+python-versions = ">=3.7"
+files = [
+ {file = "pyignite-0.6.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:0da286f6c01a5ecd5ea7d8d5d1d3886af1ae393c003b066ca06f5c274ddac428"},
+ {file = "pyignite-0.6.1-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e9d5b8e2f3c222d1fb50be6de62860b7eeb117378666e67db7cd358c5b14918e"},
+ {file = "pyignite-0.6.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:c63ec63e812274cd7a0fe611b0eac4e41da9ba829cb47a3b1dc28a66f1dd9a77"},
+ {file = "pyignite-0.6.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:5185f346b4c5561eda9df15b7365d1b3b08e5ffb11e8f306259aff71c8516175"},
+ {file = "pyignite-0.6.1-cp310-cp310-win_amd64.whl", hash = "sha256:03f4701474f5ade8983d21c70999b8ebb021c6f5a7b0fb70a7f1339e52d3bf31"},
+ {file = "pyignite-0.6.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:e50401db905556d46ad1029fa9d6752897c29fe7be0bf671aa8edcfb176f8453"},
+ {file = "pyignite-0.6.1-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2212d7e00cfd3a307696bc3423af1d906963da54fcece24a7913a0ae5ea34ea3"},
+ {file = "pyignite-0.6.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:d9511943dd652a6bd602c923a896a024491d37d5f69ea718c595b3cd3b01236c"},
+ {file = "pyignite-0.6.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:50f213a3dca8dfbb6534d8e421ea1ac6d65b782cefb6d3d55c750b352be3779b"},
+ {file = "pyignite-0.6.1-cp311-cp311-win_amd64.whl", hash = "sha256:5aaa1aa32dfb6305d39153f930a4fd952b43347366a73e6643b699b8bdf73d48"},
+ {file = "pyignite-0.6.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:699261fb4f68a3d470370cb2ac507086aae8a84305bb440f38c7508f4a4ef288"},
+ {file = "pyignite-0.6.1-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a9c24ed39b85a492993fbf33238bcea5cf390020bf3d484b55fb78fb78bf9c10"},
+ {file = "pyignite-0.6.1-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:ccb29dfd55510f1cba174f0bdd205470a794b79beb0ad2a6655a8a197a667365"},
+ {file = "pyignite-0.6.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:842aabb7fc3e32e81ae7f2b0a1406688748d209b3b22b2df9055be5812feb3da"},
+ {file = "pyignite-0.6.1-cp37-cp37m-win_amd64.whl", hash = "sha256:48ebcc470c464de155a08495ede09338c5479a2e25e1a92ea3008d7dc4d8bcd7"},
+ {file = "pyignite-0.6.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:4fd49d3c849cb4159916a6c050bd6e083b58841b8a8cc6eab5b94ec0fb7463f6"},
+ {file = "pyignite-0.6.1-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4fbd6a919833fcf83e97bd9f79de1771f7249778daf1585b7e159d2506a7534d"},
+ {file = "pyignite-0.6.1-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:39ad2c07bb5ff1983b7e78edd32d4e9975d6de4a3966b171ea1d78d39a04b238"},
+ {file = "pyignite-0.6.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:da7da4486aea27c5e4927d224a31acb1bc058d2397a60a9f3ed9537755dc75da"},
+ {file = "pyignite-0.6.1-cp38-cp38-win_amd64.whl", hash = "sha256:dee6efd47860bf21042a8bdf5ad3c6bdf9a55beedfc9431cacb57bda37045a93"},
+ {file = "pyignite-0.6.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:bb6bf352c5a8547350d618a8e66059675437249d2ed633655273e9c34d2421aa"},
+ {file = "pyignite-0.6.1-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3f60ce18bb64d1031e3366a454a9f344c3ffe95ebc0d86251c6981a23479cbdb"},
+ {file = "pyignite-0.6.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:bf717838df0b8c9b86664f761d0cb43dab26f037e6aae404054deaa9ad60590b"},
+ {file = "pyignite-0.6.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:73f145c70b99e47f38b94fc89109261e0a5ad284db46531955a4610bf9706d9f"},
+ {file = "pyignite-0.6.1-cp39-cp39-win_amd64.whl", hash = "sha256:6c327eb1261c26b41c64ff36d290ca5c3aa9dc35b4ffbbb39ae57935fe1914f1"},
+ {file = "pyignite-0.6.1.zip", hash = "sha256:e143d7a907e2662da11f63d2e9e34234dc722cd63c9aeb74486280e3aef3369d"},
+]
+
+[package.dependencies]
+attrs = ">=20.3.0"
+
+[[package]]
+name = "pyjwt"
+version = "2.4.0"
+description = "JSON Web Token implementation in Python"
+optional = false
+python-versions = ">=3.6"
+files = [
+ {file = "PyJWT-2.4.0-py3-none-any.whl", hash = "sha256:72d1d253f32dbd4f5c88eaf1fdc62f3a19f676ccbadb9dbc5d07e951b2b26daf"},
+ {file = "PyJWT-2.4.0.tar.gz", hash = "sha256:d42908208c699b3b973cbeb01a969ba6a96c821eefb1c5bfe4c390c01d67abba"},
+]
+
+[package.extras]
+crypto = ["cryptography (>=3.3.1)"]
+dev = ["coverage[toml] (==5.0.4)", "cryptography (>=3.3.1)", "mypy", "pre-commit", "pytest (>=6.0.0,<7.0.0)", "sphinx", "sphinx-rtd-theme", "zope.interface"]
+docs = ["sphinx", "sphinx-rtd-theme", "zope.interface"]
+tests = ["coverage[toml] (==5.0.4)", "pytest (>=6.0.0,<7.0.0)"]
+
+[[package]]
+name = "pymongo"
+version = "4.6.3"
+description = "Python driver for MongoDB <http://www.mongodb.org>"
+optional = false
+python-versions = ">=3.7"
+files = [
+ {file = "pymongo-4.6.3-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:e344d0afdd7c06c1f1e66a4736593293f432defc2191e6b411fc9c82fa8c5adc"},
+ {file = "pymongo-4.6.3-cp310-cp310-manylinux1_i686.whl", hash = "sha256:731a92dfc4022db763bfa835c6bd160f2d2cba6ada75749c2ed500e13983414b"},
+ {file = "pymongo-4.6.3-cp310-cp310-manylinux2014_aarch64.whl", hash = "sha256:c4726e36a2f7e92f09f5b8e92ba4db7525daffe31a0dcbcf0533edc0ade8c7d8"},
+ {file = "pymongo-4.6.3-cp310-cp310-manylinux2014_i686.whl", hash = "sha256:00e6cfce111883ca63a3c12878286e0b89871f4b840290e61fb6f88ee0e687be"},
+ {file = "pymongo-4.6.3-cp310-cp310-manylinux2014_ppc64le.whl", hash = "sha256:cc7a26edf79015c58eea46feb5b262cece55bc1d4929a8a9e0cbe7e6d6a9b0eb"},
+ {file = "pymongo-4.6.3-cp310-cp310-manylinux2014_s390x.whl", hash = "sha256:4955be64d943b30f2a7ff98d818ca530f7cb37450bc6b32c37e0e74821907ef8"},
+ {file = "pymongo-4.6.3-cp310-cp310-manylinux2014_x86_64.whl", hash = "sha256:af039afc6d787502c02089759778b550cb2f25dbe2780f5b050a2e37031c3fbf"},
+ {file = "pymongo-4.6.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ccc15a7c7a99aed7d0831eaf78a607f1db0c7a255f96e3d18984231acd72f70c"},
+ {file = "pymongo-4.6.3-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8e97c138d811e9367723fcd07c4402a9211caae20479fdd6301d57762778a69f"},
+ {file = "pymongo-4.6.3-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ebcc145c74d06296ce0cad35992185064e5cb2aadef719586778c144f0cd4d37"},
+ {file = "pymongo-4.6.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:664c64b6bdb31aceb80f0556951e5e2bf50d359270732268b4e7af00a1cf5d6c"},
+ {file = "pymongo-4.6.3-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e4056bc421d4df2c61db4e584415f2b0f1eebb92cbf9222f7f38303467c37117"},
+ {file = "pymongo-4.6.3-cp310-cp310-win32.whl", hash = "sha256:cdbea2aac1a4caa66ee912af3601557d2bda2f9f69feec83601c78c7e53ece64"},
+ {file = "pymongo-4.6.3-cp310-cp310-win_amd64.whl", hash = "sha256:6cec7279e5a1b74b257d0270a8c97943d745811066630a6bc6beb413c68c6a33"},
+ {file = "pymongo-4.6.3-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:138b9fa18d40401c217bc038a48bcde4160b02d36d8632015b1804971a2eaa2f"},
+ {file = "pymongo-4.6.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:60931b0e07448afe8866ffff764cd5bf4b1a855dc84c7dcb3974c6aa6a377a59"},
+ {file = "pymongo-4.6.3-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9b35f8bded43ff91475305445fedf0613f880ff7e25c75ae1028e1260a9b7a86"},
+ {file = "pymongo-4.6.3-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:872bad5c83f7eec9da11e1fef5f858c6a4c79fe4a83c7780e7b0fe95d560ae3f"},
+ {file = "pymongo-4.6.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c2ad3e5bfcd345c0bfe9af69a82d720860b5b043c1657ffb513c18a0dee19c19"},
+ {file = "pymongo-4.6.3-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0e208f2ab7b495eff8fd175022abfb0abce6307ac5aee3f4de51fc1a459b71c9"},
+ {file = "pymongo-4.6.3-cp311-cp311-win32.whl", hash = "sha256:4670edbb5ddd71a4d555668ef99b032a5f81b59e4145d66123aa0d831eac7883"},
+ {file = "pymongo-4.6.3-cp311-cp311-win_amd64.whl", hash = "sha256:1c2761302b6cbfd12e239ce1b8061d4cf424a361d199dcb32da534985cae9350"},
+ {file = "pymongo-4.6.3-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:722f2b709b63311c0efda4fa4c603661faa4bec6bad24a6cc41a3bc6d841bf09"},
+ {file = "pymongo-4.6.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:994386a4d6ad39e18bcede6dc8d1d693ec3ed897b88f86b1841fbc37227406da"},
+ {file = "pymongo-4.6.3-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:391aea047bba928006114282f175bc8d09c53fe1b7d8920bf888325e229302fe"},
+ {file = "pymongo-4.6.3-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f4330c022024e7994b630199cdae909123e4b0e9cf15335de71b146c0f6a2435"},
+ {file = "pymongo-4.6.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:01277a7e183c59081368e4efbde2b8f577014431b257959ca98d3a4e8682dd51"},
+ {file = "pymongo-4.6.3-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d30d5d7963453b478016bf7b0d87d7089ca24d93dbdecfbc9aa32f1b4772160a"},
+ {file = "pymongo-4.6.3-cp312-cp312-win32.whl", hash = "sha256:a023804a3ac0f85d4510265b60978522368b5815772262e61e3a2222a8b315c9"},
+ {file = "pymongo-4.6.3-cp312-cp312-win_amd64.whl", hash = "sha256:2a6ae9a600bbc2dbff719c98bf5da584fb8a4f2bb23729a09be2e9c3dbc61c8a"},
+ {file = "pymongo-4.6.3-cp37-cp37m-macosx_10_6_intel.whl", hash = "sha256:3b909e5b1864de01510079b39bbdc480720c37747be5552b354bc73f02c24a3c"},
+ {file = "pymongo-4.6.3-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:48c60bd32ec141c0d45d8471179430003d9fb4490da181b8165fb1dce9cc255c"},
+ {file = "pymongo-4.6.3-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:36d7049fc183fe4edda3eae7f66ea14c660921429e082fe90b4b7f4dc6664a70"},
+ {file = "pymongo-4.6.3-cp37-cp37m-manylinux2014_aarch64.whl", hash = "sha256:18e5c161b18660f1c9d1f78236de45520a436be65e42b7bb51f25f74ad22bdde"},
+ {file = "pymongo-4.6.3-cp37-cp37m-manylinux2014_i686.whl", hash = "sha256:e458e6fc2b7dd40d15cda04898bd2d8c9ff7ae086c516bc261628d54eb4e3158"},
+ {file = "pymongo-4.6.3-cp37-cp37m-manylinux2014_ppc64le.whl", hash = "sha256:e420e74c6db4594a6d09f39b58c0772679006cb0b4fc40901ba608794d87dad2"},
+ {file = "pymongo-4.6.3-cp37-cp37m-manylinux2014_s390x.whl", hash = "sha256:9c9340c7161e112e36ebb97fbba1cdbe7db3dfacb694d2918b1f155a01f3d859"},
+ {file = "pymongo-4.6.3-cp37-cp37m-manylinux2014_x86_64.whl", hash = "sha256:26d036e0f5de09d0b21d0fc30314fcf2ae6359e4d43ae109aa6cf27b4ce02d30"},
+ {file = "pymongo-4.6.3-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b7cf28d9c90e40d4e385b858e4095739829f466f23e08674085161d86bb4bb10"},
+ {file = "pymongo-4.6.3-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9066dff9dc0a182478ca5885d0b8a2b820b462e19459ada109df7a3ced31b272"},
+ {file = "pymongo-4.6.3-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e1e1586ebdebe0447a24842480defac17c496430a218486c96e2da3f164c0f05"},
+ {file = "pymongo-4.6.3-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8b3853fb66bf34ce1b6e573e1bbb3cb28763be9d1f57758535757faf1ab2f24a"},
+ {file = "pymongo-4.6.3-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:462684a6f5ce6f2661c30eab4d1d459231e0eed280f338e716e31a24fc09ccb3"},
+ {file = "pymongo-4.6.3-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:0a4ea44e5a913bdb7c9abd34c69e9fcfac10dfaf49765463e0dc1ea922dd2a9d"},
+ {file = "pymongo-4.6.3-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:098d420a8214ad25f872de7e8b309441995d12ece0376218a04d9ed5d2222cf3"},
+ {file = "pymongo-4.6.3-cp37-cp37m-win32.whl", hash = "sha256:7330245253fbe2e09845069d2f4d35dd27f63e377034c94cb0ddac18bc8b0d82"},
+ {file = "pymongo-4.6.3-cp37-cp37m-win_amd64.whl", hash = "sha256:151361c101600a85cb1c1e0db4e4b28318b521fcafa9b62d389f7342faaaee80"},
+ {file = "pymongo-4.6.3-cp38-cp38-macosx_11_0_universal2.whl", hash = "sha256:4d167d546352869125dc86f6fda6dffc627d8a9c8963eaee665825f2520d542b"},
+ {file = "pymongo-4.6.3-cp38-cp38-manylinux1_i686.whl", hash = "sha256:eaf3d594ebfd5e1f3503d81e06a5d78e33cda27418b36c2491c3d4ad4fca5972"},
+ {file = "pymongo-4.6.3-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:7ee79e02a7c5ed34706ecb5dad19e6c7d267cf86d28c075ef3127c58f3081279"},
+ {file = "pymongo-4.6.3-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:af5c5112db04cf62a5d9d224a24f289aaecb47d152c08a457cca81cee061d5bd"},
+ {file = "pymongo-4.6.3-cp38-cp38-manylinux2014_i686.whl", hash = "sha256:6b5aec78aa4840e8d6c3881900259892ab5733a366696ca10d99d68c3d73eaaf"},
+ {file = "pymongo-4.6.3-cp38-cp38-manylinux2014_ppc64le.whl", hash = "sha256:9757602fb45c8ecc1883fe6db7c59c19d87eb3c645ec9342d28a6026837da931"},
+ {file = "pymongo-4.6.3-cp38-cp38-manylinux2014_s390x.whl", hash = "sha256:dde9fb6e105ce054339256a8b7a9775212ebb29596ef4e402d7bbc63b354d202"},
+ {file = "pymongo-4.6.3-cp38-cp38-manylinux2014_x86_64.whl", hash = "sha256:7df8b166d3db6cfead4cf55b481408d8f0935d8bd8d6dbf64507c49ef82c7200"},
+ {file = "pymongo-4.6.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:53451190b8628e1ce7d1fe105dc376c3f10705127bd3b51fe3e107b9ff1851e6"},
+ {file = "pymongo-4.6.3-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:75107a386d4ccf5291e75cce8ca3898430e7907f4cc1208a17c9efad33a1ea84"},
+ {file = "pymongo-4.6.3-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4a0660ce32d8459b7f12dc3ca0141528fead62d3cce31b548f96f30902074cc0"},
+ {file = "pymongo-4.6.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:aa310096450e9c461b7dfd66cbc1c41771fe36c06200440bb3e062b1d4a06b6e"},
+ {file = "pymongo-4.6.3-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5f465cca9b178e7bb782f952dd58e9e92f8ba056e585959465f2bb50feddef5f"},
+ {file = "pymongo-4.6.3-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:c67c19f653053ef2ebd7f1837c2978400058d6d7f66ec5760373a21eaf660158"},
+ {file = "pymongo-4.6.3-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:c701de8e483fb5e53874aab642235361aac6de698146b02c644389eaa8c137b6"},
+ {file = "pymongo-4.6.3-cp38-cp38-win32.whl", hash = "sha256:90525454546536544307e6da9c81f331a71a1b144e2d038fec587cc9f9250285"},
+ {file = "pymongo-4.6.3-cp38-cp38-win_amd64.whl", hash = "sha256:3e1ba5a037c526a3f4060c28f8d45d71ed9626e2bf954b0cd9a8dcc3b45172ee"},
+ {file = "pymongo-4.6.3-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:14a82593528cddc93cfea5ee78fac95ae763a3a4e124ca79ee0b24fbbc6da1c9"},
+ {file = "pymongo-4.6.3-cp39-cp39-manylinux1_i686.whl", hash = "sha256:cd6c15242d9306ff1748681c3235284cbe9f807aeaa86cd17d85e72af626e9a7"},
+ {file = "pymongo-4.6.3-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:6de33f1b2eed91b802ec7abeb92ffb981d052f3604b45588309aae9e0f6e3c02"},
+ {file = "pymongo-4.6.3-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:0182899aafe830f25cf96c5976d724efeaaf7b6646c15424ad8dd25422b2efe1"},
+ {file = "pymongo-4.6.3-cp39-cp39-manylinux2014_i686.whl", hash = "sha256:8d0ea740a2faa56f930dc82c5976d96c017ece26b29a1cddafb58721c7aab960"},
+ {file = "pymongo-4.6.3-cp39-cp39-manylinux2014_ppc64le.whl", hash = "sha256:5c8a4982f5eb767c6fbfb8fb378683d09bcab7c3251ba64357eef600d43f6c23"},
+ {file = "pymongo-4.6.3-cp39-cp39-manylinux2014_s390x.whl", hash = "sha256:becfa816545a48c8e740ac2fd624c1c121e1362072d68ffcf37a6b1be8ea187e"},
+ {file = "pymongo-4.6.3-cp39-cp39-manylinux2014_x86_64.whl", hash = "sha256:ff7d1f449fcad23d9bc8e8dc2b9972be38bcd76d99ea5f7d29b2efa929c2a7ff"},
+ {file = "pymongo-4.6.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e097f877de4d6af13a33ef938bf2a2350f424be5deabf8b857da95f5b080487a"},
+ {file = "pymongo-4.6.3-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:705a9bfd619301ee7e985d6f91f68b15dfcb2f6f36b8cc225cc82d4260d2bce5"},
+ {file = "pymongo-4.6.3-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2ef1b4992ee1cb8bb16745e70afa0c02c5360220a7a8bb4775888721f052d0a6"},
+ {file = "pymongo-4.6.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b3d10bdd46cbc35a2109737d36ffbef32e7420569a87904738ad444ccb7ac2c5"},
+ {file = "pymongo-4.6.3-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:17c1c143ba77d6e21fc8b48e93f0a5ed982a23447434e9ee4fbb6d633402506b"},
+ {file = "pymongo-4.6.3-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:9e51e30d67b468a2a634ade928b30cb3e420127f148a9aec60de33f39087bdc4"},
+ {file = "pymongo-4.6.3-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:bec8e4e88984be157408f1923d25869e1b575c07711cdbdde596f66931800934"},
+ {file = "pymongo-4.6.3-cp39-cp39-win32.whl", hash = "sha256:98877a9c4ad42df8253a12d8d17a3265781d1feb5c91c767bd153f88feb0b670"},
+ {file = "pymongo-4.6.3-cp39-cp39-win_amd64.whl", hash = "sha256:6d5b35da9e16cda630baed790ffc3d0d01029d269523a7cec34d2ec7e6823e75"},
+ {file = "pymongo-4.6.3.tar.gz", hash = "sha256:400074090b9a631f120b42c61b222fd743490c133a5d2f99c0208cefcccc964e"},
+]
+
+[package.dependencies]
+dnspython = ">=1.16.0,<3.0.0"
+
+[package.extras]
+aws = ["pymongo-auth-aws (<2.0.0)"]
+encryption = ["certifi", "pymongo[aws]", "pymongocrypt (>=1.6.0,<2.0.0)"]
+gssapi = ["pykerberos", "winkerberos (>=0.5.0)"]
+ocsp = ["certifi", "cryptography (>=2.5)", "pyopenssl (>=17.2.0)", "requests (<3.0.0)", "service-identity (>=18.1.0)"]
+snappy = ["python-snappy"]
+test = ["pytest (>=7)"]
+zstd = ["zstandard"]
+
+[[package]]
+name = "pymssql"
+version = "2.3.1"
+description = "DB-API interface to Microsoft SQL Server for Python. (new Cython-based version)"
+optional = false
+python-versions = "*"
+files = [
+ {file = "pymssql-2.3.1-cp310-cp310-macosx_13_0_x86_64.whl", hash = "sha256:001b3321a5f620b80d1427933fcca11b05f29a808d7772a84d18d01e640ee60a"},
+ {file = "pymssql-2.3.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:15466dd41be5e32302f0c4791f612aadd608a0e6ec0b10d769e76cbb4c86aa97"},
+ {file = "pymssql-2.3.1-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:74349040d4ff6f05894aefb5109ecffcd416e1e366d9951085d3225a9d09c46b"},
+ {file = "pymssql-2.3.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc79dbe5eca8825b73830c8bb147b6f588300dc7510393822682162dc4ff003f"},
+ {file = "pymssql-2.3.1-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:0b93ebe2feb45e772ca708bc4cd70f3e4c72796ec1b157fd5d80cdc589c786aa"},
+ {file = "pymssql-2.3.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:44b1c8752c0fc6750902c1c521f258bdf4271bfbf7b2a5fee469b6ad00631aab"},
+ {file = "pymssql-2.3.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:fdfadb055a9ecad58356decfecc41626999ad7b548cc7ea898cf159e2217f7bb"},
+ {file = "pymssql-2.3.1-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:46f1074c6763e9a899128f22a0f72e9fb0035535f48efabd6a294db1c149e6f1"},
+ {file = "pymssql-2.3.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:ebb11b61d99ec5bbe0b8c411ff748a90263cdaf474881de231da8184e721c42c"},
+ {file = "pymssql-2.3.1-cp310-cp310-win32.whl", hash = "sha256:2ef07fdee3e9652d39b4c081c5c5e1a1031abd122b402ed66813bceb3874ccea"},
+ {file = "pymssql-2.3.1-cp310-cp310-win_amd64.whl", hash = "sha256:791522339215cb7f88db54c831a2347e0c4d69dd3092a343eea5b9339adf4412"},
+ {file = "pymssql-2.3.1-cp311-cp311-macosx_13_0_universal2.whl", hash = "sha256:0433ffa1c86290a93e81176f377621cb70405be66ade8f3070d3f5ec9cfebdba"},
+ {file = "pymssql-2.3.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6182d82ebfbe46f0e7748d068c6a1c16c0f4fe1f34f1c390f63375cee79b44b0"},
+ {file = "pymssql-2.3.1-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:cfbe07dcf0aaee8ce630624669cb2fb77b76743d4dd925f99331422be8704de3"},
+ {file = "pymssql-2.3.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7d999c8e5d5d48e9305c4132392825de402f13feea15694e4e7103029b6eae06"},
+ {file = "pymssql-2.3.1-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:2dced0a76d8e99c283103a2e3c825ca22c67f1f8fc5cff657510f4d2ffb9d188"},
+ {file = "pymssql-2.3.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:880d3173025dea3babf5ab862875b3c76a5cf8df5b292418050c7793c651c0b2"},
+ {file = "pymssql-2.3.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:9f89c698e29ce5c576e4980ded89c00b45e482ec02759bfbfc1aa326648cf64a"},
+ {file = "pymssql-2.3.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:3f4f2a38ce6e39ed2414c20ca16deaea4340868033a4bb23d5e4e30c72290caf"},
+ {file = "pymssql-2.3.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:e34e8aa1d3da555dbf23141b02f401267c0be32104b4f030afd0bae62d26d735"},
+ {file = "pymssql-2.3.1-cp311-cp311-win32.whl", hash = "sha256:72e57e20802bf97399e050a0760a4541996fc27bc605a1a25e48ca6fe4913c48"},
+ {file = "pymssql-2.3.1-cp311-cp311-win_amd64.whl", hash = "sha256:b5d3604bca2fa8d5ba2eed1582a3c8a83970a8d2edabfcfd87c1edecb7617d16"},
+ {file = "pymssql-2.3.1-cp312-cp312-macosx_13_0_universal2.whl", hash = "sha256:c28f1b9560b82fe1a1e51d8c56f6d36bca7c507a8cdf2caa2a0642503c220d5c"},
+ {file = "pymssql-2.3.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e3509b75747eb22ae89f3d47ae316a4b9eac7d952269e88b356ef117a1b8e3b8"},
+ {file = "pymssql-2.3.1-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:cca3bed27e1ab867e482fa8b529d408489ad57e8b60452f75ef288da90573db6"},
+ {file = "pymssql-2.3.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4fe3276915e6040daec409203e3143aa2826984adb8d223c155dab91010110a4"},
+ {file = "pymssql-2.3.1-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:d36d566d0d6997c95442c3d2902800e6b072ccc017c6284e5b1bd4e17dc8fada"},
+ {file = "pymssql-2.3.1-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:3564df40a678623a769acd9677dc68228b2694170132c6f296eb62bf766d31e4"},
+ {file = "pymssql-2.3.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:3dbd4106faabf97f028d0ac59b30d132cfb5e48cf5314b0476f293123dbf3422"},
+ {file = "pymssql-2.3.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:acd1690d9b1b2ece9d0e1fd7d68571fc9fa56b6ba8697a3132446419ff7fb3f4"},
+ {file = "pymssql-2.3.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:126e0b78773975136e6385da7286c277e2e0320c1f4bee0e4dc61a5edcf98c41"},
+ {file = "pymssql-2.3.1-cp312-cp312-win32.whl", hash = "sha256:21803b731b8c8780fc974d9b4931fa8f1ca29c227502a4c317e12773c8bdef43"},
+ {file = "pymssql-2.3.1-cp312-cp312-win_amd64.whl", hash = "sha256:6b0224fc5ce4cf0703278859f145e3e921c04d9feb59739a104d3020bbf0c0c1"},
+ {file = "pymssql-2.3.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:709c1df3134e330ee9590437253be363b558154bde5bb54856fc5fe68a03c971"},
+ {file = "pymssql-2.3.1-cp313-cp313-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b9381eafaf529815f2d61f22b99e0538e744b31234f17d4384f5b0496bd1fbed"},
+ {file = "pymssql-2.3.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c3bf78789014f202855f5d00de982bbcd95177fe8bcf920f0ce730b72456c173"},
+ {file = "pymssql-2.3.1-cp313-cp313-manylinux_2_28_x86_64.whl", hash = "sha256:4b44280eedd0a3f031e9464d4fc632a215fadcfb375bb479065b61a6337df402"},
+ {file = "pymssql-2.3.1-cp313-cp313-musllinux_1_1_i686.whl", hash = "sha256:922f536b925880c260968c8f2130b1c9d6315b83f300f18365b5421933f034a2"},
+ {file = "pymssql-2.3.1-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:f00f618d1c0f58617de548e5094f7d55ab6034b94068d7eebba60a034866b10b"},
+ {file = "pymssql-2.3.1-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:b363db86a1a3fe16df9b4253e17b02a268d0f2e2753679b8e85cee268e2fe8c4"},
+ {file = "pymssql-2.3.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:396a26cf576196cc4a3d77890b2b8eb62655ff02846288757dd8b587352cc4f5"},
+ {file = "pymssql-2.3.1-cp36-cp36m-win32.whl", hash = "sha256:5a1a1c697596f23058697709144d00a44e7af6ecab6a517f2ecf28dcf8fb4280"},
+ {file = "pymssql-2.3.1-cp36-cp36m-win_amd64.whl", hash = "sha256:4f92e8657d42341dce01f7f57d03f84b35c0ed00a7bef24533ff80a37ffcfb4e"},
+ {file = "pymssql-2.3.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:095b50e43bfbc4d6f953810175ba275bb3e6136206f3a7146bdd1031e3f0dd9b"},
+ {file = "pymssql-2.3.1-cp37-cp37m-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:47ac89098732c327725b53464932c6a532367271a3d5c5a988f61e23e0e0e286"},
+ {file = "pymssql-2.3.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2f284fc052cf1dbc702a2f4d13442d87fc6847ba9054faccfc8d8446fcf00894"},
+ {file = "pymssql-2.3.1-cp37-cp37m-manylinux_2_28_x86_64.whl", hash = "sha256:40778b65c09eef9e7c25c444b96e76f81d8b5cf1828cb555123d052b7d3b5661"},
+ {file = "pymssql-2.3.1-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:22c8609bc7f8b13d383729ba09042b4d796a607c93779c616be51b37caa6b384"},
+ {file = "pymssql-2.3.1-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:ab2aea2ae8bc1aba0105fccbf9e4f6716648b2b8f9421fd3418c6cc798fca43e"},
+ {file = "pymssql-2.3.1-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:e594de69832ad13761412f4d5c981a6e5d931b22f25136c8cd3531d9c6cfdf63"},
+ {file = "pymssql-2.3.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:68f879b4ec4b2191a1d8b3bb24db04c3631737653785369c275bd5a574e54093"},
+ {file = "pymssql-2.3.1-cp37-cp37m-musllinux_1_2_i686.whl", hash = "sha256:9ef157e63a1c19e7ab4823237b5f03a3bca45e1e94a4d5ed73baab6d019830c7"},
+ {file = "pymssql-2.3.1-cp37-cp37m-musllinux_1_2_x86_64.whl", hash = "sha256:66afe6ee539e37cdfea0c6b2d596ec0d2a6223f09450c4df7cf872bad12691fe"},
+ {file = "pymssql-2.3.1-cp37-cp37m-win32.whl", hash = "sha256:b9cc14a9f63e632200f54311da9868ece2715fa9560f6272c9bb82c57edc0543"},
+ {file = "pymssql-2.3.1-cp37-cp37m-win_amd64.whl", hash = "sha256:54bc10f28c0acc1347d3c7056e702ad21f128e6bf7737b4edc8c267372db9ce8"},
+ {file = "pymssql-2.3.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c8d955e751fb125be2a8513b5a338457a3fe73e5daa094815f96a86e496f7149"},
+ {file = "pymssql-2.3.1-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4c13ca6eaf0d7f16af9edf87d58070329bfacb7f27b90e1de16318d64c7b873b"},
+ {file = "pymssql-2.3.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5ecb0cdea24e2c019fb403fd642c04a64e8767c79f8dd38451eb5d72ceffce34"},
+ {file = "pymssql-2.3.1-cp38-cp38-manylinux_2_28_x86_64.whl", hash = "sha256:afd57a728e81d73a0f43f3d28216c402fea03bd06a382da881dfc8215fb4080d"},
+ {file = "pymssql-2.3.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:5e6f6d9de73309cda602bbb769cb707f08d6899664f3ac6e9ed3e3b1ad472cee"},
+ {file = "pymssql-2.3.1-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:02b808dbb86bbe751dd3fd117e83926b0a19ca9d9b833fae945bf2e31be66bf6"},
+ {file = "pymssql-2.3.1-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:b0f1ba9befe23e6c4e75c2a626ffe59d159ab3a425a0208515888ec8670bf5bf"},
+ {file = "pymssql-2.3.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:8eecb4f3b41b8b29a0cbe502ae37b6477063d690151f668c410328f101f6198b"},
+ {file = "pymssql-2.3.1-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:a36c8b089e2d7b606aee823eefdfd72f5df110241fc5d913094b0b9da2692794"},
+ {file = "pymssql-2.3.1-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:425de7d3f38cd1867c30b7c352d66020f38fdcdf804282ee232f5e25672930c1"},
+ {file = "pymssql-2.3.1-cp38-cp38-win32.whl", hash = "sha256:ce397eb6a2a90fcd2a83d8812c1b8752af3b5362e630da49aa556c947e32ce3d"},
+ {file = "pymssql-2.3.1-cp38-cp38-win_amd64.whl", hash = "sha256:02c4ab7a58bfb57edb2deee7e2aceed2512960e7c2c1fd2cb23c647471a36ba2"},
+ {file = "pymssql-2.3.1-cp39-cp39-macosx_13_0_x86_64.whl", hash = "sha256:750078568dafc1e0a24cf0f51eecfe548b13440976a2c8b19cc6e5d38e7b10bc"},
+ {file = "pymssql-2.3.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5a651dd98f67eef98f429c949fb50ea0a92fcf8668834cc35909237c24c1b906"},
+ {file = "pymssql-2.3.1-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a1ecedaeec8f4d8643d088b4985f0b742d9669bff701153a845b0d1900260b81"},
+ {file = "pymssql-2.3.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:015f6ccd1bcb53f22a3226653d0d8155da40f4afbc1fd0cec25de5fe8decf126"},
+ {file = "pymssql-2.3.1-cp39-cp39-manylinux_2_28_x86_64.whl", hash = "sha256:da44761ca2f996d88f90c0f972b583dfe9c389db84888bd8209cdb83508f7c7a"},
+ {file = "pymssql-2.3.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:9557b738475e06dfd53f97d8a2c2b259b9b9fd79bf1a4e084ae4e9f164be644d"},
+ {file = "pymssql-2.3.1-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:a1f3f2e2792364a50417f3c2dc0d8f125955c1b641f36eb313daf666045b9748"},
+ {file = "pymssql-2.3.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:be8af4dea025f171ffb1e5b17cb0c9cbc92b0e3c32d0517bc678fff6f660e5fb"},
+ {file = "pymssql-2.3.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:a87950fb1a2b1c4028064fac971f3e191adebb58657ca985330f70e02f95223e"},
+ {file = "pymssql-2.3.1-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:9ea04bf8e13d567650631a944c88886c99a5622d9491e896a9b5a9ffbef2e352"},
+ {file = "pymssql-2.3.1-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:4d93a82f8ad7d3606354b81bbbe7e7832f70fd6e9ccb2e04a2975117da5df973"},
+ {file = "pymssql-2.3.1-cp39-cp39-win32.whl", hash = "sha256:6a2657152d4007314b66f353a25fc2742155c2770083320b5255fc576103661e"},
+ {file = "pymssql-2.3.1-cp39-cp39-win_amd64.whl", hash = "sha256:6c9ffb3ef110bf0fc2a41c845f231cf749162b1d71e02b0aceb6c0ebc603e2e9"},
+ {file = "pymssql-2.3.1.tar.gz", hash = "sha256:ddee15c4c193e14c92fe2cd720ca9be1dba1e0f4178240380b8f5f6f00da04c6"},
+]
+
+[[package]]
+name = "pynacl"
+version = "1.5.0"
+description = "Python binding to the Networking and Cryptography (NaCl) library"
+optional = false
+python-versions = ">=3.6"
+files = [
+ {file = "PyNaCl-1.5.0-cp36-abi3-macosx_10_10_universal2.whl", hash = "sha256:401002a4aaa07c9414132aaed7f6836ff98f59277a234704ff66878c2ee4a0d1"},
+ {file = "PyNaCl-1.5.0-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:52cb72a79269189d4e0dc537556f4740f7f0a9ec41c1322598799b0bdad4ef92"},
+ {file = "PyNaCl-1.5.0-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a36d4a9dda1f19ce6e03c9a784a2921a4b726b02e1c736600ca9c22029474394"},
+ {file = "PyNaCl-1.5.0-cp36-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:0c84947a22519e013607c9be43706dd42513f9e6ae5d39d3613ca1e142fba44d"},
+ {file = "PyNaCl-1.5.0-cp36-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:06b8f6fa7f5de8d5d2f7573fe8c863c051225a27b61e6860fd047b1775807858"},
+ {file = "PyNaCl-1.5.0-cp36-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:a422368fc821589c228f4c49438a368831cb5bbc0eab5ebe1d7fac9dded6567b"},
+ {file = "PyNaCl-1.5.0-cp36-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:61f642bf2378713e2c2e1de73444a3778e5f0a38be6fee0fe532fe30060282ff"},
+ {file = "PyNaCl-1.5.0-cp36-abi3-win32.whl", hash = "sha256:e46dae94e34b085175f8abb3b0aaa7da40767865ac82c928eeb9e57e1ea8a543"},
+ {file = "PyNaCl-1.5.0-cp36-abi3-win_amd64.whl", hash = "sha256:20f42270d27e1b6a29f54032090b972d97f0a1b0948cc52392041ef7831fee93"},
+ {file = "PyNaCl-1.5.0.tar.gz", hash = "sha256:8ac7448f09ab85811607bdd21ec2464495ac8b7c66d146bf545b0f08fb9220ba"},
+]
+
+[package.dependencies]
+cffi = ">=1.4.1"
+
+[package.extras]
+docs = ["sphinx (>=1.6.5)", "sphinx-rtd-theme"]
+tests = ["hypothesis (>=3.27.0)", "pytest (>=3.2.1,!=3.3.0)"]
+
+[[package]]
+name = "pyodbc"
+version = "5.1.0"
+description = "DB API module for ODBC"
+optional = false
+python-versions = ">=3.8"
+files = [
+ {file = "pyodbc-5.1.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:02fe9821711a2d14415eaeb4deab471d2c8b7034b107e524e414c0e133c42248"},
+ {file = "pyodbc-5.1.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:2cbdbd019756285dc44bc35238a3ed8dfaa454e8c8b2c3462f1710cfeebfb290"},
+ {file = "pyodbc-5.1.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:84df3bbce9bafe65abd25788d55c9f1da304f6115d70f25758ff8c85f3ce0517"},
+ {file = "pyodbc-5.1.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:218bb75d4bc67075529a65ce8ec7daeed1d83c33dd7410450fbf68d43d184d28"},
+ {file = "pyodbc-5.1.0-cp310-cp310-win32.whl", hash = "sha256:eae576b3b67d21d6f237e18bb5f3df8323a2258f52c3e3afeef79269704072a9"},
+ {file = "pyodbc-5.1.0-cp310-cp310-win_amd64.whl", hash = "sha256:c3b65343557f4c7753204e06f4c82c97ed212a636501f4bc27c5ce0e549eb3e8"},
+ {file = "pyodbc-5.1.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:aa6f46377da303bf79bcb4b559899507df4b2559f30dcfdf191358ee4b99f3ab"},
+ {file = "pyodbc-5.1.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:b19d7f44cfee89901e482f554a88177e83fae76b03c3f830e0023a195d840220"},
+ {file = "pyodbc-5.1.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3c36448322f8d6479d87c528cf52401a6ea4f509b9637750b67340382b4e1b40"},
+ {file = "pyodbc-5.1.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1c5e0cb79222aad4b31a3602e39b242683c29c6221a16ed43f45f18fd0b73659"},
+ {file = "pyodbc-5.1.0-cp311-cp311-win32.whl", hash = "sha256:92caed9d445815ed3f7e5a1249e29a4600ebc1e99404df81b6ed7671074c9227"},
+ {file = "pyodbc-5.1.0-cp311-cp311-win_amd64.whl", hash = "sha256:a1bd14633e91b7a9814f4fd944c9ebb89fb7f1fd4710c4e3999b5ef041536347"},
+ {file = "pyodbc-5.1.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:d3d9cc4af703c4817b6e604315910b0cf5dcb68056d52b25ca072dd59c52dcbc"},
+ {file = "pyodbc-5.1.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:406b8fa2133a7b6a713aa5187dba2d08cf763b5884606bed77610a7660fdfabe"},
+ {file = "pyodbc-5.1.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f8488c3818f12207650836c5c6f7352f9ff9f56a05a05512145995e497c0bbb1"},
+ {file = "pyodbc-5.1.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b0df69e3a500791b70b5748c68a79483b24428e4c16027b56aa0305e95c143a4"},
+ {file = "pyodbc-5.1.0-cp312-cp312-win32.whl", hash = "sha256:aa4e02d3a9bf819394510b726b25f1566f8b3f0891ca400ad2d4c8b86b535b78"},
+ {file = "pyodbc-5.1.0-cp312-cp312-win_amd64.whl", hash = "sha256:33f4984af38872e7bdec78007a34e4d43ae72bf9d0bae3344e79d9d0db157c0e"},
+ {file = "pyodbc-5.1.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:29425e2d366e7f5828b76c7993f412a3db4f18bd5bcee00186c00b5a5965e205"},
+ {file = "pyodbc-5.1.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:a2bbd2e75c77dee9f3cd100c3246110abaeb9af3f7fa304ccc2934ff9c6a4fa4"},
+ {file = "pyodbc-5.1.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3602136a936bc0c1bb9722eb2fbf2042b3ff1ddccdc4688e514b82d4b831563b"},
+ {file = "pyodbc-5.1.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bed1c843565d3a4fd8c332ebceaf33efe817657a0505eacb97dd1b786a985b0b"},
+ {file = "pyodbc-5.1.0-cp38-cp38-win32.whl", hash = "sha256:735f6da3762e5856b5580be0ed96bb946948346ebd1e526d5169a5513626a67a"},
+ {file = "pyodbc-5.1.0-cp38-cp38-win_amd64.whl", hash = "sha256:c5bb4e43f6c72f5fa2c634570e0d761767d8ea49f39205229b812fb4d3fe05aa"},
+ {file = "pyodbc-5.1.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:33f0f1d7764cefef6f787936bd6359670828a6086be67518ab951f1f7f503cda"},
+ {file = "pyodbc-5.1.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:be3b1c36c31ec7d73d0b34a8ad8743573763fadd8f2bceef1e84408252b48dce"},
+ {file = "pyodbc-5.1.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e71a51c252b503b4d753e21ed31e640015fc0d00202d42ea42f2396fcc924b4a"},
+ {file = "pyodbc-5.1.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:af5282cc8b667af97d76f4955250619a53f25486cbb6b1f45a06b781006ffa0b"},
+ {file = "pyodbc-5.1.0-cp39-cp39-win32.whl", hash = "sha256:96b2a8dc27693a517e3aad3944a7faa8be95d40d7ec1eda51a1885162eedfa33"},
+ {file = "pyodbc-5.1.0-cp39-cp39-win_amd64.whl", hash = "sha256:e738c5eedb4a0cbab20cc008882f49b106054499db56864057c2530ff208cf32"},
+ {file = "pyodbc-5.1.0.tar.gz", hash = "sha256:397feee44561a6580be08cedbe986436859563f4bb378f48224655c8e987ea60"},
+]
+
+[[package]]
+name = "pyopenssl"
+version = "24.2.1"
+description = "Python wrapper module around the OpenSSL library"
+optional = false
+python-versions = ">=3.7"
+files = [
+ {file = "pyOpenSSL-24.2.1-py3-none-any.whl", hash = "sha256:967d5719b12b243588573f39b0c677637145c7a1ffedcd495a487e58177fbb8d"},
+ {file = "pyopenssl-24.2.1.tar.gz", hash = "sha256:4247f0dbe3748d560dcbb2ff3ea01af0f9a1a001ef5f7c4c647956ed8cbf0e95"},
+]
+
+[package.dependencies]
+cryptography = ">=41.0.5,<44"
+
+[package.extras]
+docs = ["sphinx (!=5.2.0,!=5.2.0.post0,!=7.2.5)", "sphinx-rtd-theme"]
+test = ["pretend", "pytest (>=3.0.1)", "pytest-rerunfailures"]
+
+[[package]]
+name = "pyparsing"
+version = "2.4.7"
+description = "Python parsing module"
+optional = false
+python-versions = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*"
+files = [
+ {file = "pyparsing-2.4.7-py2.py3-none-any.whl", hash = "sha256:ef9d7589ef3c200abe66653d3f1ab1033c3c419ae9b9bdb1240a85b024efc88b"},
+ {file = "pyparsing-2.4.7.tar.gz", hash = "sha256:c203ec8783bf771a155b207279b9bccb8dea02d8f0c9e5f8ead507bc3246ecc1"},
+]
+
+[[package]]
+name = "pypd"
+version = "1.1.0"
+description = "A python client for PagerDuty API"
+optional = false
+python-versions = "*"
+files = [
+ {file = "pypd-1.1.0-py2-none-any.whl", hash = "sha256:a16b86f5061fb272c7050d097d07868822ad2b127dad656598f8486fb3678866"},
+ {file = "pypd-1.1.0.tar.gz", hash = "sha256:e955f7bd2adb059e576308ef11e437bdd8d1ddca14b599a9250f6f78a6c70694"},
+]
+
+[package.dependencies]
+requests = "*"
+six = "*"
+
+[[package]]
+name = "pyrsistent"
+version = "0.20.0"
+description = "Persistent/Functional/Immutable data structures"
+optional = false
+python-versions = ">=3.8"
+files = [
+ {file = "pyrsistent-0.20.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:8c3aba3e01235221e5b229a6c05f585f344734bd1ad42a8ac51493d74722bbce"},
+ {file = "pyrsistent-0.20.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c1beb78af5423b879edaf23c5591ff292cf7c33979734c99aa66d5914ead880f"},
+ {file = "pyrsistent-0.20.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:21cc459636983764e692b9eba7144cdd54fdec23ccdb1e8ba392a63666c60c34"},
+ {file = "pyrsistent-0.20.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f5ac696f02b3fc01a710427585c855f65cd9c640e14f52abe52020722bb4906b"},
+ {file = "pyrsistent-0.20.0-cp310-cp310-win32.whl", hash = "sha256:0724c506cd8b63c69c7f883cc233aac948c1ea946ea95996ad8b1380c25e1d3f"},
+ {file = "pyrsistent-0.20.0-cp310-cp310-win_amd64.whl", hash = "sha256:8441cf9616d642c475684d6cf2520dd24812e996ba9af15e606df5f6fd9d04a7"},
+ {file = "pyrsistent-0.20.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:0f3b1bcaa1f0629c978b355a7c37acd58907390149b7311b5db1b37648eb6958"},
+ {file = "pyrsistent-0.20.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5cdd7ef1ea7a491ae70d826b6cc64868de09a1d5ff9ef8d574250d0940e275b8"},
+ {file = "pyrsistent-0.20.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cae40a9e3ce178415040a0383f00e8d68b569e97f31928a3a8ad37e3fde6df6a"},
+ {file = "pyrsistent-0.20.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6288b3fa6622ad8a91e6eb759cfc48ff3089e7c17fb1d4c59a919769314af224"},
+ {file = "pyrsistent-0.20.0-cp311-cp311-win32.whl", hash = "sha256:7d29c23bdf6e5438c755b941cef867ec2a4a172ceb9f50553b6ed70d50dfd656"},
+ {file = "pyrsistent-0.20.0-cp311-cp311-win_amd64.whl", hash = "sha256:59a89bccd615551391f3237e00006a26bcf98a4d18623a19909a2c48b8e986ee"},
+ {file = "pyrsistent-0.20.0-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:09848306523a3aba463c4b49493a760e7a6ca52e4826aa100ee99d8d39b7ad1e"},
+ {file = "pyrsistent-0.20.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a14798c3005ec892bbada26485c2eea3b54109cb2533713e355c806891f63c5e"},
+ {file = "pyrsistent-0.20.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b14decb628fac50db5e02ee5a35a9c0772d20277824cfe845c8a8b717c15daa3"},
+ {file = "pyrsistent-0.20.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2e2c116cc804d9b09ce9814d17df5edf1df0c624aba3b43bc1ad90411487036d"},
+ {file = "pyrsistent-0.20.0-cp312-cp312-win32.whl", hash = "sha256:e78d0c7c1e99a4a45c99143900ea0546025e41bb59ebc10182e947cf1ece9174"},
+ {file = "pyrsistent-0.20.0-cp312-cp312-win_amd64.whl", hash = "sha256:4021a7f963d88ccd15b523787d18ed5e5269ce57aa4037146a2377ff607ae87d"},
+ {file = "pyrsistent-0.20.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:79ed12ba79935adaac1664fd7e0e585a22caa539dfc9b7c7c6d5ebf91fb89054"},
+ {file = "pyrsistent-0.20.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f920385a11207dc372a028b3f1e1038bb244b3ec38d448e6d8e43c6b3ba20e98"},
+ {file = "pyrsistent-0.20.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4f5c2d012671b7391803263419e31b5c7c21e7c95c8760d7fc35602353dee714"},
+ {file = "pyrsistent-0.20.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ef3992833fbd686ee783590639f4b8343a57f1f75de8633749d984dc0eb16c86"},
+ {file = "pyrsistent-0.20.0-cp38-cp38-win32.whl", hash = "sha256:881bbea27bbd32d37eb24dd320a5e745a2a5b092a17f6debc1349252fac85423"},
+ {file = "pyrsistent-0.20.0-cp38-cp38-win_amd64.whl", hash = "sha256:6d270ec9dd33cdb13f4d62c95c1a5a50e6b7cdd86302b494217137f760495b9d"},
+ {file = "pyrsistent-0.20.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:ca52d1ceae015859d16aded12584c59eb3825f7b50c6cfd621d4231a6cc624ce"},
+ {file = "pyrsistent-0.20.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b318ca24db0f0518630e8b6f3831e9cba78f099ed5c1d65ffe3e023003043ba0"},
+ {file = "pyrsistent-0.20.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fed2c3216a605dc9a6ea50c7e84c82906e3684c4e80d2908208f662a6cbf9022"},
+ {file = "pyrsistent-0.20.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2e14c95c16211d166f59c6611533d0dacce2e25de0f76e4c140fde250997b3ca"},
+ {file = "pyrsistent-0.20.0-cp39-cp39-win32.whl", hash = "sha256:f058a615031eea4ef94ead6456f5ec2026c19fb5bd6bfe86e9665c4158cf802f"},
+ {file = "pyrsistent-0.20.0-cp39-cp39-win_amd64.whl", hash = "sha256:58b8f6366e152092194ae68fefe18b9f0b4f89227dfd86a07770c3d86097aebf"},
+ {file = "pyrsistent-0.20.0-py3-none-any.whl", hash = "sha256:c55acc4733aad6560a7f5f818466631f07efc001fd023f34a6c203f8b6df0f0b"},
+ {file = "pyrsistent-0.20.0.tar.gz", hash = "sha256:4c48f78f62ab596c679086084d0dd13254ae4f3d6c72a83ffdf5ebdef8f265a4"},
+]
+
+[[package]]
+name = "pysaml2"
+version = "7.3.1"
+description = "Python implementation of SAML Version 2 Standard"
+optional = false
+python-versions = ">=3.6.2,<4.0.0"
+files = [
+ {file = "pysaml2-7.3.1-py3-none-any.whl", hash = "sha256:2cc66e7a371d3f5ff9601f0ed93b5276cca816fce82bb38447d5a0651f2f5193"},
+ {file = "pysaml2-7.3.1.tar.gz", hash = "sha256:eab22d187c6dd7707c58b5bb1688f9b8e816427667fc99d77f54399e15cd0a0a"},
+]
+
+[package.dependencies]
+cryptography = ">=3.1"
+defusedxml = "*"
+importlib-resources = {version = "*", markers = "python_version < \"3.9\""}
+pyopenssl = "*"
+python-dateutil = "*"
+pytz = "*"
+requests = ">=2,<3"
+xmlschema = ">=1.2.1"
+
+[package.extras]
+s2repoze = ["paste", "repoze.who", "zope.interface"]
+
+[[package]]
+name = "pystache"
+version = "0.6.0"
+description = "Mustache for Python"
+optional = false
+python-versions = ">=3.6"
+files = [
+ {file = "pystache-0.6.0.tar.gz", hash = "sha256:93bf92b2149a4c4b58d12142e2c4c6dd5c08d89e4c95afccd4b6efe2ee1d470d"},
+]
+
+[package.extras]
+cov = ["coverage", "coverage_python_version"]
+test = ["nose"]
+
+[[package]]
+name = "pytest"
+version = "7.4.0"
+description = "pytest: simple powerful testing with Python"
+optional = false
+python-versions = ">=3.7"
+files = [
+ {file = "pytest-7.4.0-py3-none-any.whl", hash = "sha256:78bf16451a2eb8c7a2ea98e32dc119fd2aa758f1d5d66dbf0a59d69a3969df32"},
+ {file = "pytest-7.4.0.tar.gz", hash = "sha256:b4bf8c45bd59934ed84001ad51e11b4ee40d40a1229d2c79f9c592b0a3f6bd8a"},
+]
+
+[package.dependencies]
+colorama = {version = "*", markers = "sys_platform == \"win32\""}
+exceptiongroup = {version = ">=1.0.0rc8", markers = "python_version < \"3.11\""}
+iniconfig = "*"
+packaging = "*"
+pluggy = ">=0.12,<2.0"
+tomli = {version = ">=1.0.0", markers = "python_version < \"3.11\""}
+
+[package.extras]
+testing = ["argcomplete", "attrs (>=19.2.0)", "hypothesis (>=3.56)", "mock", "nose", "pygments (>=2.7.2)", "requests", "setuptools", "xmlschema"]
+
+[[package]]
+name = "pytest-cov"
+version = "4.1.0"
+description = "Pytest plugin for measuring coverage."
+optional = false
+python-versions = ">=3.7"
+files = [
+ {file = "pytest-cov-4.1.0.tar.gz", hash = "sha256:3904b13dfbfec47f003b8e77fd5b589cd11904a21ddf1ab38a64f204d6a10ef6"},
+ {file = "pytest_cov-4.1.0-py3-none-any.whl", hash = "sha256:6ba70b9e97e69fcc3fb45bfeab2d0a138fb65c4d0d6a41ef33983ad114be8c3a"},
+]
+
+[package.dependencies]
+coverage = {version = ">=5.2.1", extras = ["toml"]}
+pytest = ">=4.6"
+
+[package.extras]
+testing = ["fields", "hunter", "process-tests", "pytest-xdist", "six", "virtualenv"]
+
+[[package]]
+name = "python-arango"
+version = "6.1.0"
+description = "Python Driver for ArangoDB"
+optional = false
+python-versions = "*"
+files = [
+ {file = "python-arango-6.1.0.tar.gz", hash = "sha256:04dbb017945105925c01d05ac98a62d718d29586cf39ef85ae3a44f032923b1c"},
+ {file = "python_arango-6.1.0-py2.py3-none-any.whl", hash = "sha256:ca31ceb555ff7c9671ada61c6a81abdcd24b308e76a27d91398a98320f9b27db"},
+]
+
+[package.dependencies]
+PyJWT = "*"
+requests = "*"
+requests-toolbelt = "*"
+six = "*"
+
+[[package]]
+name = "python-dateutil"
+version = "2.8.0"
+description = "Extensions to the standard Python datetime module"
+optional = false
+python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*"
+files = [
+ {file = "python-dateutil-2.8.0.tar.gz", hash = "sha256:c89805f6f4d64db21ed966fda138f8a5ed7a4fdbc1a8ee329ce1b74e3c74da9e"},
+ {file = "python_dateutil-2.8.0-py2.py3-none-any.whl", hash = "sha256:7e6584c74aeed623791615e26efd690f29817a27c73085b78e4bad02493df2fb"},
+]
+
+[package.dependencies]
+six = ">=1.5"
+
+[[package]]
+name = "python-dotenv"
+version = "0.19.2"
+description = "Read key-value pairs from a .env file and set them as environment variables"
+optional = false
+python-versions = ">=3.5"
+files = [
+ {file = "python-dotenv-0.19.2.tar.gz", hash = "sha256:a5de49a31e953b45ff2d2fd434bbc2670e8db5273606c1e737cc6b93eff3655f"},
+ {file = "python_dotenv-0.19.2-py2.py3-none-any.whl", hash = "sha256:32b2bdc1873fd3a3c346da1c6db83d0053c3c62f28f1f38516070c4c8971b1d3"},
+]
+
+[package.extras]
+cli = ["click (>=5.0)"]
+
+[[package]]
+name = "python-rapidjson"
+version = "1.20"
+description = "Python wrapper around rapidjson"
+optional = false
+python-versions = ">=3.6"
+files = [
+ {file = "python_rapidjson-1.20-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:eeaa8487fdd8db409bd2e0c41c59cee3b9f1d08401fc75520f7d35c7a22d8789"},
+ {file = "python_rapidjson-1.20-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:425c2bb8e778a04497953482c251944b2736f61012d897f17b73da3eca060c27"},
+ {file = "python_rapidjson-1.20-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4f7cbbff9696ea01dd8a29502cb314471c9a5d4239f2f3b7e35b6adbde2cc620"},
+ {file = "python_rapidjson-1.20-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:83a48f96d0abb8349a4d42f029259b755d8c6fd347f5de2d640e164c3f45e63b"},
+ {file = "python_rapidjson-1.20-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6cb3ad353ec083a6dcf0552f1fce3c490f92e2fccf9a81eac42835297a8431a1"},
+ {file = "python_rapidjson-1.20-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7f7b6574887d8828f34eb3384092d6e6c290e8fbb12703c409dbdde814612657"},
+ {file = "python_rapidjson-1.20-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:403e4986484f01f79fdce00b48c12a1b39d16e822cd37c60843ab26455ab0680"},
+ {file = "python_rapidjson-1.20-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:e3f89a58d7709d5879586e9dbfd11be76a799e8fbdbb5eddaffaeba9b572fba3"},
+ {file = "python_rapidjson-1.20-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:b0d07d4f0ebbb2228d5140463f11ac519147b9d791f7e40b3edf518a806be3cc"},
+ {file = "python_rapidjson-1.20-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:a5fb413414b92763a54d53b732df3c9de1b114012c8881a3d1215a19b9fca494"},
+ {file = "python_rapidjson-1.20-cp310-cp310-win32.whl", hash = "sha256:9831430f17101a6a249e07db9c42d26c3263e6009450722cce0c14726421f434"},
+ {file = "python_rapidjson-1.20-cp310-cp310-win_amd64.whl", hash = "sha256:fbff5caf127c5bed4d6620f95a039dd9e293784d844af50782aaf278a743acb4"},
+ {file = "python_rapidjson-1.20-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:328095d6d558090c29d24d889482b10dcc3ade3b77c93a61ea86794623046628"},
+ {file = "python_rapidjson-1.20-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:fc7a095f77eb3bb6acff94acf868a100faaf06028c4b513428f161cd55030476"},
+ {file = "python_rapidjson-1.20-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ce4cee141c924300cbedba1e5bea05b13484598d1e550afc5b50209ba73c62f2"},
+ {file = "python_rapidjson-1.20-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4355bcfc8629d15f6246011b40e84cc368d842518a91adb15c5eba211305ee5b"},
+ {file = "python_rapidjson-1.20-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7dd9c5e661d17eafa44b2875f6ce55178cc87388575ce3cd3c606d5a33772b49"},
+ {file = "python_rapidjson-1.20-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:bd978c7669cc844f669a48d2a6019fb9134a2385536f806fe265a1e374c3573a"},
+ {file = "python_rapidjson-1.20-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:8fc52405435ce875aa000afa2637ea267eb0d4ab9622f9b97c92d92cb1a9c440"},
+ {file = "python_rapidjson-1.20-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:bef1eca712fb9fd5d2edd724dd1dd8a608215d6afcaee4f351b3e99e3f73f720"},
+ {file = "python_rapidjson-1.20-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:6355cb690bf64629767206524d4d00da909970d46d8fc0b367f339975e4eb419"},
+ {file = "python_rapidjson-1.20-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:f974c4e11be833221062fc4c3129bed172082792b33ef9fc1b8104f49c514f1d"},
+ {file = "python_rapidjson-1.20-cp311-cp311-win32.whl", hash = "sha256:06ee7bcf660ebbdf1953aa7bf74214b722d934928c7b9f2a23b12e0713b61fa4"},
+ {file = "python_rapidjson-1.20-cp311-cp311-win_amd64.whl", hash = "sha256:9df543521fa4b69589c42772b2f32a6c334b3b5fc612cd6dc3705136d0788da3"},
+ {file = "python_rapidjson-1.20-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:6056fcc8caeb9b04775bf655568bba362c7670ab792c1b438671bb056db954cd"},
+ {file = "python_rapidjson-1.20-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:225bd4cbabfe7910261cbcebb8b811d4ff98e90cdd17c233b916c6aa71a9553f"},
+ {file = "python_rapidjson-1.20-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:026077b663acf93a3f2b1adb87282e611a30214b8ae8001b7e4863a3b978e646"},
+ {file = "python_rapidjson-1.20-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:884e1dd4c0770ed424737941af4d5dc9014995f9c33595f151af13f83ce282c3"},
+ {file = "python_rapidjson-1.20-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5f55531c8197cb7a21a5ef0ffa46f2b8fc8c5fe7c6fd08bdbd2063ae65d2ff65"},
+ {file = "python_rapidjson-1.20-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c60121d155562dc694c05ed7df4e39e42ee1d3adff2a060c64a004498e6451f7"},
+ {file = "python_rapidjson-1.20-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:3a6620eed0b04196f37fab7048c1d672d03391bb29d7f09ee8fee8dea33f11f4"},
+ {file = "python_rapidjson-1.20-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:ddb63eff401ce7cf20cdd5e21942fc23fbe0e1dc1d96d7ae838645fb1f74fb47"},
+ {file = "python_rapidjson-1.20-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:05e28c3dbb4a0d74ec13af9668ef2b9f302edf83cf7ce1d8316a95364720eec0"},
+ {file = "python_rapidjson-1.20-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:b733978ecd84fc5df9a778ce821dc1f3113f7bfc2493cac0bb17efb4ae0bb8fa"},
+ {file = "python_rapidjson-1.20-cp312-cp312-win32.whl", hash = "sha256:d87041448cec00e2db5d858625a76dc1b59eef6691a039acff6d92ad8581cfc1"},
+ {file = "python_rapidjson-1.20-cp312-cp312-win_amd64.whl", hash = "sha256:5d3be149ce5475f9605f01240487541057792abad94d3fd0cd56af363cf5a4dc"},
+ {file = "python_rapidjson-1.20-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:daee815b4c20ca6e4dbc6bde373dd3f65b53813d775f1c94b765b33b402513a7"},
+ {file = "python_rapidjson-1.20-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:083df379c769b30f9bc40041c91fd9d8f7bb8ca2b3c7170258842aced2098e05"},
+ {file = "python_rapidjson-1.20-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b9399ad75a2e3377f9e6208caabe73eb9354cd01b732407475ccadcd42c577df"},
+ {file = "python_rapidjson-1.20-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:599ab208ccf6172d6cfac1abe048c837e62612f91f97d198e32773c45346a0b4"},
+ {file = "python_rapidjson-1.20-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bf3c0e2a5b97b0d07311f15f0dce4434e43dec865c3794ad1b10d968460fd665"},
+ {file = "python_rapidjson-1.20-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e8064b8edb57ddd9e3ffa539cf2ec2f03515751fb0698b40ba5cb66a2123af19"},
+ {file = "python_rapidjson-1.20-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:bc79d7f00f7538e027960ca6bcd1e03ed99fcf660d4d882d1c22f641155d0db0"},
+ {file = "python_rapidjson-1.20-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:87aa0b01b8c20984844f1440b8ff6bdb32de911a1750fed344b9daed33b4b52b"},
+ {file = "python_rapidjson-1.20-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:4099cb9eae8a0ce19c09e02729eb6d69d5180424f13a2641a6c407d053e47a82"},
+ {file = "python_rapidjson-1.20-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:4c680cd2b4de760ff6875de71fe6a87bd610aa116593d62e4f81a563be86ae18"},
+ {file = "python_rapidjson-1.20-cp313-cp313-win32.whl", hash = "sha256:9e431a7afc77aa874fed537c9f6bf5fcecaef124ebeae2a2379d3b9e9adce74b"},
+ {file = "python_rapidjson-1.20-cp313-cp313-win_amd64.whl", hash = "sha256:7444bc7e6a04c03d6ed748b5dab0798fa2b3f2b303be8c38d3af405b2cac6d63"},
+ {file = "python_rapidjson-1.20-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:69e702fe74fe8c44c6253bb91364a270dc49f704920c90e01040155bd600a5fd"},
+ {file = "python_rapidjson-1.20-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:b9496b1e9d6247e8802ac559b7eebb5f3cae426d1c1dbde4049c63dff0941370"},
+ {file = "python_rapidjson-1.20-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1446e902b6c781f271bf8556da636c1375cbb208e25f92e1af4cc2d92cf0cf15"},
+ {file = "python_rapidjson-1.20-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:368ecdf4031abbde9c94aac40981d9a1238e6bcfef9fbfee441047b4757d6033"},
+ {file = "python_rapidjson-1.20-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:924f9ea302494d4a4d540d3509f8f1f15622ea7d614c6f29df3188d52c6cb546"},
+ {file = "python_rapidjson-1.20-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:632acb2dfa29883723e24bb2ce47c726edd5f672341553a5184db68f78d3bd09"},
+ {file = "python_rapidjson-1.20-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:c2f85da53286e67778d4061ef32ff44ca9b5f945030463716e046ee8985319f8"},
+ {file = "python_rapidjson-1.20-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:c05c8602c019cc0db19601fdc4927755a9d33f21d01beb3d5767313d7a81360d"},
+ {file = "python_rapidjson-1.20-cp38-cp38-musllinux_1_2_ppc64le.whl", hash = "sha256:7d36aab758bfb1b59e0a849cd20e971eda951a04d3586bb5f6cb460bfc7c103d"},
+ {file = "python_rapidjson-1.20-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:e5774c905034362298312116f9b58c181e91a09800e4e5cede7b3d460a6a9fde"},
+ {file = "python_rapidjson-1.20-cp38-cp38-win32.whl", hash = "sha256:488d0c6155004b5177225eaf331bb1838616da05ae966dd24a7d442751c1d193"},
+ {file = "python_rapidjson-1.20-cp38-cp38-win_amd64.whl", hash = "sha256:00183c4938cd491b98b1a43626bc5a381842ceba87644cb91b25555f3fc3c0bf"},
+ {file = "python_rapidjson-1.20-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:f510ffe32fec319699f0c1ea9cee5bde47c33202b034b85c5d1b9ace682aa96a"},
+ {file = "python_rapidjson-1.20-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:a2b624b3613fb7b8dfef4adc709bf39489be8c655cd9d24dc4e2cc16fc5def83"},
+ {file = "python_rapidjson-1.20-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6c9f813a37d1f708a221f1f7d8c97c437d10597261810c1d3b52cf8f248d66c0"},
+ {file = "python_rapidjson-1.20-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:0c3f7085c52259c56af72462df7620c3b8bb95575fd9b8c3a073728855e93269"},
+ {file = "python_rapidjson-1.20-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:871f2eeb0907f3d7ab09efe04c5b5e2886c275ea568f7867c97468ae14cdd52f"},
+ {file = "python_rapidjson-1.20-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b7c0408e7f52f32cf4bdd5aa305f005914b0143cac69d42575e2d40e8678cd72"},
+ {file = "python_rapidjson-1.20-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:ec17a18df700e1f956fc5a0c41cbb3cc746c44c0fef38988efba9b2cb607ecfa"},
+ {file = "python_rapidjson-1.20-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:1c0303bd445312a78485a9adba06dfdb84561c5157a9cda7999fefb36df4c6cc"},
+ {file = "python_rapidjson-1.20-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:303b079ef268a996242be51ae80c8b563ee2d73489ab4f16199fef2216e80765"},
+ {file = "python_rapidjson-1.20-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:5adcef7a27abafbb2b3d0b02c822dfd9b4b329769cb97810b7f9733e1fda0498"},
+ {file = "python_rapidjson-1.20-cp39-cp39-win32.whl", hash = "sha256:3e963e78fff6ab5ab2ae847b65683774c48b9b192307380f2175540d6423fd73"},
+ {file = "python_rapidjson-1.20-cp39-cp39-win_amd64.whl", hash = "sha256:1fc3bba6632ecffeb1897fdf98858dc50a677237f4241853444c70a041158a90"},
+ {file = "python_rapidjson-1.20.tar.gz", hash = "sha256:115f08c86d2df7543c02605e77c84727cdabc4b08310d2f097e953efeaaa73eb"},
+]
+
+[[package]]
+name = "pytz"
+version = "2023.3.post1"
+description = "World timezone definitions, modern and historical"
+optional = false
+python-versions = "*"
+files = [
+ {file = "pytz-2023.3.post1-py2.py3-none-any.whl", hash = "sha256:ce42d816b81b68506614c11e8937d3aa9e41007ceb50bfdcb0749b921bf646c7"},
+ {file = "pytz-2023.3.post1.tar.gz", hash = "sha256:7b4fddbeb94a1eba4b557da24f19fdf9db575192544270a9101d8509f9f43d7b"},
+]
+
+[[package]]
+name = "pytz-deprecation-shim"
+version = "0.1.0.post0"
+description = "Shims to make deprecation of pytz easier"
+optional = false
+python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,>=2.7"
+files = [
+ {file = "pytz_deprecation_shim-0.1.0.post0-py2.py3-none-any.whl", hash = "sha256:8314c9692a636c8eb3bda879b9f119e350e93223ae83e70e80c31675a0fdc1a6"},
+ {file = "pytz_deprecation_shim-0.1.0.post0.tar.gz", hash = "sha256:af097bae1b616dde5c5744441e2ddc69e74dfdcb0c263129610d85b87445a59d"},
+]
+
+[package.dependencies]
+"backports.zoneinfo" = {version = "*", markers = "python_version >= \"3.6\" and python_version < \"3.9\""}
+tzdata = {version = "*", markers = "python_version >= \"3.6\""}
+
+[[package]]
+name = "pyyaml"
+version = "6.0.1"
+description = "YAML parser and emitter for Python"
+optional = false
+python-versions = ">=3.6"
+files = [
+ {file = "PyYAML-6.0.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d858aa552c999bc8a8d57426ed01e40bef403cd8ccdd0fc5f6f04a00414cac2a"},
+ {file = "PyYAML-6.0.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:fd66fc5d0da6d9815ba2cebeb4205f95818ff4b79c3ebe268e75d961704af52f"},
+ {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:69b023b2b4daa7548bcfbd4aa3da05b3a74b772db9e23b982788168117739938"},
+ {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:81e0b275a9ecc9c0c0c07b4b90ba548307583c125f54d5b6946cfee6360c733d"},
+ {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ba336e390cd8e4d1739f42dfe9bb83a3cc2e80f567d8805e11b46f4a943f5515"},
+ {file = "PyYAML-6.0.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:326c013efe8048858a6d312ddd31d56e468118ad4cdeda36c719bf5bb6192290"},
+ {file = "PyYAML-6.0.1-cp310-cp310-win32.whl", hash = "sha256:bd4af7373a854424dabd882decdc5579653d7868b8fb26dc7d0e99f823aa5924"},
+ {file = "PyYAML-6.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:fd1592b3fdf65fff2ad0004b5e363300ef59ced41c2e6b3a99d4089fa8c5435d"},
+ {file = "PyYAML-6.0.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6965a7bc3cf88e5a1c3bd2e0b5c22f8d677dc88a455344035f03399034eb3007"},
+ {file = "PyYAML-6.0.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f003ed9ad21d6a4713f0a9b5a7a0a79e08dd0f221aff4525a2be4c346ee60aab"},
+ {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:42f8152b8dbc4fe7d96729ec2b99c7097d656dc1213a3229ca5383f973a5ed6d"},
+ {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:062582fca9fabdd2c8b54a3ef1c978d786e0f6b3a1510e0ac93ef59e0ddae2bc"},
+ {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d2b04aac4d386b172d5b9692e2d2da8de7bfb6c387fa4f801fbf6fb2e6ba4673"},
+ {file = "PyYAML-6.0.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e7d73685e87afe9f3b36c799222440d6cf362062f78be1013661b00c5c6f678b"},
+ {file = "PyYAML-6.0.1-cp311-cp311-win32.whl", hash = "sha256:1635fd110e8d85d55237ab316b5b011de701ea0f29d07611174a1b42f1444741"},
+ {file = "PyYAML-6.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:bf07ee2fef7014951eeb99f56f39c9bb4af143d8aa3c21b1677805985307da34"},
+ {file = "PyYAML-6.0.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:855fb52b0dc35af121542a76b9a84f8d1cd886ea97c84703eaa6d88e37a2ad28"},
+ {file = "PyYAML-6.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:40df9b996c2b73138957fe23a16a4f0ba614f4c0efce1e9406a184b6d07fa3a9"},
+ {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a08c6f0fe150303c1c6b71ebcd7213c2858041a7e01975da3a99aed1e7a378ef"},
+ {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6c22bec3fbe2524cde73d7ada88f6566758a8f7227bfbf93a408a9d86bcc12a0"},
+ {file = "PyYAML-6.0.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8d4e9c88387b0f5c7d5f281e55304de64cf7f9c0021a3525bd3b1c542da3b0e4"},
+ {file = "PyYAML-6.0.1-cp312-cp312-win32.whl", hash = "sha256:d483d2cdf104e7c9fa60c544d92981f12ad66a457afae824d146093b8c294c54"},
+ {file = "PyYAML-6.0.1-cp312-cp312-win_amd64.whl", hash = "sha256:0d3304d8c0adc42be59c5f8a4d9e3d7379e6955ad754aa9d6ab7a398b59dd1df"},
+ {file = "PyYAML-6.0.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:50550eb667afee136e9a77d6dc71ae76a44df8b3e51e41b77f6de2932bfe0f47"},
+ {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1fe35611261b29bd1de0070f0b2f47cb6ff71fa6595c077e42bd0c419fa27b98"},
+ {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:704219a11b772aea0d8ecd7058d0082713c3562b4e271b849ad7dc4a5c90c13c"},
+ {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:afd7e57eddb1a54f0f1a974bc4391af8bcce0b444685d936840f125cf046d5bd"},
+ {file = "PyYAML-6.0.1-cp36-cp36m-win32.whl", hash = "sha256:fca0e3a251908a499833aa292323f32437106001d436eca0e6e7833256674585"},
+ {file = "PyYAML-6.0.1-cp36-cp36m-win_amd64.whl", hash = "sha256:f22ac1c3cac4dbc50079e965eba2c1058622631e526bd9afd45fedd49ba781fa"},
+ {file = "PyYAML-6.0.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:b1275ad35a5d18c62a7220633c913e1b42d44b46ee12554e5fd39c70a243d6a3"},
+ {file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:18aeb1bf9a78867dc38b259769503436b7c72f7a1f1f4c93ff9a17de54319b27"},
+ {file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:596106435fa6ad000c2991a98fa58eeb8656ef2325d7e158344fb33864ed87e3"},
+ {file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:baa90d3f661d43131ca170712d903e6295d1f7a0f595074f151c0aed377c9b9c"},
+ {file = "PyYAML-6.0.1-cp37-cp37m-win32.whl", hash = "sha256:9046c58c4395dff28dd494285c82ba00b546adfc7ef001486fbf0324bc174fba"},
+ {file = "PyYAML-6.0.1-cp37-cp37m-win_amd64.whl", hash = "sha256:4fb147e7a67ef577a588a0e2c17b6db51dda102c71de36f8549b6816a96e1867"},
+ {file = "PyYAML-6.0.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1d4c7e777c441b20e32f52bd377e0c409713e8bb1386e1099c2415f26e479595"},
+ {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a0cd17c15d3bb3fa06978b4e8958dcdc6e0174ccea823003a106c7d4d7899ac5"},
+ {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:28c119d996beec18c05208a8bd78cbe4007878c6dd15091efb73a30e90539696"},
+ {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7e07cbde391ba96ab58e532ff4803f79c4129397514e1413a7dc761ccd755735"},
+ {file = "PyYAML-6.0.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:49a183be227561de579b4a36efbb21b3eab9651dd81b1858589f796549873dd6"},
+ {file = "PyYAML-6.0.1-cp38-cp38-win32.whl", hash = "sha256:184c5108a2aca3c5b3d3bf9395d50893a7ab82a38004c8f61c258d4428e80206"},
+ {file = "PyYAML-6.0.1-cp38-cp38-win_amd64.whl", hash = "sha256:1e2722cc9fbb45d9b87631ac70924c11d3a401b2d7f410cc0e3bbf249f2dca62"},
+ {file = "PyYAML-6.0.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9eb6caa9a297fc2c2fb8862bc5370d0303ddba53ba97e71f08023b6cd73d16a8"},
+ {file = "PyYAML-6.0.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:c8098ddcc2a85b61647b2590f825f3db38891662cfc2fc776415143f599bb859"},
+ {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5773183b6446b2c99bb77e77595dd486303b4faab2b086e7b17bc6bef28865f6"},
+ {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b786eecbdf8499b9ca1d697215862083bd6d2a99965554781d0d8d1ad31e13a0"},
+ {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc1bf2925a1ecd43da378f4db9e4f799775d6367bdb94671027b73b393a7c42c"},
+ {file = "PyYAML-6.0.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:04ac92ad1925b2cff1db0cfebffb6ffc43457495c9b3c39d3fcae417d7125dc5"},
+ {file = "PyYAML-6.0.1-cp39-cp39-win32.whl", hash = "sha256:faca3bdcf85b2fc05d06ff3fbc1f83e1391b3e724afa3feba7d13eeab355484c"},
+ {file = "PyYAML-6.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:510c9deebc5c0225e8c96813043e62b680ba2f9c50a08d3724c7f28a747d1486"},
+ {file = "PyYAML-6.0.1.tar.gz", hash = "sha256:bfdf460b1736c775f2ba9f6a92bca30bc2095067b8a9d77876d1fad6cc3b4a43"},
+]
+
+[[package]]
+name = "rdflib"
+version = "6.3.2"
+description = "RDFLib is a Python library for working with RDF, a simple yet powerful language for representing information."
+optional = false
+python-versions = ">=3.7,<4.0"
+files = [
+ {file = "rdflib-6.3.2-py3-none-any.whl", hash = "sha256:36b4e74a32aa1e4fa7b8719876fb192f19ecd45ff932ea5ebbd2e417a0247e63"},
+ {file = "rdflib-6.3.2.tar.gz", hash = "sha256:72af591ff704f4caacea7ecc0c5a9056b8553e0489dd4f35a9bc52dbd41522e0"},
+]
+
+[package.dependencies]
+isodate = ">=0.6.0,<0.7.0"
+pyparsing = ">=2.1.0,<4"
+
+[package.extras]
+berkeleydb = ["berkeleydb (>=18.1.0,<19.0.0)"]
+html = ["html5lib (>=1.0,<2.0)"]
+lxml = ["lxml (>=4.3.0,<5.0.0)"]
+networkx = ["networkx (>=2.0.0,<3.0.0)"]
+
+[[package]]
+name = "reactivex"
+version = "4.0.4"
+description = "ReactiveX (Rx) for Python"
+optional = false
+python-versions = ">=3.7,<4.0"
+files = [
+ {file = "reactivex-4.0.4-py3-none-any.whl", hash = "sha256:0004796c420bd9e68aad8e65627d85a8e13f293de76656165dffbcb3a0e3fb6a"},
+ {file = "reactivex-4.0.4.tar.gz", hash = "sha256:e912e6591022ab9176df8348a653fe8c8fa7a301f26f9931c9d8c78a650e04e8"},
+]
+
+[package.dependencies]
+typing-extensions = ">=4.1.1,<5.0.0"
+
+[[package]]
+name = "redis"
+version = "4.6.0"
+description = "Python client for Redis database and key-value store"
+optional = false
+python-versions = ">=3.7"
+files = [
+ {file = "redis-4.6.0-py3-none-any.whl", hash = "sha256:e2b03db868160ee4591de3cb90d40ebb50a90dd302138775937f6a42b7ed183c"},
+ {file = "redis-4.6.0.tar.gz", hash = "sha256:585dc516b9eb042a619ef0a39c3d7d55fe81bdb4df09a52c9cdde0d07bf1aa7d"},
+]
+
+[package.dependencies]
+async-timeout = {version = ">=4.0.2", markers = "python_full_version <= \"3.11.2\""}
+
+[package.extras]
+hiredis = ["hiredis (>=1.0.0)"]
+ocsp = ["cryptography (>=36.0.1)", "pyopenssl (==20.0.1)", "requests (>=2.26.0)"]
+
+[[package]]
+name = "regex"
+version = "2023.8.8"
+description = "Alternative regular expression module, to replace re."
+optional = false
+python-versions = ">=3.6"
+files = [
+ {file = "regex-2023.8.8-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:88900f521c645f784260a8d346e12a1590f79e96403971241e64c3a265c8ecdb"},
+ {file = "regex-2023.8.8-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:3611576aff55918af2697410ff0293d6071b7e00f4b09e005d614686ac4cd57c"},
+ {file = "regex-2023.8.8-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b8a0ccc8f2698f120e9e5742f4b38dc944c38744d4bdfc427616f3a163dd9de5"},
+ {file = "regex-2023.8.8-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c662a4cbdd6280ee56f841f14620787215a171c4e2d1744c9528bed8f5816c96"},
+ {file = "regex-2023.8.8-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:cf0633e4a1b667bfe0bb10b5e53fe0d5f34a6243ea2530eb342491f1adf4f739"},
+ {file = "regex-2023.8.8-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:551ad543fa19e94943c5b2cebc54c73353ffff08228ee5f3376bd27b3d5b9800"},
+ {file = "regex-2023.8.8-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:54de2619f5ea58474f2ac211ceea6b615af2d7e4306220d4f3fe690c91988a61"},
+ {file = "regex-2023.8.8-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:5ec4b3f0aebbbe2fc0134ee30a791af522a92ad9f164858805a77442d7d18570"},
+ {file = "regex-2023.8.8-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:3ae646c35cb9f820491760ac62c25b6d6b496757fda2d51be429e0e7b67ae0ab"},
+ {file = "regex-2023.8.8-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:ca339088839582d01654e6f83a637a4b8194d0960477b9769d2ff2cfa0fa36d2"},
+ {file = "regex-2023.8.8-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:d9b6627408021452dcd0d2cdf8da0534e19d93d070bfa8b6b4176f99711e7f90"},
+ {file = "regex-2023.8.8-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:bd3366aceedf274f765a3a4bc95d6cd97b130d1dda524d8f25225d14123c01db"},
+ {file = "regex-2023.8.8-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:7aed90a72fc3654fba9bc4b7f851571dcc368120432ad68b226bd593f3f6c0b7"},
+ {file = "regex-2023.8.8-cp310-cp310-win32.whl", hash = "sha256:80b80b889cb767cc47f31d2b2f3dec2db8126fbcd0cff31b3925b4dc6609dcdb"},
+ {file = "regex-2023.8.8-cp310-cp310-win_amd64.whl", hash = "sha256:b82edc98d107cbc7357da7a5a695901b47d6eb0420e587256ba3ad24b80b7d0b"},
+ {file = "regex-2023.8.8-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:1e7d84d64c84ad97bf06f3c8cb5e48941f135ace28f450d86af6b6512f1c9a71"},
+ {file = "regex-2023.8.8-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:ce0f9fbe7d295f9922c0424a3637b88c6c472b75eafeaff6f910494a1fa719ef"},
+ {file = "regex-2023.8.8-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:06c57e14ac723b04458df5956cfb7e2d9caa6e9d353c0b4c7d5d54fcb1325c46"},
+ {file = "regex-2023.8.8-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e7a9aaa5a1267125eef22cef3b63484c3241aaec6f48949b366d26c7250e0357"},
+ {file = "regex-2023.8.8-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9b7408511fca48a82a119d78a77c2f5eb1b22fe88b0d2450ed0756d194fe7a9a"},
+ {file = "regex-2023.8.8-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:14dc6f2d88192a67d708341f3085df6a4f5a0c7b03dec08d763ca2cd86e9f559"},
+ {file = "regex-2023.8.8-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:48c640b99213643d141550326f34f0502fedb1798adb3c9eb79650b1ecb2f177"},
+ {file = "regex-2023.8.8-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:0085da0f6c6393428bf0d9c08d8b1874d805bb55e17cb1dfa5ddb7cfb11140bf"},
+ {file = "regex-2023.8.8-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:964b16dcc10c79a4a2be9f1273fcc2684a9eedb3906439720598029a797b46e6"},
+ {file = "regex-2023.8.8-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:7ce606c14bb195b0e5108544b540e2c5faed6843367e4ab3deb5c6aa5e681208"},
+ {file = "regex-2023.8.8-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:40f029d73b10fac448c73d6eb33d57b34607f40116e9f6e9f0d32e9229b147d7"},
+ {file = "regex-2023.8.8-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:3b8e6ea6be6d64104d8e9afc34c151926f8182f84e7ac290a93925c0db004bfd"},
+ {file = "regex-2023.8.8-cp311-cp311-win32.whl", hash = "sha256:942f8b1f3b223638b02df7df79140646c03938d488fbfb771824f3d05fc083a8"},
+ {file = "regex-2023.8.8-cp311-cp311-win_amd64.whl", hash = "sha256:51d8ea2a3a1a8fe4f67de21b8b93757005213e8ac3917567872f2865185fa7fb"},
+ {file = "regex-2023.8.8-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:e951d1a8e9963ea51efd7f150450803e3b95db5939f994ad3d5edac2b6f6e2b4"},
+ {file = "regex-2023.8.8-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:704f63b774218207b8ccc6c47fcef5340741e5d839d11d606f70af93ee78e4d4"},
+ {file = "regex-2023.8.8-cp36-cp36m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:22283c769a7b01c8ac355d5be0715bf6929b6267619505e289f792b01304d898"},
+ {file = "regex-2023.8.8-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:91129ff1bb0619bc1f4ad19485718cc623a2dc433dff95baadbf89405c7f6b57"},
+ {file = "regex-2023.8.8-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:de35342190deb7b866ad6ba5cbcccb2d22c0487ee0cbb251efef0843d705f0d4"},
+ {file = "regex-2023.8.8-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b993b6f524d1e274a5062488a43e3f9f8764ee9745ccd8e8193df743dbe5ee61"},
+ {file = "regex-2023.8.8-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:3026cbcf11d79095a32d9a13bbc572a458727bd5b1ca332df4a79faecd45281c"},
+ {file = "regex-2023.8.8-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:293352710172239bf579c90a9864d0df57340b6fd21272345222fb6371bf82b3"},
+ {file = "regex-2023.8.8-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:d909b5a3fff619dc7e48b6b1bedc2f30ec43033ba7af32f936c10839e81b9217"},
+ {file = "regex-2023.8.8-cp36-cp36m-musllinux_1_1_ppc64le.whl", hash = "sha256:3d370ff652323c5307d9c8e4c62efd1956fb08051b0e9210212bc51168b4ff56"},
+ {file = "regex-2023.8.8-cp36-cp36m-musllinux_1_1_s390x.whl", hash = "sha256:b076da1ed19dc37788f6a934c60adf97bd02c7eea461b73730513921a85d4235"},
+ {file = "regex-2023.8.8-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:e9941a4ada58f6218694f382e43fdd256e97615db9da135e77359da257a7168b"},
+ {file = "regex-2023.8.8-cp36-cp36m-win32.whl", hash = "sha256:a8c65c17aed7e15a0c824cdc63a6b104dfc530f6fa8cb6ac51c437af52b481c7"},
+ {file = "regex-2023.8.8-cp36-cp36m-win_amd64.whl", hash = "sha256:aadf28046e77a72f30dcc1ab185639e8de7f4104b8cb5c6dfa5d8ed860e57236"},
+ {file = "regex-2023.8.8-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:423adfa872b4908843ac3e7a30f957f5d5282944b81ca0a3b8a7ccbbfaa06103"},
+ {file = "regex-2023.8.8-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4ae594c66f4a7e1ea67232a0846649a7c94c188d6c071ac0210c3e86a5f92109"},
+ {file = "regex-2023.8.8-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e51c80c168074faa793685656c38eb7a06cbad7774c8cbc3ea05552d615393d8"},
+ {file = "regex-2023.8.8-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:09b7f4c66aa9d1522b06e31a54f15581c37286237208df1345108fcf4e050c18"},
+ {file = "regex-2023.8.8-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2e73e5243af12d9cd6a9d6a45a43570dbe2e5b1cdfc862f5ae2b031e44dd95a8"},
+ {file = "regex-2023.8.8-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:941460db8fe3bd613db52f05259c9336f5a47ccae7d7def44cc277184030a116"},
+ {file = "regex-2023.8.8-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:f0ccf3e01afeb412a1a9993049cb160d0352dba635bbca7762b2dc722aa5742a"},
+ {file = "regex-2023.8.8-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:2e9216e0d2cdce7dbc9be48cb3eacb962740a09b011a116fd7af8c832ab116ca"},
+ {file = "regex-2023.8.8-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:5cd9cd7170459b9223c5e592ac036e0704bee765706445c353d96f2890e816c8"},
+ {file = "regex-2023.8.8-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:4873ef92e03a4309b3ccd8281454801b291b689f6ad45ef8c3658b6fa761d7ac"},
+ {file = "regex-2023.8.8-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:239c3c2a339d3b3ddd51c2daef10874410917cd2b998f043c13e2084cb191684"},
+ {file = "regex-2023.8.8-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:1005c60ed7037be0d9dea1f9c53cc42f836188227366370867222bda4c3c6bd7"},
+ {file = "regex-2023.8.8-cp37-cp37m-win32.whl", hash = "sha256:e6bd1e9b95bc5614a7a9c9c44fde9539cba1c823b43a9f7bc11266446dd568e3"},
+ {file = "regex-2023.8.8-cp37-cp37m-win_amd64.whl", hash = "sha256:9a96edd79661e93327cfeac4edec72a4046e14550a1d22aa0dd2e3ca52aec921"},
+ {file = "regex-2023.8.8-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:f2181c20ef18747d5f4a7ea513e09ea03bdd50884a11ce46066bb90fe4213675"},
+ {file = "regex-2023.8.8-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:a2ad5add903eb7cdde2b7c64aaca405f3957ab34f16594d2b78d53b8b1a6a7d6"},
+ {file = "regex-2023.8.8-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9233ac249b354c54146e392e8a451e465dd2d967fc773690811d3a8c240ac601"},
+ {file = "regex-2023.8.8-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:920974009fb37b20d32afcdf0227a2e707eb83fe418713f7a8b7de038b870d0b"},
+ {file = "regex-2023.8.8-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:cd2b6c5dfe0929b6c23dde9624483380b170b6e34ed79054ad131b20203a1a63"},
+ {file = "regex-2023.8.8-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:96979d753b1dc3b2169003e1854dc67bfc86edf93c01e84757927f810b8c3c93"},
+ {file = "regex-2023.8.8-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2ae54a338191e1356253e7883d9d19f8679b6143703086245fb14d1f20196be9"},
+ {file = "regex-2023.8.8-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:2162ae2eb8b079622176a81b65d486ba50b888271302190870b8cc488587d280"},
+ {file = "regex-2023.8.8-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:c884d1a59e69e03b93cf0dfee8794c63d7de0ee8f7ffb76e5f75be8131b6400a"},
+ {file = "regex-2023.8.8-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:cf9273e96f3ee2ac89ffcb17627a78f78e7516b08f94dc435844ae72576a276e"},
+ {file = "regex-2023.8.8-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:83215147121e15d5f3a45d99abeed9cf1fe16869d5c233b08c56cdf75f43a504"},
+ {file = "regex-2023.8.8-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:3f7454aa427b8ab9101f3787eb178057c5250478e39b99540cfc2b889c7d0586"},
+ {file = "regex-2023.8.8-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:f0640913d2c1044d97e30d7c41728195fc37e54d190c5385eacb52115127b882"},
+ {file = "regex-2023.8.8-cp38-cp38-win32.whl", hash = "sha256:0c59122ceccb905a941fb23b087b8eafc5290bf983ebcb14d2301febcbe199c7"},
+ {file = "regex-2023.8.8-cp38-cp38-win_amd64.whl", hash = "sha256:c12f6f67495ea05c3d542d119d270007090bad5b843f642d418eb601ec0fa7be"},
+ {file = "regex-2023.8.8-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:82cd0a69cd28f6cc3789cc6adeb1027f79526b1ab50b1f6062bbc3a0ccb2dbc3"},
+ {file = "regex-2023.8.8-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:bb34d1605f96a245fc39790a117ac1bac8de84ab7691637b26ab2c5efb8f228c"},
+ {file = "regex-2023.8.8-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:987b9ac04d0b38ef4f89fbc035e84a7efad9cdd5f1e29024f9289182c8d99e09"},
+ {file = "regex-2023.8.8-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9dd6082f4e2aec9b6a0927202c85bc1b09dcab113f97265127c1dc20e2e32495"},
+ {file = "regex-2023.8.8-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7eb95fe8222932c10d4436e7a6f7c99991e3fdd9f36c949eff16a69246dee2dc"},
+ {file = "regex-2023.8.8-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7098c524ba9f20717a56a8d551d2ed491ea89cbf37e540759ed3b776a4f8d6eb"},
+ {file = "regex-2023.8.8-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4b694430b3f00eb02c594ff5a16db30e054c1b9589a043fe9174584c6efa8033"},
+ {file = "regex-2023.8.8-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:b2aeab3895d778155054abea5238d0eb9a72e9242bd4b43f42fd911ef9a13470"},
+ {file = "regex-2023.8.8-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:988631b9d78b546e284478c2ec15c8a85960e262e247b35ca5eaf7ee22f6050a"},
+ {file = "regex-2023.8.8-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:67ecd894e56a0c6108ec5ab1d8fa8418ec0cff45844a855966b875d1039a2e34"},
+ {file = "regex-2023.8.8-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:14898830f0a0eb67cae2bbbc787c1a7d6e34ecc06fbd39d3af5fe29a4468e2c9"},
+ {file = "regex-2023.8.8-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:f2200e00b62568cfd920127782c61bc1c546062a879cdc741cfcc6976668dfcf"},
+ {file = "regex-2023.8.8-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:9691a549c19c22d26a4f3b948071e93517bdf86e41b81d8c6ac8a964bb71e5a6"},
+ {file = "regex-2023.8.8-cp39-cp39-win32.whl", hash = "sha256:6ab2ed84bf0137927846b37e882745a827458689eb969028af8032b1b3dac78e"},
+ {file = "regex-2023.8.8-cp39-cp39-win_amd64.whl", hash = "sha256:5543c055d8ec7801901e1193a51570643d6a6ab8751b1f7dd9af71af467538bb"},
+ {file = "regex-2023.8.8.tar.gz", hash = "sha256:fcbdc5f2b0f1cd0f6a56cdb46fe41d2cce1e644e3b68832f3eeebc5fb0f7712e"},
+]
+
+[[package]]
+name = "requests"
+version = "2.32.3"
+description = "Python HTTP for Humans."
+optional = false
+python-versions = ">=3.8"
+files = [
+ {file = "requests-2.32.3-py3-none-any.whl", hash = "sha256:70761cfe03c773ceb22aa2f671b4757976145175cdfca038c02654d061d6dcc6"},
+ {file = "requests-2.32.3.tar.gz", hash = "sha256:55365417734eb18255590a9ff9eb97e9e1da868d4ccd6402399eaf68af20a760"},
+]
+
+[package.dependencies]
+certifi = ">=2017.4.17"
+charset-normalizer = ">=2,<4"
+idna = ">=2.5,<4"
+urllib3 = ">=1.21.1,<3"
+
+[package.extras]
+socks = ["PySocks (>=1.5.6,!=1.5.7)"]
+use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"]
+
+[[package]]
+name = "requests-aws-sign"
+version = "0.1.5"
+description = "This package provides AWS V4 request signing using the requests library."
+optional = false
+python-versions = "*"
+files = [
+ {file = "requests_aws_sign-0.1.5-py3-none-any.whl", hash = "sha256:a60e8eb6f53a7cfaa8cb7702852dbbfe41f518d5bc6d187e0d82d31e6528da0c"},
+ {file = "requests_aws_sign-0.1.5.tar.gz", hash = "sha256:35f66c4db95ee82427309481a90bd84d1385ae2e32b7537d9d24b1d3acd069ec"},
+]
+
+[package.dependencies]
+boto3 = "*"
+requests = ">=2.0.0"
+
+[[package]]
+name = "requests-oauthlib"
+version = "1.3.1"
+description = "OAuthlib authentication support for Requests."
+optional = false
+python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*"
+files = [
+ {file = "requests-oauthlib-1.3.1.tar.gz", hash = "sha256:75beac4a47881eeb94d5ea5d6ad31ef88856affe2332b9aafb52c6452ccf0d7a"},
+ {file = "requests_oauthlib-1.3.1-py2.py3-none-any.whl", hash = "sha256:2577c501a2fb8d05a304c09d090d6e47c306fef15809d102b327cf8364bddab5"},
+]
+
+[package.dependencies]
+oauthlib = ">=3.0.0"
+requests = ">=2.0.0"
+
+[package.extras]
+rsa = ["oauthlib[signedtoken] (>=3.0.0)"]
+
+[[package]]
+name = "requests-toolbelt"
+version = "1.0.0"
+description = "A utility belt for advanced users of python-requests"
+optional = false
+python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*"
+files = [
+ {file = "requests-toolbelt-1.0.0.tar.gz", hash = "sha256:7681a0a3d047012b5bdc0ee37d7f8f07ebe76ab08caeccfc3921ce23c88d5bc6"},
+ {file = "requests_toolbelt-1.0.0-py2.py3-none-any.whl", hash = "sha256:cccfdd665f0a24fcf4726e690f65639d272bb0637b9b92dfd91a5568ccf6bd06"},
+]
+
+[package.dependencies]
+requests = ">=2.0.1,<3.0.0"
+
+[[package]]
+name = "restrictedpython"
+version = "7.3"
+description = "RestrictedPython is a defined subset of the Python language which allows to provide a program input into a trusted environment."
+optional = false
+python-versions = "<3.13,>=3.7"
+files = [
+ {file = "RestrictedPython-7.3-py3-none-any.whl", hash = "sha256:40a6170bbcfc48b32962831d9281a61608c8e56e7c02fd8e2397225f516a6ed4"},
+ {file = "RestrictedPython-7.3.tar.gz", hash = "sha256:8888304c7858fdcfd86c50b58561797375ba40319d2b6ffb5d24b08b6a2dcd61"},
+]
+
+[package.extras]
+docs = ["Sphinx", "sphinx-rtd-theme"]
+test = ["pytest", "pytest-mock"]
+
+[[package]]
+name = "rfc3986"
+version = "1.5.0"
+description = "Validating URI References per RFC 3986"
+optional = false
+python-versions = "*"
+files = [
+ {file = "rfc3986-1.5.0-py2.py3-none-any.whl", hash = "sha256:a86d6e1f5b1dc238b218b012df0aa79409667bb209e58da56d0b94704e712a97"},
+ {file = "rfc3986-1.5.0.tar.gz", hash = "sha256:270aaf10d87d0d4e095063c65bf3ddbc6ee3d0b226328ce21e036f946e421835"},
+]
+
+[package.dependencies]
+idna = {version = "*", optional = true, markers = "extra == \"idna2008\""}
+
+[package.extras]
+idna2008 = ["idna"]
+
+[[package]]
+name = "rich"
+version = "13.7.0"
+description = "Render rich text, tables, progress bars, syntax highlighting, markdown and more to the terminal"
+optional = false
+python-versions = ">=3.7.0"
+files = [
+ {file = "rich-13.7.0-py3-none-any.whl", hash = "sha256:6da14c108c4866ee9520bbffa71f6fe3962e193b7da68720583850cd4548e235"},
+ {file = "rich-13.7.0.tar.gz", hash = "sha256:5cb5123b5cf9ee70584244246816e9114227e0b98ad9176eede6ad54bf5403fa"},
+]
+
+[package.dependencies]
+markdown-it-py = ">=2.2.0"
+pygments = ">=2.13.0,<3.0.0"
+typing-extensions = {version = ">=4.0.0,<5.0", markers = "python_version < \"3.9\""}
+
+[package.extras]
+jupyter = ["ipywidgets (>=7.5.1,<9)"]
+
+[[package]]
+name = "rq"
+version = "1.16.1"
+description = "RQ is a simple, lightweight, library for creating background jobs, and processing them."
+optional = false
+python-versions = ">=3.7"
+files = [
+ {file = "rq-1.16.1-py3-none-any.whl", hash = "sha256:273de33f10bb9f18cd1e8ccc0a4e8dba2b8eb86a6ab2a91ae674f99bd68025f1"},
+ {file = "rq-1.16.1.tar.gz", hash = "sha256:d9a6314bc759a743b4a5d89aa467eaa3a31dbbc0a34bcd0ee82e8852d9ec166d"},
+]
+
+[package.dependencies]
+click = ">=5"
+redis = ">=3.5"
+
+[[package]]
+name = "rq-scheduler"
+version = "0.13.1"
+description = "Provides job scheduling capabilities to RQ (Redis Queue)"
+optional = false
+python-versions = "*"
+files = [
+ {file = "rq-scheduler-0.13.1.tar.gz", hash = "sha256:89d6a18f215536362b22c0548db7dbb8678bc520c18dc18a82fd0bb2b91695ce"},
+ {file = "rq_scheduler-0.13.1-py2.py3-none-any.whl", hash = "sha256:c2b19c3aedfc7de4d405183c98aa327506e423bf4cdc556af55aaab9bbe5d1a1"},
+]
+
+[package.dependencies]
+crontab = ">=0.23.0"
+freezegun = "*"
+python-dateutil = "*"
+rq = ">=0.13"
+
+[[package]]
+name = "rsa"
+version = "4.9"
+description = "Pure-Python RSA implementation"
+optional = false
+python-versions = ">=3.6,<4"
+files = [
+ {file = "rsa-4.9-py3-none-any.whl", hash = "sha256:90260d9058e514786967344d0ef75fa8727eed8a7d2e43ce9f4bcf1b536174f7"},
+ {file = "rsa-4.9.tar.gz", hash = "sha256:e38464a49c6c85d7f1351b0126661487a7e0a14a50f1675ec50eb34d4f20ef21"},
+]
+
+[package.dependencies]
+pyasn1 = ">=0.1.3"
+
+[[package]]
+name = "ruff"
+version = "0.0.289"
+description = "An extremely fast Python linter, written in Rust."
+optional = false
+python-versions = ">=3.7"
+files = [
+ {file = "ruff-0.0.289-py3-none-macosx_10_7_x86_64.whl", hash = "sha256:c9a89d748e90c840bac9c37afe90cf13a5bfd460ca02ea93dad9d7bee3af03b4"},
+ {file = "ruff-0.0.289-py3-none-macosx_10_9_x86_64.macosx_11_0_arm64.macosx_10_9_universal2.whl", hash = "sha256:7f7396c6ea01ba332a6ad9d47642bac25d16bd2076aaa595b001f58b2f32ff05"},
+ {file = "ruff-0.0.289-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7180de86c8ecd39624dec1699136f941c07e723201b4ce979bec9e7c67b40ad2"},
+ {file = "ruff-0.0.289-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:73f37c65508203dd01a539926375a10243769c20d4fcab3fa6359cd3fbfc54b7"},
+ {file = "ruff-0.0.289-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1c14abcd7563b5c80be2dd809eeab20e4aa716bf849860b60a22d87ddf19eb88"},
+ {file = "ruff-0.0.289-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:91b6d63b6b46d4707916472c91baa87aa0592e73f62a80ff55efdf6c0668cfd6"},
+ {file = "ruff-0.0.289-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:6479b8c4be3c36046c6c92054762b276fa0fddb03f6b9a310fbbf4c4951267fd"},
+ {file = "ruff-0.0.289-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c5424318c254bcb091cb67e140ec9b9f7122074e100b06236f252923fb41e767"},
+ {file = "ruff-0.0.289-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4daa90865796aedcedf0d8897fdd4cd09bf0ddd3504529a4ccf211edcaff3c7d"},
+ {file = "ruff-0.0.289-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:8057e8ab0016c13b9419bad119e854f881e687bd96bc5e2d52c8baac0f278a44"},
+ {file = "ruff-0.0.289-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:7eebfab2e6a6991908ff1bf82f2dc1e5095fc7e316848e62124526837b445f4d"},
+ {file = "ruff-0.0.289-py3-none-musllinux_1_2_i686.whl", hash = "sha256:ebc7af550018001a7fb39ca22cdce20e1a0de4388ea4a007eb5c822f6188c297"},
+ {file = "ruff-0.0.289-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:6e4e6eccb753efe760ba354fc8e9f783f6bba71aa9f592756f5bd0d78db898ed"},
+ {file = "ruff-0.0.289-py3-none-win32.whl", hash = "sha256:bbb3044f931c09cf17dbe5b339896eece0d6ac10c9a86e172540fcdb1974f2b7"},
+ {file = "ruff-0.0.289-py3-none-win_amd64.whl", hash = "sha256:6d043c5456b792be2615a52f16056c3cf6c40506ce1f2d6f9d3083cfcb9eeab6"},
+ {file = "ruff-0.0.289-py3-none-win_arm64.whl", hash = "sha256:04a720bcca5e987426bb14ad8b9c6f55e259ea774da1cbeafe71569744cfd20a"},
+ {file = "ruff-0.0.289.tar.gz", hash = "sha256:2513f853b0fc42f0339b7ab0d2751b63ce7a50a0032d2689b54b2931b3b866d7"},
+]
+
+[[package]]
+name = "s3transfer"
+version = "0.6.2"
+description = "An Amazon S3 Transfer Manager"
+optional = false
+python-versions = ">= 3.7"
+files = [
+ {file = "s3transfer-0.6.2-py3-none-any.whl", hash = "sha256:b014be3a8a2aab98cfe1abc7229cc5a9a0cf05eb9c1f2b86b230fd8df3f78084"},
+ {file = "s3transfer-0.6.2.tar.gz", hash = "sha256:cab66d3380cca3e70939ef2255d01cd8aece6a4907a9528740f668c4b0611861"},
+]
+
+[package.dependencies]
+botocore = ">=1.12.36,<2.0a.0"
+
+[package.extras]
+crt = ["botocore[crt] (>=1.20.29,<2.0a.0)"]
+
+[[package]]
+name = "sasl"
+version = "0.3.1"
+description = "Cyrus-SASL bindings for Python"
+optional = false
+python-versions = "*"
+files = [
+ {file = "sasl-0.3.1.tar.gz", hash = "sha256:0695030b23faa65aab2b462ce6f067d61caeb406de22d1ca7f9253fd9ebe127e"},
+]
+
+[package.dependencies]
+six = "*"
+
+[[package]]
+name = "scramp"
+version = "1.1.0"
+description = "An implementation of the SCRAM protocol."
+optional = false
+python-versions = ">=3.5"
+files = [
+ {file = "scramp-1.1.0-py3-none-any.whl", hash = "sha256:e09d2a9be5adeb94cbeb56fc54a61fc5f5b6e140e679b2b60d1f7a8d6478d906"},
+ {file = "scramp-1.1.0.tar.gz", hash = "sha256:475aa6296deb2737b86e9df9098e8eca0f30c8ad1cc0a8adadb99ef012a5ceba"},
+]
+
+[[package]]
+name = "semver"
+version = "2.8.1"
+description = "Python helper for Semantic Versioning (http://semver.org/)"
+optional = false
+python-versions = "*"
+files = [
+ {file = "semver-2.8.1-py2.py3-none-any.whl", hash = "sha256:41c9aa26c67dc16c54be13074c352ab666bce1fa219c7110e8f03374cd4206b0"},
+ {file = "semver-2.8.1.tar.gz", hash = "sha256:5b09010a66d9a3837211bb7ae5a20d10ba88f8cb49e92cb139a69ef90d5060d8"},
+]
+
+[[package]]
+name = "sentry-sdk"
+version = "1.45.1"
+description = "Python client for Sentry (https://sentry.io)"
+optional = false
+python-versions = "*"
+files = [
+ {file = "sentry_sdk-1.45.1-py2.py3-none-any.whl", hash = "sha256:608887855ccfe39032bfd03936e3a1c4f4fc99b3a4ac49ced54a4220de61c9c1"},
+ {file = "sentry_sdk-1.45.1.tar.gz", hash = "sha256:a16c997c0f4e3df63c0fc5e4207ccb1ab37900433e0f72fef88315d317829a26"},
+]
+
+[package.dependencies]
+certifi = "*"
+urllib3 = {version = ">=1.26.11", markers = "python_version >= \"3.6\""}
+
+[package.extras]
+aiohttp = ["aiohttp (>=3.5)"]
+arq = ["arq (>=0.23)"]
+asyncpg = ["asyncpg (>=0.23)"]
+beam = ["apache-beam (>=2.12)"]
+bottle = ["bottle (>=0.12.13)"]
+celery = ["celery (>=3)"]
+celery-redbeat = ["celery-redbeat (>=2)"]
+chalice = ["chalice (>=1.16.0)"]
+clickhouse-driver = ["clickhouse-driver (>=0.2.0)"]
+django = ["django (>=1.8)"]
+falcon = ["falcon (>=1.4)"]
+fastapi = ["fastapi (>=0.79.0)"]
+flask = ["blinker (>=1.1)", "flask (>=0.11)", "markupsafe"]
+grpcio = ["grpcio (>=1.21.1)"]
+httpx = ["httpx (>=0.16.0)"]
+huey = ["huey (>=2)"]
+loguru = ["loguru (>=0.5)"]
+openai = ["openai (>=1.0.0)", "tiktoken (>=0.3.0)"]
+opentelemetry = ["opentelemetry-distro (>=0.35b0)"]
+opentelemetry-experimental = ["opentelemetry-distro (>=0.40b0,<1.0)", "opentelemetry-instrumentation-aiohttp-client (>=0.40b0,<1.0)", "opentelemetry-instrumentation-django (>=0.40b0,<1.0)", "opentelemetry-instrumentation-fastapi (>=0.40b0,<1.0)", "opentelemetry-instrumentation-flask (>=0.40b0,<1.0)", "opentelemetry-instrumentation-requests (>=0.40b0,<1.0)", "opentelemetry-instrumentation-sqlite3 (>=0.40b0,<1.0)", "opentelemetry-instrumentation-urllib (>=0.40b0,<1.0)"]
+pure-eval = ["asttokens", "executing", "pure-eval"]
+pymongo = ["pymongo (>=3.1)"]
+pyspark = ["pyspark (>=2.4.4)"]
+quart = ["blinker (>=1.1)", "quart (>=0.16.1)"]
+rq = ["rq (>=0.6)"]
+sanic = ["sanic (>=0.8)"]
+sqlalchemy = ["sqlalchemy (>=1.2)"]
+starlette = ["starlette (>=0.19.1)"]
+starlite = ["starlite (>=1.48)"]
+tornado = ["tornado (>=5)"]
+
+[[package]]
+name = "setuptools"
+version = "70.0.0"
+description = "Easily download, build, install, upgrade, and uninstall Python packages"
+optional = false
+python-versions = ">=3.8"
+files = [
+ {file = "setuptools-70.0.0-py3-none-any.whl", hash = "sha256:54faa7f2e8d2d11bcd2c07bed282eef1046b5c080d1c32add737d7b5817b1ad4"},
+ {file = "setuptools-70.0.0.tar.gz", hash = "sha256:f211a66637b8fa059bb28183da127d4e86396c991a942b028c6650d4319c3fd0"},
+]
+
+[package.extras]
+docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "pyproject-hooks (!=1.1)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (>=1,<2)", "sphinx-reredirects", "sphinxcontrib-towncrier"]
+testing = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "importlib-metadata", "ini2toml[lite] (>=0.14)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "mypy (==1.9)", "packaging (>=23.2)", "pip (>=19.1)", "pyproject-hooks (!=1.1)", "pytest (>=6,!=8.1.1)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-home (>=0.5)", "pytest-mypy", "pytest-perf", "pytest-ruff (>=0.2.1)", "pytest-subprocess", "pytest-timeout", "pytest-xdist (>=3)", "tomli", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"]
+
+[[package]]
+name = "simple-salesforce"
+version = "0.74.3"
+description = "A basic Salesforce.com REST API client."
+optional = false
+python-versions = "*"
+files = [
+ {file = "simple_salesforce-0.74.3-py2.py3-none-any.whl", hash = "sha256:0bf4065a7769388d8f830bfc31200e6d2d6de50d19034e4113b59831dd72a438"},
+]
+
+[package.dependencies]
+requests = {version = "*", extras = ["security"]}
+
+[[package]]
+name = "simplejson"
+version = "3.19.2"
+description = "Simple, fast, extensible JSON encoder/decoder for Python"
+optional = false
+python-versions = ">=2.5, !=3.0.*, !=3.1.*, !=3.2.*"
+files = [
+ {file = "simplejson-3.19.2-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:3471e95110dcaf901db16063b2e40fb394f8a9e99b3fe9ee3acc6f6ef72183a2"},
+ {file = "simplejson-3.19.2-cp27-cp27m-manylinux1_i686.whl", hash = "sha256:3194cd0d2c959062b94094c0a9f8780ffd38417a5322450a0db0ca1a23e7fbd2"},
+ {file = "simplejson-3.19.2-cp27-cp27m-manylinux1_x86_64.whl", hash = "sha256:8a390e56a7963e3946ff2049ee1eb218380e87c8a0e7608f7f8790ba19390867"},
+ {file = "simplejson-3.19.2-cp27-cp27m-manylinux2010_i686.whl", hash = "sha256:1537b3dd62d8aae644f3518c407aa8469e3fd0f179cdf86c5992792713ed717a"},
+ {file = "simplejson-3.19.2-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:a8617625369d2d03766413bff9e64310feafc9fc4f0ad2b902136f1a5cd8c6b0"},
+ {file = "simplejson-3.19.2-cp27-cp27mu-manylinux1_i686.whl", hash = "sha256:2c433a412e96afb9a3ce36fa96c8e61a757af53e9c9192c97392f72871e18e69"},
+ {file = "simplejson-3.19.2-cp27-cp27mu-manylinux1_x86_64.whl", hash = "sha256:f1c70249b15e4ce1a7d5340c97670a95f305ca79f376887759b43bb33288c973"},
+ {file = "simplejson-3.19.2-cp27-cp27mu-manylinux2010_i686.whl", hash = "sha256:287e39ba24e141b046812c880f4619d0ca9e617235d74abc27267194fc0c7835"},
+ {file = "simplejson-3.19.2-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:6f0a0b41dd05eefab547576bed0cf066595f3b20b083956b1405a6f17d1be6ad"},
+ {file = "simplejson-3.19.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:2f98d918f7f3aaf4b91f2b08c0c92b1774aea113334f7cde4fe40e777114dbe6"},
+ {file = "simplejson-3.19.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:7d74beca677623481810c7052926365d5f07393c72cbf62d6cce29991b676402"},
+ {file = "simplejson-3.19.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:7f2398361508c560d0bf1773af19e9fe644e218f2a814a02210ac2c97ad70db0"},
+ {file = "simplejson-3.19.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6ad331349b0b9ca6da86064a3599c425c7a21cd41616e175ddba0866da32df48"},
+ {file = "simplejson-3.19.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:332c848f02d71a649272b3f1feccacb7e4f7e6de4a2e6dc70a32645326f3d428"},
+ {file = "simplejson-3.19.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:25785d038281cd106c0d91a68b9930049b6464288cea59ba95b35ee37c2d23a5"},
+ {file = "simplejson-3.19.2-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:18955c1da6fc39d957adfa346f75226246b6569e096ac9e40f67d102278c3bcb"},
+ {file = "simplejson-3.19.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:11cc3afd8160d44582543838b7e4f9aa5e97865322844b75d51bf4e0e413bb3e"},
+ {file = "simplejson-3.19.2-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:b01fda3e95d07a6148702a641e5e293b6da7863f8bc9b967f62db9461330562c"},
+ {file = "simplejson-3.19.2-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:778331444917108fa8441f59af45886270d33ce8a23bfc4f9b192c0b2ecef1b3"},
+ {file = "simplejson-3.19.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:9eb117db8d7ed733a7317c4215c35993b815bf6aeab67523f1f11e108c040672"},
+ {file = "simplejson-3.19.2-cp310-cp310-win32.whl", hash = "sha256:39b6d79f5cbfa3eb63a869639cfacf7c41d753c64f7801efc72692c1b2637ac7"},
+ {file = "simplejson-3.19.2-cp310-cp310-win_amd64.whl", hash = "sha256:5675e9d8eeef0aa06093c1ff898413ade042d73dc920a03e8cea2fb68f62445a"},
+ {file = "simplejson-3.19.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:ed628c1431100b0b65387419551e822987396bee3c088a15d68446d92f554e0c"},
+ {file = "simplejson-3.19.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:adcb3332979cbc941b8fff07181f06d2b608625edc0a4d8bc3ffc0be414ad0c4"},
+ {file = "simplejson-3.19.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:08889f2f597ae965284d7b52a5c3928653a9406d88c93e3161180f0abc2433ba"},
+ {file = "simplejson-3.19.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ef7938a78447174e2616be223f496ddccdbf7854f7bf2ce716dbccd958cc7d13"},
+ {file = "simplejson-3.19.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a970a2e6d5281d56cacf3dc82081c95c1f4da5a559e52469287457811db6a79b"},
+ {file = "simplejson-3.19.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:554313db34d63eac3b3f42986aa9efddd1a481169c12b7be1e7512edebff8eaf"},
+ {file = "simplejson-3.19.2-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4d36081c0b1c12ea0ed62c202046dca11438bee48dd5240b7c8de8da62c620e9"},
+ {file = "simplejson-3.19.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:a3cd18e03b0ee54ea4319cdcce48357719ea487b53f92a469ba8ca8e39df285e"},
+ {file = "simplejson-3.19.2-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:66e5dc13bfb17cd6ee764fc96ccafd6e405daa846a42baab81f4c60e15650414"},
+ {file = "simplejson-3.19.2-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:972a7833d4a1fcf7a711c939e315721a88b988553fc770a5b6a5a64bd6ebeba3"},
+ {file = "simplejson-3.19.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:3e74355cb47e0cd399ead3477e29e2f50e1540952c22fb3504dda0184fc9819f"},
+ {file = "simplejson-3.19.2-cp311-cp311-win32.whl", hash = "sha256:1dd4f692304854352c3e396e9b5f0a9c9e666868dd0bdc784e2ac4c93092d87b"},
+ {file = "simplejson-3.19.2-cp311-cp311-win_amd64.whl", hash = "sha256:9300aee2a8b5992d0f4293d88deb59c218989833e3396c824b69ba330d04a589"},
+ {file = "simplejson-3.19.2-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:b8d940fd28eb34a7084877747a60873956893e377f15a32ad445fe66c972c3b8"},
+ {file = "simplejson-3.19.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:4969d974d9db826a2c07671273e6b27bc48e940738d768fa8f33b577f0978378"},
+ {file = "simplejson-3.19.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:c594642d6b13d225e10df5c16ee15b3398e21a35ecd6aee824f107a625690374"},
+ {file = "simplejson-3.19.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e2f5a398b5e77bb01b23d92872255e1bcb3c0c719a3be40b8df146570fe7781a"},
+ {file = "simplejson-3.19.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:176a1b524a3bd3314ed47029a86d02d5a95cc0bee15bd3063a1e1ec62b947de6"},
+ {file = "simplejson-3.19.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f3c7363a8cb8c5238878ec96c5eb0fc5ca2cb11fc0c7d2379863d342c6ee367a"},
+ {file = "simplejson-3.19.2-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:346820ae96aa90c7d52653539a57766f10f33dd4be609206c001432b59ddf89f"},
+ {file = "simplejson-3.19.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:de9a2792612ec6def556d1dc621fd6b2073aff015d64fba9f3e53349ad292734"},
+ {file = "simplejson-3.19.2-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:1c768e7584c45094dca4b334af361e43b0aaa4844c04945ac7d43379eeda9bc2"},
+ {file = "simplejson-3.19.2-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:9652e59c022e62a5b58a6f9948b104e5bb96d3b06940c6482588176f40f4914b"},
+ {file = "simplejson-3.19.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:9c1a4393242e321e344213a90a1e3bf35d2f624aa8b8f6174d43e3c6b0e8f6eb"},
+ {file = "simplejson-3.19.2-cp312-cp312-win32.whl", hash = "sha256:7cb98be113911cb0ad09e5523d0e2a926c09a465c9abb0784c9269efe4f95917"},
+ {file = "simplejson-3.19.2-cp312-cp312-win_amd64.whl", hash = "sha256:6779105d2fcb7fcf794a6a2a233787f6bbd4731227333a072d8513b252ed374f"},
+ {file = "simplejson-3.19.2-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:061e81ea2d62671fa9dea2c2bfbc1eec2617ae7651e366c7b4a2baf0a8c72cae"},
+ {file = "simplejson-3.19.2-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4280e460e51f86ad76dc456acdbfa9513bdf329556ffc8c49e0200878ca57816"},
+ {file = "simplejson-3.19.2-cp36-cp36m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:11c39fbc4280d7420684494373b7c5904fa72a2b48ef543a56c2d412999c9e5d"},
+ {file = "simplejson-3.19.2-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:bccb3e88ec26ffa90f72229f983d3a5d1155e41a1171190fa723d4135523585b"},
+ {file = "simplejson-3.19.2-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1bb5b50dc6dd671eb46a605a3e2eb98deb4a9af787a08fcdddabe5d824bb9664"},
+ {file = "simplejson-3.19.2-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:d94245caa3c61f760c4ce4953cfa76e7739b6f2cbfc94cc46fff6c050c2390c5"},
+ {file = "simplejson-3.19.2-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:d0e5ffc763678d48ecc8da836f2ae2dd1b6eb2d27a48671066f91694e575173c"},
+ {file = "simplejson-3.19.2-cp36-cp36m-musllinux_1_1_ppc64le.whl", hash = "sha256:d222a9ed082cd9f38b58923775152003765016342a12f08f8c123bf893461f28"},
+ {file = "simplejson-3.19.2-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:8434dcdd347459f9fd9c526117c01fe7ca7b016b6008dddc3c13471098f4f0dc"},
+ {file = "simplejson-3.19.2-cp36-cp36m-win32.whl", hash = "sha256:c9ac1c2678abf9270e7228133e5b77c6c3c930ad33a3c1dfbdd76ff2c33b7b50"},
+ {file = "simplejson-3.19.2-cp36-cp36m-win_amd64.whl", hash = "sha256:92c4a4a2b1f4846cd4364855cbac83efc48ff5a7d7c06ba014c792dd96483f6f"},
+ {file = "simplejson-3.19.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:0d551dc931638e2102b8549836a1632e6e7cf620af3d093a7456aa642bff601d"},
+ {file = "simplejson-3.19.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:73a8a4653f2e809049999d63530180d7b5a344b23a793502413ad1ecea9a0290"},
+ {file = "simplejson-3.19.2-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:40847f617287a38623507d08cbcb75d51cf9d4f9551dd6321df40215128325a3"},
+ {file = "simplejson-3.19.2-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:be893258d5b68dd3a8cba8deb35dc6411db844a9d35268a8d3793b9d9a256f80"},
+ {file = "simplejson-3.19.2-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e9eb3cff1b7d71aa50c89a0536f469cb8d6dcdd585d8f14fb8500d822f3bdee4"},
+ {file = "simplejson-3.19.2-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:d0f402e787e6e7ee7876c8b05e2fe6464820d9f35ba3f172e95b5f8b699f6c7f"},
+ {file = "simplejson-3.19.2-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:fbbcc6b0639aa09b9649f36f1bcb347b19403fe44109948392fbb5ea69e48c3e"},
+ {file = "simplejson-3.19.2-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:2fc697be37585eded0c8581c4788fcfac0e3f84ca635b73a5bf360e28c8ea1a2"},
+ {file = "simplejson-3.19.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:0b0a3eb6dd39cce23801a50c01a0976971498da49bc8a0590ce311492b82c44b"},
+ {file = "simplejson-3.19.2-cp37-cp37m-win32.whl", hash = "sha256:49f9da0d6cd17b600a178439d7d2d57c5ef01f816b1e0e875e8e8b3b42db2693"},
+ {file = "simplejson-3.19.2-cp37-cp37m-win_amd64.whl", hash = "sha256:c87c22bd6a987aca976e3d3e23806d17f65426191db36d40da4ae16a6a494cbc"},
+ {file = "simplejson-3.19.2-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:9e4c166f743bb42c5fcc60760fb1c3623e8fda94f6619534217b083e08644b46"},
+ {file = "simplejson-3.19.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:0a48679310e1dd5c9f03481799311a65d343748fe86850b7fb41df4e2c00c087"},
+ {file = "simplejson-3.19.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:c0521e0f07cb56415fdb3aae0bbd8701eb31a9dfef47bb57206075a0584ab2a2"},
+ {file = "simplejson-3.19.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0d2d5119b1d7a1ed286b8af37357116072fc96700bce3bec5bb81b2e7057ab41"},
+ {file = "simplejson-3.19.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2c1467d939932901a97ba4f979e8f2642415fcf02ea12f53a4e3206c9c03bc17"},
+ {file = "simplejson-3.19.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:49aaf4546f6023c44d7e7136be84a03a4237f0b2b5fb2b17c3e3770a758fc1a0"},
+ {file = "simplejson-3.19.2-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:60848ab779195b72382841fc3fa4f71698a98d9589b0a081a9399904487b5832"},
+ {file = "simplejson-3.19.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:0436a70d8eb42bea4fe1a1c32d371d9bb3b62c637969cb33970ad624d5a3336a"},
+ {file = "simplejson-3.19.2-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:49e0e3faf3070abdf71a5c80a97c1afc059b4f45a5aa62de0c2ca0444b51669b"},
+ {file = "simplejson-3.19.2-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:ff836cd4041e16003549449cc0a5e372f6b6f871eb89007ab0ee18fb2800fded"},
+ {file = "simplejson-3.19.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:3848427b65e31bea2c11f521b6fc7a3145d6e501a1038529da2391aff5970f2f"},
+ {file = "simplejson-3.19.2-cp38-cp38-win32.whl", hash = "sha256:3f39bb1f6e620f3e158c8b2eaf1b3e3e54408baca96a02fe891794705e788637"},
+ {file = "simplejson-3.19.2-cp38-cp38-win_amd64.whl", hash = "sha256:0405984f3ec1d3f8777c4adc33eac7ab7a3e629f3b1c05fdded63acc7cf01137"},
+ {file = "simplejson-3.19.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:445a96543948c011a3a47c8e0f9d61e9785df2544ea5be5ab3bc2be4bd8a2565"},
+ {file = "simplejson-3.19.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:4a8c3cc4f9dfc33220246760358c8265dad6e1104f25f0077bbca692d616d358"},
+ {file = "simplejson-3.19.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:af9c7e6669c4d0ad7362f79cb2ab6784d71147503e62b57e3d95c4a0f222c01c"},
+ {file = "simplejson-3.19.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:064300a4ea17d1cd9ea1706aa0590dcb3be81112aac30233823ee494f02cb78a"},
+ {file = "simplejson-3.19.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9453419ea2ab9b21d925d0fd7e3a132a178a191881fab4169b6f96e118cc25bb"},
+ {file = "simplejson-3.19.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9e038c615b3906df4c3be8db16b3e24821d26c55177638ea47b3f8f73615111c"},
+ {file = "simplejson-3.19.2-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:16ca9c90da4b1f50f089e14485db8c20cbfff2d55424062791a7392b5a9b3ff9"},
+ {file = "simplejson-3.19.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:1018bd0d70ce85f165185d2227c71e3b1e446186f9fa9f971b69eee223e1e3cd"},
+ {file = "simplejson-3.19.2-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:e8dd53a8706b15bc0e34f00e6150fbefb35d2fd9235d095b4f83b3c5ed4fa11d"},
+ {file = "simplejson-3.19.2-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:2d022b14d7758bfb98405672953fe5c202ea8a9ccf9f6713c5bd0718eba286fd"},
+ {file = "simplejson-3.19.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:febffa5b1eda6622d44b245b0685aff6fb555ce0ed734e2d7b1c3acd018a2cff"},
+ {file = "simplejson-3.19.2-cp39-cp39-win32.whl", hash = "sha256:4edcd0bf70087b244ba77038db23cd98a1ace2f91b4a3ecef22036314d77ac23"},
+ {file = "simplejson-3.19.2-cp39-cp39-win_amd64.whl", hash = "sha256:aad7405c033d32c751d98d3a65801e2797ae77fac284a539f6c3a3e13005edc4"},
+ {file = "simplejson-3.19.2-py3-none-any.whl", hash = "sha256:bcedf4cae0d47839fee7de344f96b5694ca53c786f28b5f773d4f0b265a159eb"},
+ {file = "simplejson-3.19.2.tar.gz", hash = "sha256:9eb442a2442ce417801c912df68e1f6ccfcd41577ae7274953ab3ad24ef7d82c"},
+]
+
+[[package]]
+name = "six"
+version = "1.16.0"
+description = "Python 2 and 3 compatibility utilities"
+optional = false
+python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*"
+files = [
+ {file = "six-1.16.0-py2.py3-none-any.whl", hash = "sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254"},
+ {file = "six-1.16.0.tar.gz", hash = "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926"},
+]
+
+[[package]]
+name = "sniffio"
+version = "1.3.0"
+description = "Sniff out which async library your code is running under"
+optional = false
+python-versions = ">=3.7"
+files = [
+ {file = "sniffio-1.3.0-py3-none-any.whl", hash = "sha256:eecefdce1e5bbfb7ad2eeaabf7c1eeb404d7757c379bd1f7e5cce9d8bf425384"},
+ {file = "sniffio-1.3.0.tar.gz", hash = "sha256:e60305c5e5d314f5389259b7f22aaa33d8f7dee49763119234af3755c55b9101"},
+]
+
+[[package]]
+name = "snowflake-connector-python"
+version = "3.12.0"
+description = "Snowflake Connector for Python"
+optional = false
+python-versions = ">=3.8"
+files = [
+ {file = "snowflake_connector_python-3.12.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:edf28df8be24845cfcec653b160d2b8c048d5cb0c85b051f4957f0b0aae1e493"},
+ {file = "snowflake_connector_python-3.12.0-cp310-cp310-macosx_11_0_x86_64.whl", hash = "sha256:c2bbdbbb028d7d542815ed68b28200728aa6707b9354e3a447fdc8c7a34bcdce"},
+ {file = "snowflake_connector_python-3.12.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a92c9a19a23033df709e63baa6ccdf6eff65210143a8c9c67a0a24bba862034b"},
+ {file = "snowflake_connector_python-3.12.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8d33d845e4c68d33e73a9f64100b53342c18607ac25c4f2a27dbed2078078d12"},
+ {file = "snowflake_connector_python-3.12.0-cp310-cp310-win_amd64.whl", hash = "sha256:c1d43bfaa885aab712f14f9ced232abe5023adfca7fbf7a7a0768a162523e9d6"},
+ {file = "snowflake_connector_python-3.12.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:6a0cc03fb44808f3ddc464ee272f141564c8daea14475e1df5c2a54c7acb2ddf"},
+ {file = "snowflake_connector_python-3.12.0-cp311-cp311-macosx_11_0_x86_64.whl", hash = "sha256:564752d22accc43351b50f676b03aa9f2b441be2641e3cf9a7790faf54eff210"},
+ {file = "snowflake_connector_python-3.12.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f27d6a1a180832c7b551d38df1094a70fb79917f90c57893b9ce7e219362f6c1"},
+ {file = "snowflake_connector_python-3.12.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:60675fd83022daef40541d717d006695149c512b283e35741b61a4f48ba537e9"},
+ {file = "snowflake_connector_python-3.12.0-cp311-cp311-win_amd64.whl", hash = "sha256:a567b937b0179d1e95a8ad7200943d286f38d0e76df90af10f747ed9149dd681"},
+ {file = "snowflake_connector_python-3.12.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:dc333fcfc383a8cab8bd7e890a7c76703e26598925a05954c75d2c50bff06071"},
+ {file = "snowflake_connector_python-3.12.0-cp312-cp312-macosx_11_0_x86_64.whl", hash = "sha256:3c06bfba4a329fd4ec3feba0ada7b31f86ed4e156a9766bced52c2814d001fd2"},
+ {file = "snowflake_connector_python-3.12.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:acf84b07dd2f22adfaa7d52ccd6be1722bd5a0e2b1a9b08681c3851bea05768f"},
+ {file = "snowflake_connector_python-3.12.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:019b8a61e5af689451d502df2af8793fc6f20b5b0a3548fd8ad03aa8b62e7f2d"},
+ {file = "snowflake_connector_python-3.12.0-cp312-cp312-win_amd64.whl", hash = "sha256:45f9b9678694f10571c1f7ec7d0d741663ad0ff61a71ae53aa71be47faa19978"},
+ {file = "snowflake_connector_python-3.12.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:21cbaef51fbed719de01155079df3d004cee963d3723c1ebdb8980923f893e04"},
+ {file = "snowflake_connector_python-3.12.0-cp38-cp38-macosx_11_0_x86_64.whl", hash = "sha256:c86d4a7d49f42ea0bb34218cb49c401ba995892abcfb509ea749cd0a74a8b28a"},
+ {file = "snowflake_connector_python-3.12.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1aa34aec0f96d7fc7271e38c68ee0d58529875d05e084afb4fc8f09b694643c4"},
+ {file = "snowflake_connector_python-3.12.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c2f621030b26a220711c64518e00059736b79c1da53afa6a8ce68b31c1941014"},
+ {file = "snowflake_connector_python-3.12.0-cp38-cp38-win_amd64.whl", hash = "sha256:368e46f1d079056e028bfe8f7171fabef62eb00bcf590df294220b7a5be5d56c"},
+ {file = "snowflake_connector_python-3.12.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:2735e16fffded0900f7484030613b79699afc1ed4e5cff086bd139a0ce965594"},
+ {file = "snowflake_connector_python-3.12.0-cp39-cp39-macosx_11_0_x86_64.whl", hash = "sha256:c06a8e2e12284b4a4d462d0073fb4983e90ad2d6a2382926f9e3409f06c81d0b"},
+ {file = "snowflake_connector_python-3.12.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:880e6e95171cd7374a86da14132fdfc4b622665f134561f4d43e3f35bdacf67d"},
+ {file = "snowflake_connector_python-3.12.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6e245b84c164433454ce49d78e6bcf5c2e62e25657358bf34ab533166e588f80"},
+ {file = "snowflake_connector_python-3.12.0-cp39-cp39-win_amd64.whl", hash = "sha256:85a5565b8813d164f33f32a825a70443008fe009aae050307f128a1ca892f9ed"},
+ {file = "snowflake_connector_python-3.12.0.tar.gz", hash = "sha256:320e0b6f8cd8556e19c8b87249c931700238b2958313afc7a33108d67da87d82"},
+]
+
+[package.dependencies]
+asn1crypto = ">0.24.0,<2.0.0"
+certifi = ">=2017.4.17"
+cffi = ">=1.9,<2.0.0"
+charset-normalizer = ">=2,<4"
+cryptography = ">=3.1.0,<43.0.0"
+filelock = ">=3.5,<4"
+idna = ">=2.5,<4"
+packaging = "*"
+platformdirs = ">=2.6.0,<5.0.0"
+pyjwt = "<3.0.0"
+pyOpenSSL = ">=16.2.0,<25.0.0"
+pytz = "*"
+requests = "<3.0.0"
+sortedcontainers = ">=2.4.0"
+tomlkit = "*"
+typing-extensions = ">=4.3,<5"
+urllib3 = {version = ">=1.21.1,<2.0.0", markers = "python_version < \"3.10\""}
+
+[package.extras]
+development = ["Cython", "coverage", "more-itertools", "numpy (<1.27.0)", "pendulum (!=2.1.1)", "pexpect", "pytest (<7.5.0)", "pytest-cov", "pytest-rerunfailures", "pytest-timeout", "pytest-xdist", "pytzdata"]
+pandas = ["pandas (>=1.0.0,<3.0.0)", "pyarrow"]
+secure-local-storage = ["keyring (>=23.1.0,<26.0.0)"]
+
+[[package]]
+name = "sortedcontainers"
+version = "2.4.0"
+description = "Sorted Containers -- Sorted List, Sorted Dict, Sorted Set"
+optional = false
+python-versions = "*"
+files = [
+ {file = "sortedcontainers-2.4.0-py2.py3-none-any.whl", hash = "sha256:a163dcaede0f1c021485e957a39245190e74249897e2ae4b2aa38595db237ee0"},
+ {file = "sortedcontainers-2.4.0.tar.gz", hash = "sha256:25caa5a06cc30b6b83d11423433f65d1f9d76c4c6a0c90e3379eaa43b9bfdb88"},
+]
+
+[[package]]
+name = "sqlalchemy"
+version = "1.3.24"
+description = "Database Abstraction Library"
+optional = false
+python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*"
+files = [
+ {file = "SQLAlchemy-1.3.24-cp27-cp27m-macosx_10_14_x86_64.whl", hash = "sha256:87a2725ad7d41cd7376373c15fd8bf674e9c33ca56d0b8036add2d634dba372e"},
+ {file = "SQLAlchemy-1.3.24-cp27-cp27m-win32.whl", hash = "sha256:f597a243b8550a3a0b15122b14e49d8a7e622ba1c9d29776af741f1845478d79"},
+ {file = "SQLAlchemy-1.3.24-cp27-cp27m-win_amd64.whl", hash = "sha256:fc4cddb0b474b12ed7bdce6be1b9edc65352e8ce66bc10ff8cbbfb3d4047dbf4"},
+ {file = "SQLAlchemy-1.3.24-cp35-cp35m-macosx_10_14_x86_64.whl", hash = "sha256:f1149d6e5c49d069163e58a3196865e4321bad1803d7886e07d8710de392c548"},
+ {file = "SQLAlchemy-1.3.24-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:14f0eb5db872c231b20c18b1e5806352723a3a89fb4254af3b3e14f22eaaec75"},
+ {file = "SQLAlchemy-1.3.24-cp35-cp35m-manylinux2010_x86_64.whl", hash = "sha256:e98d09f487267f1e8d1179bf3b9d7709b30a916491997137dd24d6ae44d18d79"},
+ {file = "SQLAlchemy-1.3.24-cp35-cp35m-manylinux2014_aarch64.whl", hash = "sha256:fc1f2a5a5963e2e73bac4926bdaf7790c4d7d77e8fc0590817880e22dd9d0b8b"},
+ {file = "SQLAlchemy-1.3.24-cp35-cp35m-win32.whl", hash = "sha256:f3c5c52f7cb8b84bfaaf22d82cb9e6e9a8297f7c2ed14d806a0f5e4d22e83fb7"},
+ {file = "SQLAlchemy-1.3.24-cp35-cp35m-win_amd64.whl", hash = "sha256:0352db1befcbed2f9282e72843f1963860bf0e0472a4fa5cf8ee084318e0e6ab"},
+ {file = "SQLAlchemy-1.3.24-cp36-cp36m-macosx_10_14_x86_64.whl", hash = "sha256:2ed6343b625b16bcb63c5b10523fd15ed8934e1ed0f772c534985e9f5e73d894"},
+ {file = "SQLAlchemy-1.3.24-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:34fcec18f6e4b24b4a5f6185205a04f1eab1e56f8f1d028a2a03694ebcc2ddd4"},
+ {file = "SQLAlchemy-1.3.24-cp36-cp36m-manylinux2010_x86_64.whl", hash = "sha256:e47e257ba5934550d7235665eee6c911dc7178419b614ba9e1fbb1ce6325b14f"},
+ {file = "SQLAlchemy-1.3.24-cp36-cp36m-manylinux2014_aarch64.whl", hash = "sha256:816de75418ea0953b5eb7b8a74933ee5a46719491cd2b16f718afc4b291a9658"},
+ {file = "SQLAlchemy-1.3.24-cp36-cp36m-win32.whl", hash = "sha256:26155ea7a243cbf23287f390dba13d7927ffa1586d3208e0e8d615d0c506f996"},
+ {file = "SQLAlchemy-1.3.24-cp36-cp36m-win_amd64.whl", hash = "sha256:f03bd97650d2e42710fbe4cf8a59fae657f191df851fc9fc683ecef10746a375"},
+ {file = "SQLAlchemy-1.3.24-cp37-cp37m-macosx_10_14_x86_64.whl", hash = "sha256:a006d05d9aa052657ee3e4dc92544faae5fcbaafc6128217310945610d862d39"},
+ {file = "SQLAlchemy-1.3.24-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:1e2f89d2e5e3c7a88e25a3b0e43626dba8db2aa700253023b82e630d12b37109"},
+ {file = "SQLAlchemy-1.3.24-cp37-cp37m-manylinux2010_x86_64.whl", hash = "sha256:0d5d862b1cfbec5028ce1ecac06a3b42bc7703eb80e4b53fceb2738724311443"},
+ {file = "SQLAlchemy-1.3.24-cp37-cp37m-manylinux2014_aarch64.whl", hash = "sha256:0172423a27fbcae3751ef016663b72e1a516777de324a76e30efa170dbd3dd2d"},
+ {file = "SQLAlchemy-1.3.24-cp37-cp37m-win32.whl", hash = "sha256:d37843fb8df90376e9e91336724d78a32b988d3d20ab6656da4eb8ee3a45b63c"},
+ {file = "SQLAlchemy-1.3.24-cp37-cp37m-win_amd64.whl", hash = "sha256:c10ff6112d119f82b1618b6dc28126798481b9355d8748b64b9b55051eb4f01b"},
+ {file = "SQLAlchemy-1.3.24-cp38-cp38-macosx_10_14_x86_64.whl", hash = "sha256:861e459b0e97673af6cc5e7f597035c2e3acdfb2608132665406cded25ba64c7"},
+ {file = "SQLAlchemy-1.3.24-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:5de2464c254380d8a6c20a2746614d5a436260be1507491442cf1088e59430d2"},
+ {file = "SQLAlchemy-1.3.24-cp38-cp38-manylinux2010_x86_64.whl", hash = "sha256:d375d8ccd3cebae8d90270f7aa8532fe05908f79e78ae489068f3b4eee5994e8"},
+ {file = "SQLAlchemy-1.3.24-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:014ea143572fee1c18322b7908140ad23b3994036ef4c0d630110faf942652f8"},
+ {file = "SQLAlchemy-1.3.24-cp38-cp38-win32.whl", hash = "sha256:6607ae6cd3a07f8a4c3198ffbf256c261661965742e2b5265a77cd5c679c9bba"},
+ {file = "SQLAlchemy-1.3.24-cp38-cp38-win_amd64.whl", hash = "sha256:fcb251305fa24a490b6a9ee2180e5f8252915fb778d3dafc70f9cc3f863827b9"},
+ {file = "SQLAlchemy-1.3.24-cp39-cp39-macosx_10_14_x86_64.whl", hash = "sha256:01aa5f803db724447c1d423ed583e42bf5264c597fd55e4add4301f163b0be48"},
+ {file = "SQLAlchemy-1.3.24-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:4d0e3515ef98aa4f0dc289ff2eebb0ece6260bbf37c2ea2022aad63797eacf60"},
+ {file = "SQLAlchemy-1.3.24-cp39-cp39-manylinux2010_x86_64.whl", hash = "sha256:bce28277f308db43a6b4965734366f533b3ff009571ec7ffa583cb77539b84d6"},
+ {file = "SQLAlchemy-1.3.24-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:8110e6c414d3efc574543109ee618fe2c1f96fa31833a1ff36cc34e968c4f233"},
+ {file = "SQLAlchemy-1.3.24-cp39-cp39-win32.whl", hash = "sha256:ee5f5188edb20a29c1cc4a039b074fdc5575337c9a68f3063449ab47757bb064"},
+ {file = "SQLAlchemy-1.3.24-cp39-cp39-win_amd64.whl", hash = "sha256:09083c2487ca3c0865dc588e07aeaa25416da3d95f7482c07e92f47e080aa17b"},
+ {file = "SQLAlchemy-1.3.24.tar.gz", hash = "sha256:ebbb777cbf9312359b897bf81ba00dae0f5cb69fba2a18265dcc18a6f5ef7519"},
+]
+
+[package.extras]
+mssql = ["pyodbc"]
+mssql-pymssql = ["pymssql"]
+mssql-pyodbc = ["pyodbc"]
+mysql = ["mysqlclient"]
+oracle = ["cx-oracle"]
+postgresql = ["psycopg2"]
+postgresql-pg8000 = ["pg8000 (<1.16.6)"]
+postgresql-psycopg2binary = ["psycopg2-binary"]
+postgresql-psycopg2cffi = ["psycopg2cffi"]
+pymysql = ["pymysql", "pymysql (<1)"]
+
+[[package]]
+name = "sqlalchemy-searchable"
+version = "1.2.0"
+description = "Provides fulltext search capabilities for declarative SQLAlchemy models."
+optional = false
+python-versions = "*"
+files = [
+ {file = "SQLAlchemy-Searchable-1.2.0.tar.gz", hash = "sha256:597de9d1356e8a0a8b3be7be892adee422e7419603f25c40a7ab5c16bd75f77d"},
+]
+
+[package.dependencies]
+SQLAlchemy = ">=0.9.0"
+SQLAlchemy-Utils = ">=0.29.0"
+validators = ">=0.3.0"
+
+[package.extras]
+test = ["flake8 (>=2.4.0)", "isort (>=3.9.6)", "psycopg2 (>=2.4.6)", "pytest (>=2.2.3)"]
+
+[[package]]
+name = "sqlalchemy-utils"
+version = "0.38.3"
+description = "Various utility functions for SQLAlchemy."
+optional = false
+python-versions = "~=3.6"
+files = [
+ {file = "SQLAlchemy-Utils-0.38.3.tar.gz", hash = "sha256:9f9afba607a40455cf703adfa9846584bf26168a0c5a60a70063b70d65051f4d"},
+ {file = "SQLAlchemy_Utils-0.38.3-py3-none-any.whl", hash = "sha256:5c13b5d08adfaa85f3d4e8ec09a75136216fad41346980d02974a70a77988bf9"},
+]
+
+[package.dependencies]
+SQLAlchemy = ">=1.3"
+
+[package.extras]
+arrow = ["arrow (>=0.3.4)"]
+babel = ["Babel (>=1.3)"]
+color = ["colour (>=0.0.4)"]
+encrypted = ["cryptography (>=0.6)"]
+intervals = ["intervals (>=0.7.1)"]
+password = ["passlib (>=1.6,<2.0)"]
+pendulum = ["pendulum (>=2.0.5)"]
+phone = ["phonenumbers (>=5.9.2)"]
+test = ["Jinja2 (>=2.3)", "Pygments (>=1.2)", "backports.zoneinfo", "docutils (>=0.10)", "flake8 (>=2.4.0)", "flexmock (>=0.9.7)", "isort (>=4.2.2)", "pg8000 (>=1.12.4)", "psycopg2 (>=2.5.1)", "psycopg2cffi (>=2.8.1)", "pymysql", "pyodbc", "pytest (>=2.7.1)", "python-dateutil (>=2.6)", "pytz (>=2014.2)"]
+test-all = ["Babel (>=1.3)", "Jinja2 (>=2.3)", "Pygments (>=1.2)", "arrow (>=0.3.4)", "backports.zoneinfo", "colour (>=0.0.4)", "cryptography (>=0.6)", "docutils (>=0.10)", "flake8 (>=2.4.0)", "flexmock (>=0.9.7)", "furl (>=0.4.1)", "intervals (>=0.7.1)", "isort (>=4.2.2)", "passlib (>=1.6,<2.0)", "pendulum (>=2.0.5)", "pg8000 (>=1.12.4)", "phonenumbers (>=5.9.2)", "psycopg2 (>=2.5.1)", "psycopg2cffi (>=2.8.1)", "pymysql", "pyodbc", "pytest (>=2.7.1)", "python-dateutil", "python-dateutil (>=2.6)", "pytz (>=2014.2)"]
+timezone = ["python-dateutil"]
+url = ["furl (>=0.4.1)"]
+
+[[package]]
+name = "sqlparse"
+version = "0.5.0"
+description = "A non-validating SQL parser."
+optional = false
+python-versions = ">=3.8"
+files = [
+ {file = "sqlparse-0.5.0-py3-none-any.whl", hash = "sha256:c204494cd97479d0e39f28c93d46c0b2d5959c7b9ab904762ea6c7af211c8663"},
+ {file = "sqlparse-0.5.0.tar.gz", hash = "sha256:714d0a4932c059d16189f58ef5411ec2287a4360f17cdd0edd2d09d4c5087c93"},
+]
+
+[package.extras]
+dev = ["build", "hatch"]
+doc = ["sphinx"]
+
+[[package]]
+name = "sshtunnel"
+version = "0.1.5"
+description = "Pure python SSH tunnels"
+optional = false
+python-versions = "*"
+files = [
+ {file = "sshtunnel-0.1.5-py2.py3-none-any.whl", hash = "sha256:5eee2e414c3fd9e9ef5d058bebece272a6aae928849ef7f2d9561b7fffab7aea"},
+ {file = "sshtunnel-0.1.5.tar.gz", hash = "sha256:c813fdcda8e81c3936ffeac47cb69cfb2d1f5e77ad0de656c6dab56aeebd9249"},
+]
+
+[package.dependencies]
+paramiko = ">=1.15.2"
+
+[package.extras]
+build-sphinx = ["sphinx", "sphinxcontrib-napoleon"]
+dev = ["check-manifest"]
+test = ["tox (>=1.8.1)"]
+
+[[package]]
+name = "statsd"
+version = "3.3.0"
+description = "A simple statsd client."
+optional = false
+python-versions = "*"
+files = [
+ {file = "statsd-3.3.0-py2.py3-none-any.whl", hash = "sha256:c610fb80347fca0ef62666d241bce64184bd7cc1efe582f9690e045c25535eaa"},
+ {file = "statsd-3.3.0.tar.gz", hash = "sha256:e3e6db4c246f7c59003e51c9720a51a7f39a396541cb9b147ff4b14d15b5dd1f"},
+]
+
+[[package]]
+name = "supervisor"
+version = "4.1.0"
+description = "A system for controlling process state under UNIX"
+optional = false
+python-versions = "*"
+files = [
+ {file = "supervisor-4.1.0-py2.py3-none-any.whl", hash = "sha256:a76b2f77a560f2dc411c0254a4eb15f555e99faac48621b0f1fc9ab013944f47"},
+ {file = "supervisor-4.1.0.tar.gz", hash = "sha256:2dc86fe0476e945e61483d614ceb2cf4f93b95282eb243bdf792621994360383"},
+]
+
+[package.extras]
+testing = ["mock", "pytest", "pytest-cov"]
+
+[[package]]
+name = "supervisor-checks"
+version = "0.8.1"
+description = "Framework to build health checks for Supervisor-based services."
+optional = false
+python-versions = "*"
+files = [
+ {file = "supervisor_checks-0.8.1.tar.gz", hash = "sha256:1474150aed0acdea726cc9ffdf6b728e2ed8aa8ef89d8d979cd2fb8f4444d987"},
+]
+
+[package.dependencies]
+psutil = "*"
+
+[package.extras]
+test = ["psutil"]
+
+[[package]]
+name = "td-client"
+version = "1.0.0"
+description = "Treasure Data API library for Python"
+optional = false
+python-versions = ">=3.5"
+files = [
+ {file = "td-client-1.0.0.tar.gz", hash = "sha256:48842cb4b22dd7fece4e1d2593985b9a151796cfff504bf2de24d04dafbea1ab"},
+ {file = "td_client-1.0.0-py3-none-any.whl", hash = "sha256:017dc6d21eb44e8ef5f3a9e0e313a37760e83dc0df3684b49d29cb73fdac89d6"},
+]
+
+[package.dependencies]
+msgpack = ">=0.5.2"
+python-dateutil = "*"
+urllib3 = "*"
+
+[package.extras]
+dev = ["black (==19.3b0)", "isort"]
+docs = ["sphinx", "sphinx-rtd-theme"]
+
+[[package]]
+name = "tenacity"
+version = "8.2.3"
+description = "Retry code until it succeeds"
+optional = false
+python-versions = ">=3.7"
+files = [
+ {file = "tenacity-8.2.3-py3-none-any.whl", hash = "sha256:ce510e327a630c9e1beaf17d42e6ffacc88185044ad85cf74c0a8887c6a0f88c"},
+ {file = "tenacity-8.2.3.tar.gz", hash = "sha256:5398ef0d78e63f40007c1fb4c0bff96e1911394d2fa8d194f77619c05ff6cc8a"},
+]
+
+[package.extras]
+doc = ["reno", "sphinx", "tornado (>=4.5)"]
+
+[[package]]
+name = "thrift"
+version = "0.16.0"
+description = "Python bindings for the Apache Thrift RPC system"
+optional = false
+python-versions = "*"
+files = [
+ {file = "thrift-0.16.0.tar.gz", hash = "sha256:2b5b6488fcded21f9d312aa23c9ff6a0195d0f6ae26ddbd5ad9e3e25dfc14408"},
+]
+
+[package.dependencies]
+six = ">=1.7.2"
+
+[package.extras]
+all = ["tornado (>=4.0)", "twisted"]
+tornado = ["tornado (>=4.0)"]
+twisted = ["twisted"]
+
+[[package]]
+name = "thrift-sasl"
+version = "0.4.3"
+description = "Thrift SASL Python module that implements SASL transports for Thrift (`TSaslClientTransport`)."
+optional = false
+python-versions = "*"
+files = [
+ {file = "thrift_sasl-0.4.3-py2.py3-none-any.whl", hash = "sha256:d24b49140115e6e2a96d08335cff225a27a28ea71866fb1b2bdb30ca5afca64e"},
+ {file = "thrift_sasl-0.4.3.tar.gz", hash = "sha256:5bdd5b760d90a13d9b3abfce873db0425861aa8d6bf25912d3cc0467a4f773da"},
+]
+
+[package.dependencies]
+pure-sasl = ">=0.6.2"
+six = ">=1.13.0"
+thrift = {version = ">=0.10.0", markers = "python_version >= \"3.0\""}
+
+[[package]]
+name = "thriftpy2"
+version = "0.4.17"
+description = "Pure python implementation of Apache Thrift."
+optional = false
+python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*"
+files = [
+ {file = "thriftpy2-0.4.17-cp311-cp311-macosx_13_0_arm64.whl", hash = "sha256:9692cbf2025e1e20d0cb30d759589c8c13f4fa6806a38af13970c28a3bb8a2f9"},
+ {file = "thriftpy2-0.4.17.tar.gz", hash = "sha256:190f35c32da9146d1fdd822f46b6a0ad543572ea405ca6853b4ec7b128efbc0d"},
+]
+
+[package.dependencies]
+ply = ">=3.4,<4.0"
+six = ">=1.15,<2.0"
+
+[package.extras]
+dev = ["flake8 (>=2.5)", "pytest (>=2.8)", "pytest (>=6.1.1)", "sphinx (>=1.3)", "sphinx-rtd-theme (>=0.1.9)", "tornado (>=4.0,<6.0)"]
+tornado = ["tornado (>=4.0,<6.0)"]
+
+[[package]]
+name = "tomli"
+version = "2.0.1"
+description = "A lil' TOML parser"
+optional = false
+python-versions = ">=3.7"
+files = [
+ {file = "tomli-2.0.1-py3-none-any.whl", hash = "sha256:939de3e7a6161af0c887ef91b7d41a53e7c5a1ca976325f429cb46ea9bc30ecc"},
+ {file = "tomli-2.0.1.tar.gz", hash = "sha256:de526c12914f0c550d15924c62d72abc48d6fe7364aa87328337a31007fe8a4f"},
+]
+
+[[package]]
+name = "tomlkit"
+version = "0.13.0"
+description = "Style preserving TOML library"
+optional = false
+python-versions = ">=3.8"
+files = [
+ {file = "tomlkit-0.13.0-py3-none-any.whl", hash = "sha256:7075d3042d03b80f603482d69bf0c8f345c2b30e41699fd8883227f89972b264"},
+ {file = "tomlkit-0.13.0.tar.gz", hash = "sha256:08ad192699734149f5b97b45f1f18dad7eb1b6d16bc72ad0c2335772650d7b72"},
+]
+
+[[package]]
+name = "trino"
+version = "0.327.0"
+description = "Client for the Trino distributed SQL Engine"
+optional = false
+python-versions = ">=3.7"
+files = [
+ {file = "trino-0.327.0-py3-none-any.whl", hash = "sha256:56d253a814bd5da545cc68e1bc8c28c0b80f07df6411aa0424197c025c78998e"},
+ {file = "trino-0.327.0.tar.gz", hash = "sha256:07370044158cb95f6f6b03720a1afb8980b75092a59025fe602af9858c2fd4a0"},
+]
+
+[package.dependencies]
+"backports.zoneinfo" = {version = "*", markers = "python_version < \"3.9\""}
+python-dateutil = "*"
+pytz = "*"
+requests = ">=2.31.0"
+tzlocal = "*"
+
+[package.extras]
+all = ["requests-kerberos", "sqlalchemy (>=1.3)"]
+external-authentication-token-cache = ["keyring"]
+kerberos = ["requests-kerberos"]
+sqlalchemy = ["sqlalchemy (>=1.3)"]
+tests = ["black", "httpretty (<1.1)", "isort", "pre-commit", "pytest", "pytest-runner", "requests-kerberos", "sqlalchemy (>=1.3)"]
+
+[[package]]
+name = "typing-extensions"
+version = "4.9.0"
+description = "Backported and Experimental Type Hints for Python 3.8+"
+optional = false
+python-versions = ">=3.8"
+files = [
+ {file = "typing_extensions-4.9.0-py3-none-any.whl", hash = "sha256:af72aea155e91adfc61c3ae9e0e342dbc0cba726d6cba4b6c72c1f34e47291cd"},
+ {file = "typing_extensions-4.9.0.tar.gz", hash = "sha256:23478f88c37f27d76ac8aee6c905017a143b0b1b886c3c9f66bc2fd94f9f5783"},
+]
+
+[[package]]
+name = "tzdata"
+version = "2023.4"
+description = "Provider of IANA time zone data"
+optional = false
+python-versions = ">=2"
+files = [
+ {file = "tzdata-2023.4-py2.py3-none-any.whl", hash = "sha256:aa3ace4329eeacda5b7beb7ea08ece826c28d761cda36e747cfbf97996d39bf3"},
+ {file = "tzdata-2023.4.tar.gz", hash = "sha256:dd54c94f294765522c77399649b4fefd95522479a664a0cec87f41bebc6148c9"},
+]
+
+[[package]]
+name = "tzlocal"
+version = "4.3.1"
+description = "tzinfo object for the local timezone"
+optional = false
+python-versions = ">=3.7"
+files = [
+ {file = "tzlocal-4.3.1-py3-none-any.whl", hash = "sha256:67d7e7f4ce0a98e9dfde2e02474c60fe846ed032d78b555c554c2e9cba472d84"},
+ {file = "tzlocal-4.3.1.tar.gz", hash = "sha256:ee32ef8c20803c19a96ed366addd3d4a729ef6309cb5c7359a0cc2eeeb7fa46a"},
+]
+
+[package.dependencies]
+"backports.zoneinfo" = {version = "*", markers = "python_version < \"3.9\""}
+pytz-deprecation-shim = "*"
+tzdata = {version = "*", markers = "platform_system == \"Windows\""}
+
+[package.extras]
+devenv = ["black", "check-manifest", "flake8", "pyroma", "pytest (>=4.3)", "pytest-cov", "pytest-mock (>=3.3)", "zest.releaser"]
+
+[[package]]
+name = "ua-parser"
+version = "0.18.0"
+description = "Python port of Browserscope's user agent parser"
+optional = false
+python-versions = "*"
+files = [
+ {file = "ua-parser-0.18.0.tar.gz", hash = "sha256:db51f1b59bfaa82ed9e2a1d99a54d3e4153dddf99ac1435d51828165422e624e"},
+ {file = "ua_parser-0.18.0-py2.py3-none-any.whl", hash = "sha256:9d94ac3a80bcb0166823956a779186c746b50ea4c9fd9bf30fdb758553c38950"},
+]
+
+[[package]]
+name = "uritemplate"
+version = "3.0.1"
+description = "URI templates"
+optional = false
+python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*"
+files = [
+ {file = "uritemplate-3.0.1-py2.py3-none-any.whl", hash = "sha256:07620c3f3f8eed1f12600845892b0e036a2420acf513c53f7de0abd911a5894f"},
+ {file = "uritemplate-3.0.1.tar.gz", hash = "sha256:5af8ad10cec94f215e3f48112de2022e1d5a37ed427fbd88652fa908f2ab7cae"},
+]
+
+[[package]]
+name = "urllib3"
+version = "1.26.19"
+description = "HTTP library with thread-safe connection pooling, file post, and more."
+optional = false
+python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,>=2.7"
+files = [
+ {file = "urllib3-1.26.19-py2.py3-none-any.whl", hash = "sha256:37a0344459b199fce0e80b0d3569837ec6b6937435c5244e7fd73fa6006830f3"},
+ {file = "urllib3-1.26.19.tar.gz", hash = "sha256:3e3d753a8618b86d7de333b4223005f68720bcd6a7d2bcb9fbd2229ec7c1e429"},
+]
+
+[package.extras]
+brotli = ["brotli (==1.0.9)", "brotli (>=1.0.9)", "brotlicffi (>=0.8.0)", "brotlipy (>=0.6.0)"]
+secure = ["certifi", "cryptography (>=1.3.4)", "idna (>=2.0.0)", "ipaddress", "pyOpenSSL (>=0.14)", "urllib3-secure-extra"]
+socks = ["PySocks (>=1.5.6,!=1.5.7,<2.0)"]
+
+[[package]]
+name = "user-agents"
+version = "2.0"
+description = "A library to identify devices (phones, tablets) and their capabilities by parsing (browser/HTTP) user agent strings"
+optional = false
+python-versions = "*"
+files = [
+ {file = "user-agents-2.0.tar.gz", hash = "sha256:792869b990a244f71efea1cb410ecaba99a270a64c5ac37d365bde5d70d6a2fa"},
+ {file = "user_agents-2.0-py2-none-any.whl", hash = "sha256:7af7419d61ce8f72ad487cc77ea3b4d1dcaa4cbb7b6df533a7c7aa3ccc44e5b3"},
+]
+
+[package.dependencies]
+ua-parser = ">=0.8.0"
+
+[[package]]
+name = "validators"
+version = "0.22.0"
+description = "Python Data Validation for Humans™"
+optional = false
+python-versions = ">=3.8"
+files = [
+ {file = "validators-0.22.0-py3-none-any.whl", hash = "sha256:61cf7d4a62bbae559f2e54aed3b000cea9ff3e2fdbe463f51179b92c58c9585a"},
+ {file = "validators-0.22.0.tar.gz", hash = "sha256:77b2689b172eeeb600d9605ab86194641670cdb73b60afd577142a9397873370"},
+]
+
+[package.extras]
+docs-offline = ["myst-parser (>=2.0.0)", "pypandoc-binary (>=1.11)", "sphinx (>=7.1.1)"]
+docs-online = ["mkdocs (>=1.5.2)", "mkdocs-git-revision-date-localized-plugin (>=1.2.0)", "mkdocs-material (>=9.2.6)", "mkdocstrings[python] (>=0.22.0)", "pyaml (>=23.7.0)"]
+hooks = ["pre-commit (>=3.3.3)"]
+package = ["build (>=1.0.0)", "twine (>=4.0.2)"]
+runner = ["tox (>=4.11.1)"]
+sast = ["bandit[toml] (>=1.7.5)"]
+testing = ["pytest (>=7.4.0)"]
+tooling = ["black (>=23.7.0)", "pyright (>=1.1.325)", "ruff (>=0.0.287)"]
+tooling-extras = ["pyaml (>=23.7.0)", "pypandoc-binary (>=1.11)", "pytest (>=7.4.0)"]
+
+[[package]]
+name = "vertica-python"
+version = "1.1.1"
+description = "Official native Python client for the Vertica database."
+optional = false
+python-versions = "*"
+files = [
+ {file = "vertica-python-1.1.1.tar.gz", hash = "sha256:dedf56d76b67673b4d57a13f7f96ebdc57b39ea650b93ebf0c05eb6d1d2c0c05"},
+ {file = "vertica_python-1.1.1-py2.py3-none-any.whl", hash = "sha256:63d300832d6fe471987880f06a9590eafc46a1f896860881270f6b6645f3bec6"},
+]
+
+[package.dependencies]
+python-dateutil = ">=1.5"
+six = ">=1.10.0"
+
+[[package]]
+name = "virtualenv"
+version = "20.25.0"
+description = "Virtual Python Environment builder"
+optional = false
+python-versions = ">=3.7"
+files = [
+ {file = "virtualenv-20.25.0-py3-none-any.whl", hash = "sha256:4238949c5ffe6876362d9c0180fc6c3a824a7b12b80604eeb8085f2ed7460de3"},
+ {file = "virtualenv-20.25.0.tar.gz", hash = "sha256:bf51c0d9c7dd63ea8e44086fa1e4fb1093a31e963b86959257378aef020e1f1b"},
+]
+
+[package.dependencies]
+distlib = ">=0.3.7,<1"
+filelock = ">=3.12.2,<4"
+platformdirs = ">=3.9.1,<5"
+
+[package.extras]
+docs = ["furo (>=2023.7.26)", "proselint (>=0.13)", "sphinx (>=7.1.2)", "sphinx-argparse (>=0.4)", "sphinxcontrib-towncrier (>=0.2.1a0)", "towncrier (>=23.6)"]
+test = ["covdefaults (>=2.3)", "coverage (>=7.2.7)", "coverage-enable-subprocess (>=1)", "flaky (>=3.7)", "packaging (>=23.1)", "pytest (>=7.4)", "pytest-env (>=0.8.2)", "pytest-freezer (>=0.4.8)", "pytest-mock (>=3.11.1)", "pytest-randomly (>=3.12)", "pytest-timeout (>=2.1)", "setuptools (>=68)", "time-machine (>=2.10)"]
+
+[[package]]
+name = "watchdog"
+version = "3.0.0"
+description = "Filesystem events monitoring"
+optional = false
+python-versions = ">=3.7"
+files = [
+ {file = "watchdog-3.0.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:336adfc6f5cc4e037d52db31194f7581ff744b67382eb6021c868322e32eef41"},
+ {file = "watchdog-3.0.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:a70a8dcde91be523c35b2bf96196edc5730edb347e374c7de7cd20c43ed95397"},
+ {file = "watchdog-3.0.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:adfdeab2da79ea2f76f87eb42a3ab1966a5313e5a69a0213a3cc06ef692b0e96"},
+ {file = "watchdog-3.0.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:2b57a1e730af3156d13b7fdddfc23dea6487fceca29fc75c5a868beed29177ae"},
+ {file = "watchdog-3.0.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:7ade88d0d778b1b222adebcc0927428f883db07017618a5e684fd03b83342bd9"},
+ {file = "watchdog-3.0.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:7e447d172af52ad204d19982739aa2346245cc5ba6f579d16dac4bfec226d2e7"},
+ {file = "watchdog-3.0.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:9fac43a7466eb73e64a9940ac9ed6369baa39b3bf221ae23493a9ec4d0022674"},
+ {file = "watchdog-3.0.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:8ae9cda41fa114e28faf86cb137d751a17ffd0316d1c34ccf2235e8a84365c7f"},
+ {file = "watchdog-3.0.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:25f70b4aa53bd743729c7475d7ec41093a580528b100e9a8c5b5efe8899592fc"},
+ {file = "watchdog-3.0.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:4f94069eb16657d2c6faada4624c39464f65c05606af50bb7902e036e3219be3"},
+ {file = "watchdog-3.0.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:7c5f84b5194c24dd573fa6472685b2a27cc5a17fe5f7b6fd40345378ca6812e3"},
+ {file = "watchdog-3.0.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:3aa7f6a12e831ddfe78cdd4f8996af9cf334fd6346531b16cec61c3b3c0d8da0"},
+ {file = "watchdog-3.0.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:233b5817932685d39a7896b1090353fc8efc1ef99c9c054e46c8002561252fb8"},
+ {file = "watchdog-3.0.0-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:13bbbb462ee42ec3c5723e1205be8ced776f05b100e4737518c67c8325cf6100"},
+ {file = "watchdog-3.0.0-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:8f3ceecd20d71067c7fd4c9e832d4e22584318983cabc013dbf3f70ea95de346"},
+ {file = "watchdog-3.0.0-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:c9d8c8ec7efb887333cf71e328e39cffbf771d8f8f95d308ea4125bf5f90ba64"},
+ {file = "watchdog-3.0.0-py3-none-manylinux2014_aarch64.whl", hash = "sha256:0e06ab8858a76e1219e68c7573dfeba9dd1c0219476c5a44d5333b01d7e1743a"},
+ {file = "watchdog-3.0.0-py3-none-manylinux2014_armv7l.whl", hash = "sha256:d00e6be486affb5781468457b21a6cbe848c33ef43f9ea4a73b4882e5f188a44"},
+ {file = "watchdog-3.0.0-py3-none-manylinux2014_i686.whl", hash = "sha256:c07253088265c363d1ddf4b3cdb808d59a0468ecd017770ed716991620b8f77a"},
+ {file = "watchdog-3.0.0-py3-none-manylinux2014_ppc64.whl", hash = "sha256:5113334cf8cf0ac8cd45e1f8309a603291b614191c9add34d33075727a967709"},
+ {file = "watchdog-3.0.0-py3-none-manylinux2014_ppc64le.whl", hash = "sha256:51f90f73b4697bac9c9a78394c3acbbd331ccd3655c11be1a15ae6fe289a8c83"},
+ {file = "watchdog-3.0.0-py3-none-manylinux2014_s390x.whl", hash = "sha256:ba07e92756c97e3aca0912b5cbc4e5ad802f4557212788e72a72a47ff376950d"},
+ {file = "watchdog-3.0.0-py3-none-manylinux2014_x86_64.whl", hash = "sha256:d429c2430c93b7903914e4db9a966c7f2b068dd2ebdd2fa9b9ce094c7d459f33"},
+ {file = "watchdog-3.0.0-py3-none-win32.whl", hash = "sha256:3ed7c71a9dccfe838c2f0b6314ed0d9b22e77d268c67e015450a29036a81f60f"},
+ {file = "watchdog-3.0.0-py3-none-win_amd64.whl", hash = "sha256:4c9956d27be0bb08fc5f30d9d0179a855436e655f046d288e2bcc11adfae893c"},
+ {file = "watchdog-3.0.0-py3-none-win_ia64.whl", hash = "sha256:5d9f3a10e02d7371cd929b5d8f11e87d4bad890212ed3901f9b4d68767bee759"},
+ {file = "watchdog-3.0.0.tar.gz", hash = "sha256:4d98a320595da7a7c5a18fc48cb633c2e73cda78f93cac2ef42d42bf609a33f9"},
+]
+
+[package.extras]
+watchmedo = ["PyYAML (>=3.10)"]
+
+[[package]]
+name = "wcwidth"
+version = "0.2.13"
+description = "Measures the displayed width of unicode strings in a terminal"
+optional = false
+python-versions = "*"
+files = [
+ {file = "wcwidth-0.2.13-py2.py3-none-any.whl", hash = "sha256:3da69048e4540d84af32131829ff948f1e022c1c6bdb8d6102117aac784f6859"},
+ {file = "wcwidth-0.2.13.tar.gz", hash = "sha256:72ea0c06399eb286d978fdedb6923a9eb47e1c486ce63e9b4e64fc18303972b5"},
+]
+
+[[package]]
+name = "websocket-client"
+version = "1.7.0"
+description = "WebSocket client for Python with low level API options"
+optional = false
+python-versions = ">=3.8"
+files = [
+ {file = "websocket-client-1.7.0.tar.gz", hash = "sha256:10e511ea3a8c744631d3bd77e61eb17ed09304c413ad42cf6ddfa4c7787e8fe6"},
+ {file = "websocket_client-1.7.0-py3-none-any.whl", hash = "sha256:f4c3d22fec12a2461427a29957ff07d35098ee2d976d3ba244e688b8b4057588"},
+]
+
+[package.extras]
+docs = ["Sphinx (>=6.0)", "sphinx-rtd-theme (>=1.1.0)"]
+optional = ["python-socks", "wsaccel"]
+test = ["websockets"]
+
+[[package]]
+name = "werkzeug"
+version = "2.3.8"
+description = "The comprehensive WSGI web application library."
+optional = false
+python-versions = ">=3.8"
+files = [
+ {file = "werkzeug-2.3.8-py3-none-any.whl", hash = "sha256:bba1f19f8ec89d4d607a3bd62f1904bd2e609472d93cd85e9d4e178f472c3748"},
+ {file = "werkzeug-2.3.8.tar.gz", hash = "sha256:554b257c74bbeb7a0d254160a4f8ffe185243f52a52035060b761ca62d977f03"},
+]
+
+[package.dependencies]
+MarkupSafe = ">=2.1.1"
+
+[package.extras]
+watchdog = ["watchdog (>=2.3)"]
+
+[[package]]
+name = "wrapt"
+version = "1.16.0"
+description = "Module for decorators, wrappers and monkey patching."
+optional = false
+python-versions = ">=3.6"
+files = [
+ {file = "wrapt-1.16.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:ffa565331890b90056c01db69c0fe634a776f8019c143a5ae265f9c6bc4bd6d4"},
+ {file = "wrapt-1.16.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:e4fdb9275308292e880dcbeb12546df7f3e0f96c6b41197e0cf37d2826359020"},
+ {file = "wrapt-1.16.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bb2dee3874a500de01c93d5c71415fcaef1d858370d405824783e7a8ef5db440"},
+ {file = "wrapt-1.16.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2a88e6010048489cda82b1326889ec075a8c856c2e6a256072b28eaee3ccf487"},
+ {file = "wrapt-1.16.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ac83a914ebaf589b69f7d0a1277602ff494e21f4c2f743313414378f8f50a4cf"},
+ {file = "wrapt-1.16.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:73aa7d98215d39b8455f103de64391cb79dfcad601701a3aa0dddacf74911d72"},
+ {file = "wrapt-1.16.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:807cc8543a477ab7422f1120a217054f958a66ef7314f76dd9e77d3f02cdccd0"},
+ {file = "wrapt-1.16.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:bf5703fdeb350e36885f2875d853ce13172ae281c56e509f4e6eca049bdfb136"},
+ {file = "wrapt-1.16.0-cp310-cp310-win32.whl", hash = "sha256:f6b2d0c6703c988d334f297aa5df18c45e97b0af3679bb75059e0e0bd8b1069d"},
+ {file = "wrapt-1.16.0-cp310-cp310-win_amd64.whl", hash = "sha256:decbfa2f618fa8ed81c95ee18a387ff973143c656ef800c9f24fb7e9c16054e2"},
+ {file = "wrapt-1.16.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:1a5db485fe2de4403f13fafdc231b0dbae5eca4359232d2efc79025527375b09"},
+ {file = "wrapt-1.16.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:75ea7d0ee2a15733684badb16de6794894ed9c55aa5e9903260922f0482e687d"},
+ {file = "wrapt-1.16.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a452f9ca3e3267cd4d0fcf2edd0d035b1934ac2bd7e0e57ac91ad6b95c0c6389"},
+ {file = "wrapt-1.16.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:43aa59eadec7890d9958748db829df269f0368521ba6dc68cc172d5d03ed8060"},
+ {file = "wrapt-1.16.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:72554a23c78a8e7aa02abbd699d129eead8b147a23c56e08d08dfc29cfdddca1"},
+ {file = "wrapt-1.16.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:d2efee35b4b0a347e0d99d28e884dfd82797852d62fcd7ebdeee26f3ceb72cf3"},
+ {file = "wrapt-1.16.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:6dcfcffe73710be01d90cae08c3e548d90932d37b39ef83969ae135d36ef3956"},
+ {file = "wrapt-1.16.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:eb6e651000a19c96f452c85132811d25e9264d836951022d6e81df2fff38337d"},
+ {file = "wrapt-1.16.0-cp311-cp311-win32.whl", hash = "sha256:66027d667efe95cc4fa945af59f92c5a02c6f5bb6012bff9e60542c74c75c362"},
+ {file = "wrapt-1.16.0-cp311-cp311-win_amd64.whl", hash = "sha256:aefbc4cb0a54f91af643660a0a150ce2c090d3652cf4052a5397fb2de549cd89"},
+ {file = "wrapt-1.16.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:5eb404d89131ec9b4f748fa5cfb5346802e5ee8836f57d516576e61f304f3b7b"},
+ {file = "wrapt-1.16.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:9090c9e676d5236a6948330e83cb89969f433b1943a558968f659ead07cb3b36"},
+ {file = "wrapt-1.16.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:94265b00870aa407bd0cbcfd536f17ecde43b94fb8d228560a1e9d3041462d73"},
+ {file = "wrapt-1.16.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f2058f813d4f2b5e3a9eb2eb3faf8f1d99b81c3e51aeda4b168406443e8ba809"},
+ {file = "wrapt-1.16.0-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:98b5e1f498a8ca1858a1cdbffb023bfd954da4e3fa2c0cb5853d40014557248b"},
+ {file = "wrapt-1.16.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:14d7dc606219cdd7405133c713f2c218d4252f2a469003f8c46bb92d5d095d81"},
+ {file = "wrapt-1.16.0-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:49aac49dc4782cb04f58986e81ea0b4768e4ff197b57324dcbd7699c5dfb40b9"},
+ {file = "wrapt-1.16.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:418abb18146475c310d7a6dc71143d6f7adec5b004ac9ce08dc7a34e2babdc5c"},
+ {file = "wrapt-1.16.0-cp312-cp312-win32.whl", hash = "sha256:685f568fa5e627e93f3b52fda002c7ed2fa1800b50ce51f6ed1d572d8ab3e7fc"},
+ {file = "wrapt-1.16.0-cp312-cp312-win_amd64.whl", hash = "sha256:dcdba5c86e368442528f7060039eda390cc4091bfd1dca41e8046af7c910dda8"},
+ {file = "wrapt-1.16.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:d462f28826f4657968ae51d2181a074dfe03c200d6131690b7d65d55b0f360f8"},
+ {file = "wrapt-1.16.0-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a33a747400b94b6d6b8a165e4480264a64a78c8a4c734b62136062e9a248dd39"},
+ {file = "wrapt-1.16.0-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b3646eefa23daeba62643a58aac816945cadc0afaf21800a1421eeba5f6cfb9c"},
+ {file = "wrapt-1.16.0-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3ebf019be5c09d400cf7b024aa52b1f3aeebeff51550d007e92c3c1c4afc2a40"},
+ {file = "wrapt-1.16.0-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:0d2691979e93d06a95a26257adb7bfd0c93818e89b1406f5a28f36e0d8c1e1fc"},
+ {file = "wrapt-1.16.0-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:1acd723ee2a8826f3d53910255643e33673e1d11db84ce5880675954183ec47e"},
+ {file = "wrapt-1.16.0-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:bc57efac2da352a51cc4658878a68d2b1b67dbe9d33c36cb826ca449d80a8465"},
+ {file = "wrapt-1.16.0-cp36-cp36m-win32.whl", hash = "sha256:da4813f751142436b075ed7aa012a8778aa43a99f7b36afe9b742d3ed8bdc95e"},
+ {file = "wrapt-1.16.0-cp36-cp36m-win_amd64.whl", hash = "sha256:6f6eac2360f2d543cc875a0e5efd413b6cbd483cb3ad7ebf888884a6e0d2e966"},
+ {file = "wrapt-1.16.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:a0ea261ce52b5952bf669684a251a66df239ec6d441ccb59ec7afa882265d593"},
+ {file = "wrapt-1.16.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7bd2d7ff69a2cac767fbf7a2b206add2e9a210e57947dd7ce03e25d03d2de292"},
+ {file = "wrapt-1.16.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9159485323798c8dc530a224bd3ffcf76659319ccc7bbd52e01e73bd0241a0c5"},
+ {file = "wrapt-1.16.0-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a86373cf37cd7764f2201b76496aba58a52e76dedfaa698ef9e9688bfd9e41cf"},
+ {file = "wrapt-1.16.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:73870c364c11f03ed072dda68ff7aea6d2a3a5c3fe250d917a429c7432e15228"},
+ {file = "wrapt-1.16.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:b935ae30c6e7400022b50f8d359c03ed233d45b725cfdd299462f41ee5ffba6f"},
+ {file = "wrapt-1.16.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:db98ad84a55eb09b3c32a96c576476777e87c520a34e2519d3e59c44710c002c"},
+ {file = "wrapt-1.16.0-cp37-cp37m-win32.whl", hash = "sha256:9153ed35fc5e4fa3b2fe97bddaa7cbec0ed22412b85bcdaf54aeba92ea37428c"},
+ {file = "wrapt-1.16.0-cp37-cp37m-win_amd64.whl", hash = "sha256:66dfbaa7cfa3eb707bbfcd46dab2bc6207b005cbc9caa2199bcbc81d95071a00"},
+ {file = "wrapt-1.16.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1dd50a2696ff89f57bd8847647a1c363b687d3d796dc30d4dd4a9d1689a706f0"},
+ {file = "wrapt-1.16.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:44a2754372e32ab315734c6c73b24351d06e77ffff6ae27d2ecf14cf3d229202"},
+ {file = "wrapt-1.16.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8e9723528b9f787dc59168369e42ae1c3b0d3fadb2f1a71de14531d321ee05b0"},
+ {file = "wrapt-1.16.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:dbed418ba5c3dce92619656802cc5355cb679e58d0d89b50f116e4a9d5a9603e"},
+ {file = "wrapt-1.16.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:941988b89b4fd6b41c3f0bfb20e92bd23746579736b7343283297c4c8cbae68f"},
+ {file = "wrapt-1.16.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:6a42cd0cfa8ffc1915aef79cb4284f6383d8a3e9dcca70c445dcfdd639d51267"},
+ {file = "wrapt-1.16.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:1ca9b6085e4f866bd584fb135a041bfc32cab916e69f714a7d1d397f8c4891ca"},
+ {file = "wrapt-1.16.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:d5e49454f19ef621089e204f862388d29e6e8d8b162efce05208913dde5b9ad6"},
+ {file = "wrapt-1.16.0-cp38-cp38-win32.whl", hash = "sha256:c31f72b1b6624c9d863fc095da460802f43a7c6868c5dda140f51da24fd47d7b"},
+ {file = "wrapt-1.16.0-cp38-cp38-win_amd64.whl", hash = "sha256:490b0ee15c1a55be9c1bd8609b8cecd60e325f0575fc98f50058eae366e01f41"},
+ {file = "wrapt-1.16.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9b201ae332c3637a42f02d1045e1d0cccfdc41f1f2f801dafbaa7e9b4797bfc2"},
+ {file = "wrapt-1.16.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:2076fad65c6736184e77d7d4729b63a6d1ae0b70da4868adeec40989858eb3fb"},
+ {file = "wrapt-1.16.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c5cd603b575ebceca7da5a3a251e69561bec509e0b46e4993e1cac402b7247b8"},
+ {file = "wrapt-1.16.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b47cfad9e9bbbed2339081f4e346c93ecd7ab504299403320bf85f7f85c7d46c"},
+ {file = "wrapt-1.16.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f8212564d49c50eb4565e502814f694e240c55551a5f1bc841d4fcaabb0a9b8a"},
+ {file = "wrapt-1.16.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:5f15814a33e42b04e3de432e573aa557f9f0f56458745c2074952f564c50e664"},
+ {file = "wrapt-1.16.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:db2e408d983b0e61e238cf579c09ef7020560441906ca990fe8412153e3b291f"},
+ {file = "wrapt-1.16.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:edfad1d29c73f9b863ebe7082ae9321374ccb10879eeabc84ba3b69f2579d537"},
+ {file = "wrapt-1.16.0-cp39-cp39-win32.whl", hash = "sha256:ed867c42c268f876097248e05b6117a65bcd1e63b779e916fe2e33cd6fd0d3c3"},
+ {file = "wrapt-1.16.0-cp39-cp39-win_amd64.whl", hash = "sha256:eb1b046be06b0fce7249f1d025cd359b4b80fc1c3e24ad9eca33e0dcdb2e4a35"},
+ {file = "wrapt-1.16.0-py3-none-any.whl", hash = "sha256:6906c4100a8fcbf2fa735f6059214bb13b97f75b1a61777fcf6432121ef12ef1"},
+ {file = "wrapt-1.16.0.tar.gz", hash = "sha256:5f370f952971e7d17c7d1ead40e49f32345a7f7a5373571ef44d800d06b1899d"},
+]
+
+[[package]]
+name = "wraptor"
+version = "0.7.0"
+description = "Useful decorators and other utility functions."
+optional = false
+python-versions = "*"
+files = [
+ {file = "Wraptor-0.7.0.tar.gz", hash = "sha256:d61182866a061fb29b7ec426db281cc9a15540766885136f35809f079d9c1dec"},
+]
+
+[[package]]
+name = "wtforms"
+version = "2.2.1"
+description = "A flexible forms validation and rendering library for Python web development."
+optional = false
+python-versions = "*"
+files = [
+ {file = "WTForms-2.2.1-py2.py3-none-any.whl", hash = "sha256:e3ee092c827582c50877cdbd49e9ce6d2c5c1f6561f849b3b068c1b8029626f1"},
+ {file = "WTForms-2.2.1.tar.gz", hash = "sha256:0cdbac3e7f6878086c334aa25dc5a33869a3954e9d1e015130d65a69309b3b61"},
+]
+
+[package.extras]
+locale = ["Babel (>=1.3)"]
+
+[[package]]
+name = "xlrd"
+version = "2.0.1"
+description = "Library for developers to extract data from Microsoft Excel (tm) .xls spreadsheet files"
+optional = false
+python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*"
+files = [
+ {file = "xlrd-2.0.1-py2.py3-none-any.whl", hash = "sha256:6a33ee89877bd9abc1158129f6e94be74e2679636b8a205b43b85206c3f0bbdd"},
+ {file = "xlrd-2.0.1.tar.gz", hash = "sha256:f72f148f54442c6b056bf931dbc34f986fd0c3b0b6b5a58d013c9aef274d0c88"},
+]
+
+[package.extras]
+build = ["twine", "wheel"]
+docs = ["sphinx"]
+test = ["pytest", "pytest-cov"]
+
+[[package]]
+name = "xlsxwriter"
+version = "1.2.2"
+description = "A Python module for creating Excel XLSX files."
+optional = false
+python-versions = "*"
+files = [
+ {file = "XlsxWriter-1.2.2-py2.py3-none-any.whl", hash = "sha256:00e9c337589ec67a69f1220f47409146ab1affd8eb1e8eaad23f35685bd23e47"},
+ {file = "XlsxWriter-1.2.2.tar.gz", hash = "sha256:5a5e2195a4672d17db79839bbdf1006a521adb57eaceea1c335ae4b3d19f088f"},
+]
+
+[[package]]
+name = "xmlschema"
+version = "3.0.1"
+description = "An XML Schema validator and decoder"
+optional = false
+python-versions = ">=3.8"
+files = [
+ {file = "xmlschema-3.0.1-py3-none-any.whl", hash = "sha256:116243b2ad38cd2df9ee0606d4e4e898a6f156736b39ab0017e6f49862c0809e"},
+ {file = "xmlschema-3.0.1.tar.gz", hash = "sha256:bb24a5f4738e49d85d9eb03a2b5af26bbbbfdb055517ad953d98925094b8c026"},
+]
+
+[package.dependencies]
+elementpath = ">=4.1.5,<5.0.0"
+
+[package.extras]
+codegen = ["elementpath (>=4.1.5,<5.0.0)", "jinja2"]
+dev = ["Sphinx", "coverage", "elementpath (>=4.1.5,<5.0.0)", "flake8", "jinja2", "lxml", "lxml-stubs", "memory-profiler", "mypy", "sphinx-rtd-theme", "tox"]
+docs = ["Sphinx", "elementpath (>=4.1.5,<5.0.0)", "jinja2", "sphinx-rtd-theme"]
+
+[[package]]
+name = "zipp"
+version = "3.19.1"
+description = "Backport of pathlib-compatible object wrapper for zip files"
+optional = false
+python-versions = ">=3.8"
+files = [
+ {file = "zipp-3.19.1-py3-none-any.whl", hash = "sha256:2828e64edb5386ea6a52e7ba7cdb17bb30a73a858f5eb6eb93d8d36f5ea26091"},
+ {file = "zipp-3.19.1.tar.gz", hash = "sha256:35427f6d5594f4acf82d25541438348c26736fa9b3afa2754bcd63cdb99d8e8f"},
+]
+
+[package.extras]
+doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"]
+test = ["big-O", "jaraco.functools", "jaraco.itertools", "jaraco.test", "more-itertools", "pytest (>=6,!=8.1.*)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-ignore-flaky", "pytest-mypy", "pytest-ruff (>=0.2.1)"]
+
+[[package]]
+name = "zope-event"
+version = "5.0"
+description = "Very basic event publishing system"
+optional = false
+python-versions = ">=3.7"
+files = [
+ {file = "zope.event-5.0-py3-none-any.whl", hash = "sha256:2832e95014f4db26c47a13fdaef84cef2f4df37e66b59d8f1f4a8f319a632c26"},
+ {file = "zope.event-5.0.tar.gz", hash = "sha256:bac440d8d9891b4068e2b5a2c5e2c9765a9df762944bda6955f96bb9b91e67cd"},
+]
+
+[package.dependencies]
+setuptools = "*"
+
+[package.extras]
+docs = ["Sphinx"]
+test = ["zope.testrunner"]
+
+[[package]]
+name = "zope-interface"
+version = "6.1"
+description = "Interfaces for Python"
+optional = false
+python-versions = ">=3.7"
+files = [
+ {file = "zope.interface-6.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:43b576c34ef0c1f5a4981163b551a8781896f2a37f71b8655fd20b5af0386abb"},
+ {file = "zope.interface-6.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:67be3ca75012c6e9b109860820a8b6c9a84bfb036fbd1076246b98e56951ca92"},
+ {file = "zope.interface-6.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9b9bc671626281f6045ad61d93a60f52fd5e8209b1610972cf0ef1bbe6d808e3"},
+ {file = "zope.interface-6.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:bbe81def9cf3e46f16ce01d9bfd8bea595e06505e51b7baf45115c77352675fd"},
+ {file = "zope.interface-6.1-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6dc998f6de015723196a904045e5a2217f3590b62ea31990672e31fbc5370b41"},
+ {file = "zope.interface-6.1-cp310-cp310-win_amd64.whl", hash = "sha256:239a4a08525c080ff833560171d23b249f7f4d17fcbf9316ef4159f44997616f"},
+ {file = "zope.interface-6.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:9ffdaa5290422ac0f1688cb8adb1b94ca56cee3ad11f29f2ae301df8aecba7d1"},
+ {file = "zope.interface-6.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:34c15ca9248f2e095ef2e93af2d633358c5f048c49fbfddf5fdfc47d5e263736"},
+ {file = "zope.interface-6.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b012d023b4fb59183909b45d7f97fb493ef7a46d2838a5e716e3155081894605"},
+ {file = "zope.interface-6.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:97806e9ca3651588c1baaebb8d0c5ee3db95430b612db354c199b57378312ee8"},
+ {file = "zope.interface-6.1-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fddbab55a2473f1d3b8833ec6b7ac31e8211b0aa608df5ab09ce07f3727326de"},
+ {file = "zope.interface-6.1-cp311-cp311-win_amd64.whl", hash = "sha256:a0da79117952a9a41253696ed3e8b560a425197d4e41634a23b1507efe3273f1"},
+ {file = "zope.interface-6.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:e8bb9c990ca9027b4214fa543fd4025818dc95f8b7abce79d61dc8a2112b561a"},
+ {file = "zope.interface-6.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:b51b64432eed4c0744241e9ce5c70dcfecac866dff720e746d0a9c82f371dfa7"},
+ {file = "zope.interface-6.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:aa6fd016e9644406d0a61313e50348c706e911dca29736a3266fc9e28ec4ca6d"},
+ {file = "zope.interface-6.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0c8cf55261e15590065039696607f6c9c1aeda700ceee40c70478552d323b3ff"},
+ {file = "zope.interface-6.1-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e30506bcb03de8983f78884807e4fd95d8db6e65b69257eea05d13d519b83ac0"},
+ {file = "zope.interface-6.1-cp312-cp312-win_amd64.whl", hash = "sha256:e33e86fd65f369f10608b08729c8f1c92ec7e0e485964670b4d2633a4812d36b"},
+ {file = "zope.interface-6.1-cp37-cp37m-macosx_11_0_x86_64.whl", hash = "sha256:2f8d89721834524a813f37fa174bac074ec3d179858e4ad1b7efd4401f8ac45d"},
+ {file = "zope.interface-6.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:13b7d0f2a67eb83c385880489dbb80145e9d344427b4262c49fbf2581677c11c"},
+ {file = "zope.interface-6.1-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ef43ee91c193f827e49599e824385ec7c7f3cd152d74cb1dfe02cb135f264d83"},
+ {file = "zope.interface-6.1-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e441e8b7d587af0414d25e8d05e27040d78581388eed4c54c30c0c91aad3a379"},
+ {file = "zope.interface-6.1-cp37-cp37m-win_amd64.whl", hash = "sha256:f89b28772fc2562ed9ad871c865f5320ef761a7fcc188a935e21fe8b31a38ca9"},
+ {file = "zope.interface-6.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:70d2cef1bf529bff41559be2de9d44d47b002f65e17f43c73ddefc92f32bf00f"},
+ {file = "zope.interface-6.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:ad54ed57bdfa3254d23ae04a4b1ce405954969c1b0550cc2d1d2990e8b439de1"},
+ {file = "zope.interface-6.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ef467d86d3cfde8b39ea1b35090208b0447caaabd38405420830f7fd85fbdd56"},
+ {file = "zope.interface-6.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6af47f10cfc54c2ba2d825220f180cc1e2d4914d783d6fc0cd93d43d7bc1c78b"},
+ {file = "zope.interface-6.1-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c9559138690e1bd4ea6cd0954d22d1e9251e8025ce9ede5d0af0ceae4a401e43"},
+ {file = "zope.interface-6.1-cp38-cp38-win_amd64.whl", hash = "sha256:964a7af27379ff4357dad1256d9f215047e70e93009e532d36dcb8909036033d"},
+ {file = "zope.interface-6.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:387545206c56b0315fbadb0431d5129c797f92dc59e276b3ce82db07ac1c6179"},
+ {file = "zope.interface-6.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:57d0a8ce40ce440f96a2c77824ee94bf0d0925e6089df7366c2272ccefcb7941"},
+ {file = "zope.interface-6.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7ebc4d34e7620c4f0da7bf162c81978fce0ea820e4fa1e8fc40ee763839805f3"},
+ {file = "zope.interface-6.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5a804abc126b33824a44a7aa94f06cd211a18bbf31898ba04bd0924fbe9d282d"},
+ {file = "zope.interface-6.1-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1f294a15f7723fc0d3b40701ca9b446133ec713eafc1cc6afa7b3d98666ee1ac"},
+ {file = "zope.interface-6.1-cp39-cp39-win_amd64.whl", hash = "sha256:a41f87bb93b8048fe866fa9e3d0c51e27fe55149035dcf5f43da4b56732c0a40"},
+ {file = "zope.interface-6.1.tar.gz", hash = "sha256:2fdc7ccbd6eb6b7df5353012fbed6c3c5d04ceaca0038f75e601060e95345309"},
+]
+
+[package.dependencies]
+setuptools = "*"
+
+[package.extras]
+docs = ["Sphinx", "repoze.sphinx.autointerface", "sphinx-rtd-theme"]
+test = ["coverage (>=5.0.3)", "zope.event", "zope.testing"]
+testing = ["coverage (>=5.0.3)", "zope.event", "zope.testing"]
+
+[metadata]
+lock-version = "2.0"
+python-versions = ">=3.8,<3.11"
+content-hash = "f674647ac7245ed97ec3fb18dd104d8e6cee18eaa46f0ad1295780fc0b381afd"
diff --git a/pyproject.toml b/pyproject.toml
new file mode 100644
index 0000000000..456376f819
--- /dev/null
+++ b/pyproject.toml
@@ -0,0 +1,177 @@
+[project]
+requires-python = ">=3.8"
+
+[tool.black]
+target-version = ['py38']
+line-length = 119
+force-exclude = '''
+/(
+ migrations
+)/
+'''
+
+[tool.poetry]
+name = "redash"
+version = "24.10.0-dev"
+description = "Make Your Company Data Driven. Connect to any data source, easily visualize, dashboard and share your data."
+authors = ["Arik Fraimovich "]
+# To be added to or removed from the mailing list, please reach out to Arik via the above email or Discord
+maintainers = [
+ "Redash maintainers and contributors ",
+]
+readme = "README.md"
+
+[tool.poetry.dependencies]
+python = ">=3.8,<3.11"
+advocate = "1.0.0"
+aniso8601 = "8.0.0"
+authlib = "0.15.5"
+backoff = "2.2.1"
+blinker = "1.6.2"
+click = "8.1.3"
+cryptography = "42.0.8"
+disposable-email-domains = ">=0.0.52"
+flask = "2.3.2"
+flask-limiter = "3.3.1"
+flask-login = "0.6.0"
+flask-mail = "0.9.1"
+flask-migrate = "2.5.2"
+flask-restful = "0.3.10"
+flask-sqlalchemy = "2.5.1"
+flask-talisman = "0.7.0"
+flask-wtf = "1.1.1"
+funcy = "1.13"
+gevent = "23.9.1"
+greenlet = "2.0.2"
+gunicorn = "22.0.0"
+httplib2 = "0.19.0"
+itsdangerous = "2.1.2"
+jinja2 = "3.1.4"
+jsonschema = "3.1.1"
+markupsafe = "2.1.1"
+maxminddb-geolite2 = "2018.703"
+parsedatetime = "2.4"
+passlib = "1.7.3"
+psycopg2-binary = "2.9.6"
+pyjwt = "2.4.0"
+pyopenssl = "24.2.1"
+pypd = "1.1.0"
+pysaml2 = "7.3.1"
+pystache = "0.6.0"
+python-dateutil = "2.8.0"
+python-dotenv = "0.19.2"
+pytz = ">=2019.3"
+pyyaml = "6.0.1"
+redis = "4.6.0"
+regex = "2023.8.8"
+requests = "2.32.3"
+restrictedpython = "7.3"
+rq = "1.16.1"
+rq-scheduler = "0.13.1"
+semver = "2.8.1"
+sentry-sdk = "1.45.1"
+sqlalchemy = "1.3.24"
+sqlalchemy-searchable = "1.2.0"
+sqlalchemy-utils = "0.38.3"
+sqlparse = "0.5.0"
+sshtunnel = "0.1.5"
+statsd = "3.3.0"
+supervisor = "4.1.0"
+supervisor-checks = "0.8.1"
+ua-parser = "0.18.0"
+urllib3 = "1.26.19"
+user-agents = "2.0"
+werkzeug = "2.3.8"
+wtforms = "2.2.1"
+xlsxwriter = "1.2.2"
+tzlocal = "4.3.1"
+pyodbc = "5.1.0"
+
+[tool.poetry.group.all_ds]
+optional = true
+
+[tool.poetry.group.all_ds.dependencies]
+atsd-client = "3.0.5"
+azure-kusto-data = "0.0.35"
+boto3 = "1.28.8"
+botocore = "1.31.8"
+cassandra-driver = "3.21.0"
+certifi = ">=2019.9.11"
+cmem-cmempy = "21.2.3"
+databend-py = "0.4.6"
+databend-sqlalchemy = "0.2.4"
+google-api-python-client = "1.7.11"
+gspread = "5.11.2"
+impyla = "0.16.0"
+influxdb = "5.2.3"
+influxdb-client = "1.38.0"
+memsql = "3.2.0"
+mysqlclient = "2.1.1"
+nzalchemy = "^11.0.2"
+nzpy = ">=1.15"
+oauth2client = "4.1.3"
+openpyxl = "3.0.7"
+oracledb = "2.1.2"
+pandas = "1.3.4"
+phoenixdb = "0.7"
+pinotdb = ">=0.4.5"
+protobuf = "3.20.2"
+pyathena = ">=1.5.0,<=1.11.5"
+pydgraph = "2.0.2"
+pydruid = "0.5.7"
+pyexasol = "0.12.0"
+pyhive = "0.6.1"
+pyignite = "0.6.1"
+pymongo = { version = "4.6.3", extras = ["srv", "tls"] }
+pymssql = "^2.3.1"
+pyodbc = "5.1.0"
+python-arango = "6.1.0"
+python-rapidjson = "1.20"
+requests-aws-sign = "0.1.5"
+sasl = ">=0.1.3"
+simple-salesforce = "0.74.3"
+snowflake-connector-python = "3.12.0"
+td-client = "1.0.0"
+thrift = ">=0.8.0"
+thrift-sasl = ">=0.1.0"
+trino = ">=0.305,<1.0"
+vertica-python = "1.1.1"
+xlrd = "2.0.1"
+e6data-python-connector = "1.1.9"
+
+[tool.poetry.group.ldap3]
+optional = true
+
+[tool.poetry.group.ldap3.dependencies]
+ldap3 = "2.9.1"
+
+[tool.poetry.group.dev]
+optional = true
+
+[tool.poetry.group.dev.dependencies]
+pytest = "7.4.0"
+coverage = "7.2.7"
+freezegun = "1.2.1"
+jwcrypto = "1.5.6"
+mock = "5.0.2"
+pre-commit = "3.3.3"
+ptpython = "3.0.23"
+ptvsd = "4.3.2"
+pytest-cov = "4.1.0"
+watchdog = "3.0.0"
+ruff = "0.0.289"
+
+[build-system]
+requires = ["poetry-core"]
+build-backend = "poetry.core.masonry.api"
+
+[tool.ruff]
+exclude = [".git", "viz-lib", "node_modules", "migrations"]
+ignore = ["E501"]
+select = ["C9", "E", "F", "W", "I001", "UP004"]
+
+[tool.ruff.mccabe]
+max-complexity = 15
+
+[tool.ruff.per-file-ignores]
+"__init__.py" = ["F401"]
diff --git a/pytest.ini b/pytest.ini
index fdafc074e5..0c12c50c8c 100644
--- a/pytest.ini
+++ b/pytest.ini
@@ -1,2 +1,5 @@
[pytest]
norecursedirs = *.egg .eggs dist build docs .tox
+filterwarnings =
+ once::DeprecationWarning
+ once::PendingDeprecationWarning
diff --git a/redash/__init__.py b/redash/__init__.py
index 42724fe076..2e45a50bcc 100644
--- a/redash/__init__.py
+++ b/redash/__init__.py
@@ -1,19 +1,18 @@
-from __future__ import absolute_import
import logging
import os
import sys
import redis
-from flask_mail import Mail
from flask_limiter import Limiter
-from flask_limiter.util import get_ipaddr
+from flask_limiter.util import get_remote_address
+from flask_mail import Mail
from flask_migrate import Migrate
from statsd import StatsClient
-from . import settings
-from .app import create_app # noqa
-from .query_runner import import_query_runners
-from .destinations import import_destinations
+from redash import settings
+from redash.app import create_app # noqa
+from redash.destinations import import_destinations
+from redash.query_runner import import_query_runners
__version__ = "10.1.0"
@@ -48,10 +47,8 @@ def setup_logging():
rq_redis_connection = redis.from_url(settings.RQ_REDIS_URL)
mail = Mail()
migrate = Migrate(compare_type=True)
-statsd_client = StatsClient(
- host=settings.STATSD_HOST, port=settings.STATSD_PORT, prefix=settings.STATSD_PREFIX
-)
-limiter = Limiter(key_func=get_ipaddr, storage_uri=settings.LIMITER_STORAGE)
+statsd_client = StatsClient(host=settings.STATSD_HOST, port=settings.STATSD_PORT, prefix=settings.STATSD_PREFIX)
+limiter = Limiter(key_func=get_remote_address, storage_uri=settings.LIMITER_STORAGE)
import_query_runners(settings.QUERY_RUNNERS)
import_destinations(settings.DESTINATIONS)
diff --git a/redash/app.py b/redash/app.py
index 67eee53bbe..437f766c87 100644
--- a/redash/app.py
+++ b/redash/app.py
@@ -1,7 +1,7 @@
from flask import Flask
from werkzeug.middleware.proxy_fix import ProxyFix
-from . import settings
+from redash import settings
class Redash(Flask):
@@ -25,7 +25,6 @@ def __init__(self, *args, **kwargs):
def create_app():
from . import (
authentication,
- extensions,
handlers,
limiter,
mail,
@@ -37,14 +36,10 @@ def create_app():
from .metrics import request as request_metrics
from .models import db, users
from .utils import sentry
- from .version_check import reset_new_version_status
sentry.init()
app = Redash()
- # Check and update the cached version for use by the client
- app.before_first_request(reset_new_version_status)
-
security.init_app(app)
request_metrics.init_app(app)
db.init_app(app)
@@ -54,7 +49,6 @@ def create_app():
limiter.init_app(app)
handlers.init_app(app)
configure_webpack(app)
- extensions.init_app(app)
users.init_app(app)
tasks.init_app(app)
diff --git a/redash/authentication/__init__.py b/redash/authentication/__init__.py
index 966919b89d..8521ac5bb9 100644
--- a/redash/authentication/__init__.py
+++ b/redash/authentication/__init__.py
@@ -5,8 +5,11 @@
from datetime import timedelta
from urllib.parse import urlsplit, urlunsplit
-from flask import jsonify, redirect, request, url_for, session
+from flask import jsonify, redirect, request, session, url_for
from flask_login import LoginManager, login_user, logout_user, user_logged_in
+from sqlalchemy.orm.exc import NoResultFound
+from werkzeug.exceptions import Unauthorized
+
from redash import models, settings
from redash.authentication import jwt_auth
from redash.authentication.org_resolving import current_org
@@ -26,9 +29,7 @@ def get_login_url(external=False, next="/", is_redirect_to_login=False):
if settings.MULTI_ORG and current_org == None:
login_url = "/"
elif settings.MULTI_ORG:
- login_url = url_for(
- "redash.login", org_slug=current_org.slug, next=next, _external=external
- )
+ login_url = url_for("redash.login", org_slug=current_org.slug, next=next, _external=external)
else:
logger.info("getting login URL")
host_url = settings.HOST
@@ -78,11 +79,7 @@ def request_loader(request):
elif settings.AUTH_TYPE == "api_key":
user = api_key_load_user_from_request(request)
else:
- logger.warning(
- "Unknown authentication type ({}). Using default (HMAC).".format(
- settings.AUTH_TYPE
- )
- )
+ logger.warning("Unknown authentication type ({}). Using default (HMAC).".format(settings.AUTH_TYPE))
user = hmac_load_user_from_request(request)
if org_settings["auth_jwt_login_enabled"] and user is None:
@@ -209,6 +206,10 @@ def jwt_token_load_user_from_request(request):
if not payload:
return
+ if "email" not in payload:
+ logger.info("No email field in token, refusing to login")
+ return
+
try:
user = models.User.get_by_email_and_org(payload["email"], org)
except models.NoResultFound:
@@ -252,7 +253,7 @@ def redirect_to_login():
def logout_and_redirect_to_index():
logout_user()
- if settings.MULTI_ORG and current_org == None:
+ if settings.MULTI_ORG and current_org == None: # noqa: E711
index_url = "/"
elif settings.MULTI_ORG:
index_url = url_for("redash.index", org_slug=current_org.slug, _external=False)
diff --git a/redash/authentication/account.py b/redash/authentication/account.py
index c826a71aa4..30ec3a26b2 100644
--- a/redash/authentication/account.py
+++ b/redash/authentication/account.py
@@ -1,13 +1,12 @@
import logging
+
from flask import render_template
+from itsdangerous import URLSafeTimedSerializer
from redash import settings
from redash.tasks import send_mail
from redash.utils import base_url
-# noinspection PyUnresolvedReferences
-from itsdangerous import URLSafeTimedSerializer, SignatureExpired, BadSignature
-
logger = logging.getLogger(__name__)
serializer = URLSafeTimedSerializer(settings.SECRET_KEY)
diff --git a/redash/authentication/google_oauth.py b/redash/authentication/google_oauth.py
index a1f58d3fcf..31f6d52359 100644
--- a/redash/authentication/google_oauth.py
+++ b/redash/authentication/google_oauth.py
@@ -3,11 +3,15 @@
from flask import redirect, url_for, Blueprint, flash, request, session
-from redash import models, settings
+import requests
+from authlib.integrations.flask_client import OAuth
+from flask import Blueprint, flash, redirect, request, session, url_for
+
+from redash import models
from redash.authentication import (
create_and_login_user,
- logout_and_redirect_to_index,
get_next_path,
+ logout_and_redirect_to_index,
)
from redash.authentication.org_resolving import current_org
diff --git a/redash/authentication/jwt_auth.py b/redash/authentication/jwt_auth.py
index 81904235ae..a59029b6d2 100644
--- a/redash/authentication/jwt_auth.py
+++ b/redash/authentication/jwt_auth.py
@@ -1,10 +1,39 @@
+import json
import logging
+
import jwt
import requests
-import simplejson
logger = logging.getLogger("jwt_auth")
+FILE_SCHEME_PREFIX = "file://"
+
+
+def get_public_key_from_file(url):
+ file_path = url[len(FILE_SCHEME_PREFIX) :]
+ with open(file_path) as key_file:
+ key_str = key_file.read()
+
+ get_public_keys.key_cache[url] = [key_str]
+ return key_str
+
+
+def get_public_key_from_net(url):
+ r = requests.get(url)
+ r.raise_for_status()
+ data = r.json()
+ if "keys" in data:
+ public_keys = []
+ for key_dict in data["keys"]:
+ public_key = jwt.algorithms.RSAAlgorithm.from_jwk(json.dumps(key_dict))
+ public_keys.append(public_key)
+
+ get_public_keys.key_cache[url] = public_keys
+ return public_keys
+ else:
+ get_public_keys.key_cache[url] = data
+ return data
+
def get_public_keys(url):
"""
@@ -12,33 +41,21 @@ def get_public_keys(url):
List of RSA public keys usable by PyJWT.
"""
key_cache = get_public_keys.key_cache
+ keys = {}
if url in key_cache:
- return key_cache[url]
+ keys = key_cache[url]
else:
- r = requests.get(url)
- r.raise_for_status()
- data = r.json()
- if "keys" in data:
- public_keys = []
- for key_dict in data["keys"]:
- public_key = jwt.algorithms.RSAAlgorithm.from_jwk(
- simplejson.dumps(key_dict)
- )
- public_keys.append(public_key)
-
- get_public_keys.key_cache[url] = public_keys
- return public_keys
+ if url.startswith(FILE_SCHEME_PREFIX):
+ keys = [get_public_key_from_file(url)]
else:
- get_public_keys.key_cache[url] = data
- return data
+ keys = get_public_key_from_net(url)
+ return keys
get_public_keys.key_cache = {}
-def verify_jwt_token(
- jwt_token, expected_issuer, expected_audience, algorithms, public_certs_url
-):
+def verify_jwt_token(jwt_token, expected_issuer, expected_audience, algorithms, public_certs_url):
# https://developers.cloudflare.com/access/setting-up-access/validate-jwt-tokens/
# https://cloud.google.com/iap/docs/signed-headers-howto
# Loop through the keys since we can't pass the key set to the decoder
@@ -53,9 +70,7 @@ def verify_jwt_token(
for key in keys:
try:
# decode returns the claims which has the email if you need it
- payload = jwt.decode(
- jwt_token, key=key, audience=expected_audience, algorithms=algorithms
- )
+ payload = jwt.decode(jwt_token, key=key, audience=expected_audience, algorithms=algorithms)
issuer = payload["iss"]
if issuer != expected_issuer:
raise Exception("Wrong issuer: {}".format(issuer))
@@ -63,4 +78,5 @@ def verify_jwt_token(
break
except Exception as e:
logging.exception(e)
+
return payload, valid_token
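
A rough usage sketch of the `file://` branch introduced above, assuming an environment where `redash.authentication.jwt_auth` is importable (the key path is made up; the file contents are only read and cached, not validated):

    import tempfile

    from redash.authentication import jwt_auth

    # Write a stand-in PEM file; get_public_key_from_file only reads the text.
    with tempfile.NamedTemporaryFile("w", suffix=".pem", delete=False) as f:
        f.write("-----BEGIN PUBLIC KEY-----\n...\n-----END PUBLIC KEY-----\n")
        pem_path = f.name

    url = "file://" + pem_path
    keys = jwt_auth.get_public_keys(url)     # file:// branch: returns [key_str]
    cached = jwt_auth.get_public_keys(url)   # second call is served from key_cache
    assert keys == cached

Any other URL goes through `get_public_key_from_net`, which fetches the document over HTTP and converts a JWKS (`"keys"` member) into RSA public keys via PyJWT.
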
diff --git a/redash/authentication/ldap_auth.py b/redash/authentication/ldap_auth.py
index e102b3f516..3bc5ff272f 100644
--- a/redash/authentication/ldap_auth.py
+++ b/redash/authentication/ldap_auth.py
@@ -1,23 +1,24 @@
import logging
import sys
-from redash import settings
-
-from flask import flash, redirect, render_template, request, url_for, Blueprint
+from flask import Blueprint, flash, redirect, render_template, request, url_for
from flask_login import current_user
+from redash import settings
+
try:
- from ldap3 import Server, Connection
+ from ldap3 import Connection, Server
+ from ldap3.utils.conv import escape_filter_chars
except ImportError:
if settings.LDAP_LOGIN_ENABLED:
sys.exit(
- "The ldap3 library was not found. This is required to use LDAP authentication (see requirements.txt)."
+            "The ldap3 library was not found. This is required to use LDAP authentication. Rebuild the Docker image with the `ldap3` poetry dependency group installed."
)
from redash.authentication import (
create_and_login_user,
- logout_and_redirect_to_index,
get_next_path,
+ logout_and_redirect_to_index,
)
from redash.authentication.org_resolving import current_org
from redash.handlers.base import org_scoped_rule
@@ -69,6 +70,7 @@ def login(org_slug=None):
def auth_ldap_user(username, password):
+ clean_username = escape_filter_chars(username)
server = Server(settings.LDAP_HOST_URL, use_ssl=settings.LDAP_SSL)
if settings.LDAP_BIND_DN is not None:
conn = Connection(
@@ -83,7 +85,7 @@ def auth_ldap_user(username, password):
conn.search(
settings.LDAP_SEARCH_DN,
- settings.LDAP_SEARCH_TEMPLATE % {"username": username},
+ settings.LDAP_SEARCH_TEMPLATE % {"username": clean_username},
attributes=[settings.LDAP_DISPLAY_NAME_KEY, settings.LDAP_EMAIL_KEY],
)
diff --git a/redash/authentication/remote_user_auth.py b/redash/authentication/remote_user_auth.py
index 7cba295ccd..59f25fb07c 100644
--- a/redash/authentication/remote_user_auth.py
+++ b/redash/authentication/remote_user_auth.py
@@ -1,13 +1,15 @@
import logging
-from flask import redirect, url_for, Blueprint, request
+
+from flask import Blueprint, redirect, request, url_for
+
+from redash import settings
from redash.authentication import (
create_and_login_user,
- logout_and_redirect_to_index,
get_next_path,
+ logout_and_redirect_to_index,
)
from redash.authentication.org_resolving import current_org
from redash.handlers.base import org_scoped_rule
-from redash import settings
logger = logging.getLogger("remote_user_auth")
@@ -20,9 +22,7 @@ def login(org_slug=None):
next_path = get_next_path(unsafe_next_path)
if not settings.REMOTE_USER_LOGIN_ENABLED:
- logger.error(
- "Cannot use remote user for login without being enabled in settings"
- )
+ logger.error("Cannot use remote user for login without being enabled in settings")
return redirect(url_for("redash.index", next=next_path, org_slug=org_slug))
email = request.headers.get(settings.REMOTE_USER_HEADER)
diff --git a/redash/authentication/saml_auth.py b/redash/authentication/saml_auth.py
index c8632e0e52..1c52bf1a3c 100644
--- a/redash/authentication/saml_auth.py
+++ b/redash/authentication/saml_auth.py
@@ -1,16 +1,20 @@
import logging
-from flask import flash, redirect, url_for, Blueprint, request
-from redash import settings
-from redash.authentication import create_and_login_user, logout_and_redirect_to_index
-from redash.authentication.org_resolving import current_org
-from redash.handlers.base import org_scoped_rule
-from redash.utils import mustache_render
+
+from flask import Blueprint, flash, redirect, request, url_for
from saml2 import BINDING_HTTP_POST, BINDING_HTTP_REDIRECT, entity
from saml2.client import Saml2Client
from saml2.config import Config as Saml2Config
from saml2.saml import NAMEID_FORMAT_TRANSIENT
from saml2.sigver import get_xmlsec_binary
+from redash import settings
+from redash.authentication import (
+ create_and_login_user,
+ logout_and_redirect_to_index,
+)
+from redash.authentication.org_resolving import current_org
+from redash.handlers.base import org_scoped_rule
+from redash.utils import mustache_render
logger = logging.getLogger("saml_auth")
blueprint = Blueprint("saml_auth", __name__)
@@ -29,6 +33,7 @@ def get_saml_client(org):
sso_url = org.get_setting("auth_saml_sso_url")
x509_cert = org.get_setting("auth_saml_x509_cert")
metadata_url = org.get_setting("auth_saml_metadata_url")
+ sp_settings = org.get_setting("auth_saml_sp_settings")
if settings.SAML_SCHEME_OVERRIDE:
acs_url = url_for(
@@ -85,8 +90,13 @@ def get_saml_client(org):
saml_settings["metadata"] = {"inline": [metadata_inline]}
- if acs_url is not None and acs_url != "":
- saml_settings["entityid"] = acs_url
+ if entity_id is not None and entity_id != "":
+ saml_settings["entityid"] = entity_id
+
+ if sp_settings:
+ import json
+
+ saml_settings["service"]["sp"].update(json.loads(sp_settings))
sp_config = Saml2Config()
sp_config.load(saml_settings)
diff --git a/redash/cli/__init__.py b/redash/cli/__init__.py
index 6f0528695c..5bf40d4a8e 100644
--- a/redash/cli/__init__.py
+++ b/redash/cli/__init__.py
@@ -1,17 +1,25 @@
+import json
+
import click
-import simplejson
from flask import current_app
from flask.cli import FlaskGroup, run_command, with_appcontext
from rq import Connection
-from redash import __version__, create_app, settings, rq_redis_connection
-from redash.cli import data_sources, database, groups, organization, queries, users, rq
+from redash import __version__, create_app, rq_redis_connection, settings
+from redash.cli import (
+ data_sources,
+ database,
+ groups,
+ organization,
+ queries,
+ rq,
+ users,
+)
from redash.monitor import get_status
-def create(group):
+def create():
app = current_app or create_app()
- group.app = app
@app.shell_context_processor
def shell_context():
@@ -46,7 +54,7 @@ def version():
@manager.command()
def status():
with Connection(rq_redis_connection):
- print(simplejson.dumps(get_status(), indent=2))
+ print(json.dumps(get_status(), indent=2))
@manager.command()
@@ -62,25 +70,23 @@ def send_test_mail(email=None):
"""
Send test message to EMAIL (default: the address you defined in MAIL_DEFAULT_SENDER)
"""
- from redash import mail
from flask_mail import Message
+ from redash import mail
+
if email is None:
email = settings.MAIL_DEFAULT_SENDER
- mail.send(
- Message(
- subject="Test Message from Redash", recipients=[email], body="Test message."
- )
- )
+ mail.send(Message(subject="Test Message from Redash", recipients=[email], body="Test message."))
@manager.command("shell")
@with_appcontext
def shell():
import sys
- from ptpython import repl
+
from flask.globals import _app_ctx_stack
+ from ptpython import repl
app = _app_ctx_stack.top.app
diff --git a/redash/cli/data_sources.py b/redash/cli/data_sources.py
index 4a94bfd12d..ccc7429a20 100644
--- a/redash/cli/data_sources.py
+++ b/redash/cli/data_sources.py
@@ -1,6 +1,7 @@
from sys import exit
import click
+from click.types import convert_type
from flask.cli import AppGroup
from sqlalchemy.orm.exc import NoResultFound
@@ -33,14 +34,10 @@ def list_command(organization=None):
if i > 0:
print("-" * 20)
- print(
- "Id: {}\nName: {}\nType: {}\nOptions: {}".format(
- ds.id, ds.name, ds.type, ds.options.to_json()
- )
- )
+ print("Id: {}\nName: {}\nType: {}\nOptions: {}".format(ds.id, ds.name, ds.type, ds.options.to_json()))
-@manager.command()
+@manager.command(name="list_types")
def list_types():
print("Enabled Query Runners:")
types = sorted(query_runners.keys())
@@ -75,9 +72,7 @@ def test(name, organization="default"):
data_source = models.DataSource.query.filter(
models.DataSource.name == name, models.DataSource.org == org
).one()
- print(
- "Testing connection to data source: {} (id={})".format(name, data_source.id)
- )
+ print("Testing connection to data source: {} (id={})".format(name, data_source.id))
try:
data_source.query_runner.test_connection()
except Exception as e:
@@ -139,11 +134,19 @@ def new(name=None, type=None, options=None, organization="default"):
else:
prompt = "{} (optional)".format(prompt)
+ _type = types[prop["type"]]
+
+ def value_proc(value):
+ if value == default_value:
+ return default_value
+ return convert_type(_type, default_value)(value)
+
value = click.prompt(
prompt,
default=default_value,
- type=types[prop["type"]],
+ type=_type,
show_default=False,
+ value_proc=value_proc,
)
if value != default_value:
options_obj[k] = value
@@ -154,13 +157,9 @@ def new(name=None, type=None, options=None, organization="default"):
if not options.is_valid():
print("Error: invalid configuration.")
- exit()
+ exit(1)
- print(
- "Creating {} data source ({}) with options:\n{}".format(
- type, name, options.to_json()
- )
- )
+ print("Creating {} data source ({}) with options:\n{}".format(type, name, options.to_json()))
data_source = models.DataSource.create_with_group(
name=name,
diff --git a/redash/cli/database.py b/redash/cli/database.py
index 546e813a91..ef1d4adbe9 100644
--- a/redash/cli/database.py
+++ b/redash/cli/database.py
@@ -1,9 +1,11 @@
+import logging
import time
+import sqlalchemy
from click import argument, option
+from cryptography.fernet import InvalidToken
from flask.cli import AppGroup
from flask_migrate import stamp
-import sqlalchemy
from sqlalchemy.exc import DatabaseError
from sqlalchemy.sql import select
from sqlalchemy_utils.types.encrypted.encrypted_type import FernetEngine
@@ -41,7 +43,7 @@ def load_extensions(db):
connection.execute(f'CREATE EXTENSION IF NOT EXISTS "{extension}";')
-@manager.command()
+@manager.command(name="create_tables")
def create_tables():
"""Create the database tables."""
from redash.models import db
@@ -61,7 +63,7 @@ def create_tables():
stamp()
-@manager.command()
+@manager.command(name="drop_tables")
def drop_tables():
"""Drop the database tables."""
from redash.models import db
@@ -81,8 +83,6 @@ def reencrypt(old_secret, new_secret, show_sql):
_wait_for_db_connection(db)
if show_sql:
- import logging
-
logging.basicConfig()
logging.getLogger("sqlalchemy.engine").setLevel(logging.INFO)
@@ -93,9 +93,7 @@ def _reencrypt_for_table(table_name, orm_name):
Column("id", key_type(orm_name), primary_key=True),
Column(
"encrypted_options",
- ConfigurationContainer.as_mutable(
- EncryptedConfiguration(db.Text, old_secret, FernetEngine)
- ),
+ ConfigurationContainer.as_mutable(EncryptedConfiguration(db.Text, old_secret, FernetEngine)),
),
)
table_for_update = sqlalchemy.Table(
@@ -104,19 +102,21 @@ def _reencrypt_for_table(table_name, orm_name):
Column("id", key_type(orm_name), primary_key=True),
Column(
"encrypted_options",
- ConfigurationContainer.as_mutable(
- EncryptedConfiguration(db.Text, new_secret, FernetEngine)
- ),
+ ConfigurationContainer.as_mutable(EncryptedConfiguration(db.Text, new_secret, FernetEngine)),
),
)
update = table_for_update.update()
selected_items = db.session.execute(select([table_for_select]))
for item in selected_items:
- stmt = update.where(table_for_update.c.id == item["id"]).values(
- encrypted_options=item["encrypted_options"]
- )
- db.session.execute(stmt)
+ try:
+ stmt = update.where(table_for_update.c.id == item["id"]).values(
+ encrypted_options=item["encrypted_options"]
+ )
+ except InvalidToken:
+                logging.error(f'Invalid decryption key for id {item["id"]} in table {table_name}')
+ else:
+ db.session.execute(stmt)
selected_items.close()
db.session.commit()
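
Conceptually, the loop above re-wraps each `encrypted_options` value: rows are decrypted with the old secret and written back under the new one, and rows whose ciphertext no longer matches the old key are now logged and skipped instead of aborting the whole run. A standalone sketch of that pattern with `cryptography.fernet` directly (keys and row values are made up):

    import logging

    from cryptography.fernet import Fernet, InvalidToken

    old_fernet, new_fernet = Fernet(Fernet.generate_key()), Fernet(Fernet.generate_key())

    rows = {
        1: old_fernet.encrypt(b'{"dbname": "postgres"}'),  # encrypted with the old key
        2: b"gAAAAA-not-a-valid-token",                    # stale or corrupted ciphertext
    }

    for row_id, ciphertext in rows.items():
        try:
            plaintext = old_fernet.decrypt(ciphertext)     # raises InvalidToken for row 2
        except InvalidToken:
            logging.error("Invalid decryption key for id %s", row_id)
        else:
            rows[row_id] = new_fernet.encrypt(plaintext)   # re-encrypt under the new key
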
diff --git a/redash/cli/groups.py b/redash/cli/groups.py
index 1770057feb..2da7dcd776 100644
--- a/redash/cli/groups.py
+++ b/redash/cli/groups.py
@@ -1,8 +1,8 @@
from sys import exit
-from sqlalchemy.orm.exc import NoResultFound
-from flask.cli import AppGroup
from click import argument, option
+from flask.cli import AppGroup
+from sqlalchemy.orm.exc import NoResultFound
from redash import models
@@ -43,7 +43,7 @@ def create(name, permissions=None, organization="default"):
exit(1)
-@manager.command()
+@manager.command(name="change_permissions")
@argument("group_id")
@option(
"--permissions",
@@ -60,14 +60,11 @@ def change_permissions(group_id, permissions=None):
try:
group = models.Group.query.get(group_id)
except NoResultFound:
- print("User [%s] not found." % group_id)
+ print("Group [%s] not found." % group_id)
exit(1)
permissions = extract_permissions_string(permissions)
- print(
- "current permissions [%s] will be modify to [%s]"
- % (",".join(group.permissions), ",".join(permissions))
- )
+ print("current permissions [%s] will be modify to [%s]" % (",".join(group.permissions), ",".join(permissions)))
group.permissions = permissions
@@ -119,4 +116,7 @@ def list_command(organization=None):
members = models.Group.members(group.id)
user_names = [m.name for m in members]
- print("Users: {}".format(", ".join(user_names)))
+ if user_names:
+ print("Users: {}".format(", ".join(user_names)))
+ else:
+ print("Users:")
diff --git a/redash/cli/organization.py b/redash/cli/organization.py
index 45c73551fc..d941e06adb 100644
--- a/redash/cli/organization.py
+++ b/redash/cli/organization.py
@@ -1,4 +1,4 @@
-from click import argument
+from click import argument, option
from flask.cli import AppGroup
from redash import models
@@ -6,7 +6,7 @@
manager = AppGroup(help="Organization management commands.")
-@manager.command()
+@manager.command(name="set_google_apps_domains")
@argument("domains")
def set_google_apps_domains(domains):
"""
@@ -17,21 +17,32 @@ def set_google_apps_domains(domains):
organization.settings[k] = domains.split(",")
models.db.session.add(organization)
models.db.session.commit()
- print(
- "Updated list of allowed domains to: {}".format(
- organization.google_apps_domains
- )
- )
+ print("Updated list of allowed domains to: {}".format(organization.google_apps_domains))
-@manager.command()
+@manager.command(name="show_google_apps_domains")
def show_google_apps_domains():
organization = models.Organization.query.first()
- print(
- "Current list of Google Apps domains: {}".format(
- ", ".join(organization.google_apps_domains)
- )
- )
+ print("Current list of Google Apps domains: {}".format(", ".join(organization.google_apps_domains)))
+
+
+@manager.command(name="create")
+@argument("name")
+@option(
+ "--slug",
+ "slug",
+ default="default",
+ help="The slug the organization belongs to (leave blank for " "'default').",
+)
+def create(name, slug="default"):
+ print("Creating organization (%s)..." % (name))
+
+ try:
+ models.db.session.add(models.Organization(name=name, slug=slug, settings={}))
+ models.db.session.commit()
+ except Exception as e:
+ print("Failed create organization: %s" % e)
+ exit(1)
@manager.command(name="list")
diff --git a/redash/cli/queries.py b/redash/cli/queries.py
index f71bdbabe6..54943dee3c 100644
--- a/redash/cli/queries.py
+++ b/redash/cli/queries.py
@@ -5,7 +5,7 @@
manager = AppGroup(help="Queries management commands.")
-@manager.command()
+@manager.command(name="add_tag")
@argument("query_id")
@argument("tag")
def add_tag(query_id, tag):
@@ -31,7 +31,7 @@ def add_tag(query_id, tag):
print("Tag added.")
-@manager.command()
+@manager.command(name="remove_tag")
@argument("query_id")
@argument("tag")
def remove_tag(query_id, tag):
diff --git a/redash/cli/rq.py b/redash/cli/rq.py
index 97fdaab43d..c2c1ed6f7a 100644
--- a/redash/cli/rq.py
+++ b/redash/cli/rq.py
@@ -1,7 +1,5 @@
-from __future__ import absolute_import
-import socket
-import sys
import datetime
+import socket
from itertools import chain
from click import argument
@@ -14,11 +12,11 @@
from redash import rq_redis_connection
from redash.tasks import (
- Worker,
+ periodic_job_definitions,
rq_scheduler,
schedule_periodic_jobs,
- periodic_job_definitions,
)
+from redash.tasks.worker import Worker
from redash.worker import default_queues
manager = AppGroup(help="RQ management commands.")
@@ -55,11 +53,7 @@ class WorkerHealthcheck(base.BaseCheck):
def __call__(self, process_spec):
pid = process_spec["pid"]
all_workers = Worker.all(connection=rq_redis_connection)
- workers = [
- w
- for w in all_workers
- if w.hostname == socket.gethostname() and w.pid == pid
- ]
+ workers = [w for w in all_workers if w.hostname == socket.gethostname() and w.pid == pid]
if not workers:
self._log(f"Cannot find worker for hostname {socket.gethostname()} and pid {pid}. ==> Is healthy? False")
@@ -96,6 +90,4 @@ def __call__(self, process_spec):
@manager.command()
def healthcheck():
- return check_runner.CheckRunner(
- "worker_healthcheck", "worker", None, [(WorkerHealthcheck, {})]
- ).run()
+ return check_runner.CheckRunner("worker_healthcheck", "worker", None, [(WorkerHealthcheck, {})]).run()
diff --git a/redash/cli/users.py b/redash/cli/users.py
index fc6a4420ee..03e22dfa63 100644
--- a/redash/cli/users.py
+++ b/redash/cli/users.py
@@ -2,8 +2,8 @@
from click import BOOL, argument, option, prompt
from flask.cli import AppGroup
-from sqlalchemy.orm.exc import NoResultFound
from sqlalchemy.exc import IntegrityError
+from sqlalchemy.orm.exc import NoResultFound
from redash import models
from redash.handlers.users import invite_user
@@ -26,7 +26,7 @@ def build_groups(org, groups, is_admin):
return groups
-@manager.command()
+@manager.command(name="grant_admin")
@argument("email")
@option(
"--org",
@@ -116,7 +116,7 @@ def create(
exit(1)
-@manager.command()
+@manager.command(name="create_root")
@argument("email")
@argument("name")
@option(
@@ -136,17 +136,13 @@ def create(
"--password",
"password",
default=None,
- help="Password for root user who don't use Google Auth "
- "(leave blank for prompt).",
+ help="Password for root user who don't use Google Auth (leave blank for prompt).",
)
def create_root(email, name, google_auth=False, password=None, organization="default"):
"""
Create root user.
"""
- print(
- "Creating root user (%s, %s) in organization %s..."
- % (email, name, organization)
- )
+ print("Creating root user (%s, %s) in organization %s..." % (email, name, organization))
print("Login with Google Auth: %r\n" % google_auth)
user = models.User.query.filter(models.User.email == email).first()
@@ -155,15 +151,13 @@ def create_root(email, name, google_auth=False, password=None, organization="def
exit(1)
org_slug = organization
- org = models.Organization.query.filter(
- models.Organization.slug == org_slug
- ).first()
+ org = models.Organization.query.filter(models.Organization.slug == org_slug).first()
if org is None:
org = models.Organization(name=org_slug, slug=org_slug, settings={})
admin_group = models.Group(
name="admin",
- permissions=["admin", "super_admin"],
+ permissions=models.Group.ADMIN_PERMISSIONS,
org=org,
type=models.Group.BUILTIN_GROUP,
)
@@ -208,13 +202,9 @@ def delete(email, organization=None):
"""
if organization:
org = models.Organization.get_by_slug(organization)
- deleted_count = models.User.query.filter(
- models.User.email == email, models.User.org == org.id
- ).delete()
+ deleted_count = models.User.query.filter(models.User.email == email, models.User.org == org.id).delete()
else:
- deleted_count = models.User.query.filter(models.User.email == email).delete(
- synchronize_session=False
- )
+ deleted_count = models.User.query.filter(models.User.email == email).delete(synchronize_session=False)
models.db.session.commit()
print("Deleted %d users." % deleted_count)
@@ -234,9 +224,7 @@ def password(email, password, organization=None):
"""
if organization:
org = models.Organization.get_by_slug(organization)
- user = models.User.query.filter(
- models.User.email == email, models.User.org == org
- ).first()
+ user = models.User.query.filter(models.User.email == email, models.User.org == org).first()
else:
user = models.User.query.filter(models.User.email == email).first()
@@ -265,7 +253,7 @@ def password(email, password, organization=None):
"--groups",
"groups",
default=None,
- help="Comma seperated list of groups (leave blank for default).",
+ help="Comma separated list of groups (leave blank for default).",
)
def invite(email, name, inviter_email, groups, is_admin=False, organization="default"):
"""
diff --git a/redash/destinations/__init__.py b/redash/destinations/__init__.py
index bbd5fef9cb..4a24e99103 100644
--- a/redash/destinations/__init__.py
+++ b/redash/destinations/__init__.py
@@ -5,7 +5,7 @@
__all__ = ["BaseDestination", "register", "get_destination", "import_destinations"]
-class BaseDestination(object):
+class BaseDestination:
deprecated = False
def __init__(self, configuration):
@@ -31,7 +31,7 @@ def enabled(cls):
def configuration_schema(cls):
return {}
- def notify(self, alert, query, user, new_state, app, host, options):
+ def notify(self, alert, query, user, new_state, app, host, metadata, options):
raise NotImplementedError()
@classmethod
@@ -41,7 +41,7 @@ def to_dict(cls):
"type": cls.type(),
"icon": cls.icon(),
"configuration_schema": cls.configuration_schema(),
- **({ "deprecated": True } if cls.deprecated else {})
+ **({"deprecated": True} if cls.deprecated else {}),
}
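
The extra `metadata` parameter above changes the `notify` contract for every destination, which is why each concrete destination below gains the same argument. A hypothetical minimal destination written against the new signature (the class and its behaviour are illustrative, not part of the patch):

    import logging

    from redash.destinations import BaseDestination, register


    class LogOnly(BaseDestination):
        """Hypothetical destination that only logs alert state changes."""

        @classmethod
        def configuration_schema(cls):
            return {"type": "object", "properties": {}}

        def notify(self, alert, query, user, new_state, app, host, metadata, options):
            # `metadata` is the new positional argument passed through from the
            # alert-evaluation task; the remaining parameters are unchanged.
            logging.info("Alert %s -> %s (metadata=%s)", alert.name, new_state, metadata)


    register(LogOnly)
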
diff --git a/redash/destinations/asana.py b/redash/destinations/asana.py
new file mode 100644
index 0000000000..394633a265
--- /dev/null
+++ b/redash/destinations/asana.py
@@ -0,0 +1,64 @@
+import logging
+import textwrap
+
+import requests
+
+from redash.destinations import BaseDestination, register
+from redash.models import Alert
+
+
+class Asana(BaseDestination):
+ @classmethod
+ def configuration_schema(cls):
+ return {
+ "type": "object",
+ "properties": {
+ "pat": {"type": "string", "title": "Asana Personal Access Token"},
+ "project_id": {"type": "string", "title": "Asana Project ID"},
+ },
+ "secret": ["pat"],
+ "required": ["pat", "project_id"],
+ }
+
+ @classmethod
+ def icon(cls):
+ return "fa-asana"
+
+ @property
+ def api_base_url(self):
+ return "https://app.asana.com/api/1.0/tasks"
+
+ def notify(self, alert, query, user, new_state, app, host, metadata, options):
+ # Documentation: https://developers.asana.com/docs/tasks
+ state = "TRIGGERED" if new_state == Alert.TRIGGERED_STATE else "RECOVERED"
+
+ notes = textwrap.dedent(
+ f"""
+ {alert.name} has {state}.
+
+ Query: {host}/queries/{query.id}
+ Alert: {host}/alerts/{alert.id}
+ """
+ ).strip()
+
+ data = {
+ "name": f"[Redash Alert] {state}: {alert.name}",
+ "notes": notes,
+ "projects": [options["project_id"]],
+ }
+
+ try:
+ resp = requests.post(
+ self.api_base_url,
+ data=data,
+ timeout=5.0,
+ headers={"Authorization": f"Bearer {options['pat']}"},
+ )
+ logging.warning(resp.text)
+ if resp.status_code != 201:
+ logging.error("Asana send ERROR. status_code => {status}".format(status=resp.status_code))
+ except Exception as e:
+ logging.exception("Asana send ERROR. {exception}".format(exception=e))
+
+
+register(Asana)
diff --git a/redash/destinations/chatwork.py b/redash/destinations/chatwork.py
index b3ea726608..46904750c1 100644
--- a/redash/destinations/chatwork.py
+++ b/redash/destinations/chatwork.py
@@ -1,13 +1,12 @@
import logging
+
import requests
-from redash.destinations import *
+from redash.destinations import BaseDestination, register
class ChatWork(BaseDestination):
- ALERTS_DEFAULT_MESSAGE_TEMPLATE = (
- "{alert_name} changed state to {new_state}.\\n{alert_url}\\n{query_url}"
- )
+ ALERTS_DEFAULT_MESSAGE_TEMPLATE = "{alert_name} changed state to {new_state}.\\n{alert_url}\\n{query_url}"
@classmethod
def configuration_schema(cls):
@@ -30,12 +29,10 @@ def configuration_schema(cls):
def icon(cls):
return "fa-comment"
- def notify(self, alert, query, user, new_state, app, host, options):
+ def notify(self, alert, query, user, new_state, app, host, metadata, options):
try:
# Documentation: http://developer.chatwork.com/ja/endpoint_rooms.html#POST-rooms-room_id-messages
- url = "https://api.chatwork.com/v2/rooms/{room_id}/messages".format(
- room_id=options.get("room_id")
- )
+ url = "https://api.chatwork.com/v2/rooms/{room_id}/messages".format(room_id=options.get("room_id"))
message = ""
if alert.custom_subject:
@@ -43,15 +40,9 @@ def notify(self, alert, query, user, new_state, app, host, options):
if alert.custom_body:
message += alert.custom_body
else:
- alert_url = "{host}/alerts/{alert_id}".format(
- host=host, alert_id=alert.id
- )
- query_url = "{host}/queries/{query_id}".format(
- host=host, query_id=query.id
- )
- message_template = options.get(
- "message_template", ChatWork.ALERTS_DEFAULT_MESSAGE_TEMPLATE
- )
+ alert_url = "{host}/alerts/{alert_id}".format(host=host, alert_id=alert.id)
+ query_url = "{host}/queries/{query_id}".format(host=host, query_id=query.id)
+ message_template = options.get("message_template", ChatWork.ALERTS_DEFAULT_MESSAGE_TEMPLATE)
message += message_template.replace("\\n", "\n").format(
alert_name=alert.name,
new_state=new_state.upper(),
@@ -65,11 +56,7 @@ def notify(self, alert, query, user, new_state, app, host, options):
resp = requests.post(url, headers=headers, data=payload, timeout=5.0)
logging.warning(resp.text)
if resp.status_code != 200:
- logging.error(
- "ChatWork send ERROR. status_code => {status}".format(
- status=resp.status_code
- )
- )
+ logging.error("ChatWork send ERROR. status_code => {status}".format(status=resp.status_code))
except Exception:
logging.exception("ChatWork send ERROR.")
diff --git a/redash/destinations/datadog.py b/redash/destinations/datadog.py
new file mode 100644
index 0000000000..61a4e0ddc0
--- /dev/null
+++ b/redash/destinations/datadog.py
@@ -0,0 +1,93 @@
+import logging
+import os
+
+import requests
+
+from redash.destinations import BaseDestination, register
+from redash.utils import json_dumps
+
+
+class Datadog(BaseDestination):
+ @classmethod
+ def configuration_schema(cls):
+ return {
+ "type": "object",
+ "properties": {
+ "api_key": {"type": "string", "title": "API Key"},
+ "tags": {"type": "string", "title": "Tags"},
+ "priority": {"type": "string", "default": "normal", "title": "Priority"},
+ # https://docs.datadoghq.com/integrations/faq/list-of-api-source-attribute-value/
+ "source_type_name": {"type": "string", "default": "my_apps", "title": "Source Type Name"},
+ },
+ "secret": ["api_key"],
+ "required": ["api_key"],
+ }
+
+ @classmethod
+ def icon(cls):
+ return "fa-datadog"
+
+ def notify(self, alert, query, user, new_state, app, host, metadata, options):
+ # Documentation: https://docs.datadoghq.com/api/latest/events/#post-an-event
+ if new_state == "triggered":
+ alert_type = "error"
+ if alert.custom_subject:
+ title = alert.custom_subject
+ else:
+ title = f"{alert.name} just triggered"
+ else:
+ alert_type = "success"
+ if alert.custom_subject:
+ title = alert.custom_subject
+ else:
+ title = f"{alert.name} went back to normal"
+
+ if alert.custom_body:
+ text = alert.custom_body
+ else:
+ text = f"{alert.name} changed state to {new_state}."
+
+ query_url = f"{host}/queries/{query.id}"
+ alert_url = f"{host}/alerts/{alert.id}"
+ text += f"\nQuery: {query_url}\nAlert: {alert_url}"
+
+ headers = {
+ "Accept": "application/json",
+ "Content-Type": "application/json",
+ "DD-API-KEY": options.get("api_key"),
+ }
+
+ body = {
+ "title": title,
+ "text": text,
+ "alert_type": alert_type,
+ "priority": options.get("priority"),
+ "source_type_name": options.get("source_type_name"),
+ "aggregation_key": f"redash:{alert_url}",
+ "tags": [],
+ }
+
+ tags = options.get("tags")
+ if tags:
+ body["tags"] = tags.split(",")
+ body["tags"].extend(
+ [
+ "redash",
+ f"query_id:{query.id}",
+ f"alert_id:{alert.id}",
+ ]
+ )
+
+ dd_host = os.getenv("DATADOG_HOST", "api.datadoghq.com")
+ url = f"https://{dd_host}/api/v1/events"
+
+ try:
+ resp = requests.post(url, headers=headers, data=json_dumps(body), timeout=5.0)
+ logging.warning(resp.text)
+ if resp.status_code != 202:
+ logging.error(f"Datadog send ERROR. status_code => {resp.status_code}")
+ except Exception as e:
+ logging.exception("Datadog send ERROR: %s", e)
+
+
+register(Datadog)
diff --git a/redash/destinations/discord.py b/redash/destinations/discord.py
new file mode 100644
index 0000000000..c6deca20f6
--- /dev/null
+++ b/redash/destinations/discord.py
@@ -0,0 +1,70 @@
+import logging
+
+import requests
+
+from redash.destinations import BaseDestination, register
+from redash.models import Alert
+from redash.utils import json_dumps
+
+colors = {
+ # Colors are in a Decimal format as Discord requires them to be Decimals for embeds
+ Alert.OK_STATE: "2600544", # Green Decimal Code
+ Alert.TRIGGERED_STATE: "12597547", # Red Decimal Code
+ Alert.UNKNOWN_STATE: "16776960", # Yellow Decimal Code
+}
+
+
+class Discord(BaseDestination):
+ @classmethod
+ def configuration_schema(cls):
+ return {
+ "type": "object",
+ "properties": {"url": {"type": "string", "title": "Discord Webhook URL"}},
+ "secret": ["url"],
+ "required": ["url"],
+ }
+
+ @classmethod
+ def icon(cls):
+ return "fa-discord"
+
+ def notify(self, alert, query, user, new_state, app, host, metadata, options):
+ # Documentation: https://birdie0.github.io/discord-webhooks-guide/discord_webhook.html
+ fields = [
+ {
+ "name": "Query",
+ "value": f"{host}/queries/{query.id}",
+ "inline": True,
+ },
+ {
+ "name": "Alert",
+ "value": f"{host}/alerts/{alert.id}",
+ "inline": True,
+ },
+ ]
+ if alert.custom_body:
+ fields.append({"name": "Description", "value": alert.custom_body})
+ if new_state == Alert.TRIGGERED_STATE:
+ if alert.options.get("custom_subject"):
+ text = alert.options["custom_subject"]
+ else:
+ text = f"{alert.name} just triggered"
+ else:
+ text = f"{alert.name} went back to normal"
+ color = colors.get(new_state)
+ payload = {"content": text, "embeds": [{"color": color, "fields": fields}]}
+ headers = {"Content-Type": "application/json"}
+ try:
+ resp = requests.post(
+ options.get("url"),
+ data=json_dumps(payload),
+ headers=headers,
+ timeout=5.0,
+ )
+ if resp.status_code != 200 and resp.status_code != 204:
+ logging.error(f"Discord send ERROR. status_code => {resp.status_code}")
+ except Exception as e:
+ logging.exception("Discord send ERROR: %s", e)
+
+
+register(Discord)
diff --git a/redash/destinations/email.py b/redash/destinations/email.py
index 11923aa2ed..cb835212c7 100644
--- a/redash/destinations/email.py
+++ b/redash/destinations/email.py
@@ -1,8 +1,9 @@
import logging
from flask_mail import Message
+
from redash import mail, settings
-from redash.destinations import *
+from redash.destinations import BaseDestination, register
class Email(BaseDestination):
@@ -26,10 +27,8 @@ def configuration_schema(cls):
def icon(cls):
return "fa-envelope"
- def notify(self, alert, query, user, new_state, app, host, options):
- recipients = [
- email for email in options.get("addresses", "").split(",") if email
- ]
+ def notify(self, alert, query, user, new_state, app, host, metadata, options):
+ recipients = [email for email in options.get("addresses", "").split(",") if email]
if not recipients:
logging.warning("No emails given. Skipping send.")
@@ -37,12 +36,8 @@ def notify(self, alert, query, user, new_state, app, host, options):
if alert.custom_body:
html = alert.custom_body
else:
- html = """
- Check alert / check
- query .
- """.format(
- host=host, alert_id=alert.id, query_id=query.id
- )
+ with open(settings.REDASH_ALERTS_DEFAULT_MAIL_BODY_TEMPLATE_FILE, "r") as f:
+ html = alert.render_template(f.read())
logging.debug("Notifying: %s", recipients)
try:
@@ -50,9 +45,7 @@ def notify(self, alert, query, user, new_state, app, host, options):
if alert.custom_subject:
subject = alert.custom_subject
else:
- subject_template = options.get(
- "subject_template", settings.ALERTS_DEFAULT_MAIL_SUBJECT_TEMPLATE
- )
+ subject_template = options.get("subject_template", settings.ALERTS_DEFAULT_MAIL_SUBJECT_TEMPLATE)
subject = subject_template.format(alert_name=alert.name, state=state)
message = Message(recipients=recipients, subject=subject, html=html)
diff --git a/redash/destinations/hangoutschat.py b/redash/destinations/hangoutschat.py
index e896913b2a..f090662d50 100644
--- a/redash/destinations/hangoutschat.py
+++ b/redash/destinations/hangoutschat.py
@@ -1,7 +1,8 @@
import logging
+
import requests
-from redash.destinations import *
+from redash.destinations import BaseDestination, register
from redash.utils import json_dumps
@@ -36,16 +37,14 @@ def configuration_schema(cls):
def icon(cls):
return "fa-bolt"
- def notify(self, alert, query, user, new_state, app, host, options):
+ def notify(self, alert, query, user, new_state, app, host, metadata, options):
try:
if new_state == "triggered":
message = 'Triggered '
elif new_state == "ok":
message = 'Went back to normal '
else:
- message = (
- "Unable to determine status. Check Query and Alert configuration."
- )
+ message = "Unable to determine status. Check Query and Alert configuration."
if alert.custom_subject:
title = alert.custom_subject
@@ -56,17 +55,13 @@ def notify(self, alert, query, user, new_state, app, host, options):
"cards": [
{
"header": {"title": title},
- "sections": [
- {"widgets": [{"textParagraph": {"text": message}}]}
- ],
+ "sections": [{"widgets": [{"textParagraph": {"text": message}}]}],
}
]
}
if alert.custom_body:
- data["cards"][0]["sections"].append(
- {"widgets": [{"textParagraph": {"text": alert.custom_body}}]}
- )
+ data["cards"][0]["sections"].append({"widgets": [{"textParagraph": {"text": alert.custom_body}}]})
if options.get("icon_url"):
data["cards"][0]["header"]["imageUrl"] = options.get("icon_url")
@@ -81,9 +76,7 @@ def notify(self, alert, query, user, new_state, app, host, options):
"text": "OPEN QUERY",
"onClick": {
"openLink": {
- "url": "{host}/queries/{query_id}".format(
- host=host, query_id=query.id
- )
+ "url": "{host}/queries/{query_id}".format(host=host, query_id=query.id)
}
},
}
@@ -93,15 +86,9 @@ def notify(self, alert, query, user, new_state, app, host, options):
)
headers = {"Content-Type": "application/json; charset=UTF-8"}
- resp = requests.post(
- options.get("url"), data=json_dumps(data), headers=headers, timeout=5.0
- )
+ resp = requests.post(options.get("url"), data=json_dumps(data), headers=headers, timeout=5.0)
if resp.status_code != 200:
- logging.error(
- "webhook send ERROR. status_code => {status}".format(
- status=resp.status_code
- )
- )
+ logging.error("webhook send ERROR. status_code => {status}".format(status=resp.status_code))
except Exception:
logging.exception("webhook send ERROR.")
diff --git a/redash/destinations/hipchat.py b/redash/destinations/hipchat.py
deleted file mode 100644
index a8bd8c882f..0000000000
--- a/redash/destinations/hipchat.py
+++ /dev/null
@@ -1,62 +0,0 @@
-import logging
-import requests
-
-from redash.destinations import *
-from redash.models import Alert
-from redash.utils import json_dumps, deprecated
-
-
-colors = {
- Alert.OK_STATE: "green",
- Alert.TRIGGERED_STATE: "red",
- Alert.UNKNOWN_STATE: "yellow",
-}
-
-
-@deprecated()
-class HipChat(BaseDestination):
- @classmethod
- def configuration_schema(cls):
- return {
- "type": "object",
- "properties": {
- "url": {
- "type": "string",
- "title": "HipChat Notification URL (get it from the Integrations page)",
- }
- },
- "secret": ["url"],
- "required": ["url"],
- }
-
- @classmethod
- def icon(cls):
- return "fa-comment-o"
-
- def notify(self, alert, query, user, new_state, app, host, options):
- try:
- alert_url = "{host}/alerts/{alert_id}".format(host=host, alert_id=alert.id)
- query_url = "{host}/queries/{query_id}".format(host=host, query_id=query.id)
-
- message = '{alert_name} changed state to {new_state} (based on this query ).'.format(
- alert_name=alert.name,
- new_state=new_state.upper(),
- alert_url=alert_url,
- query_url=query_url,
- )
-
- data = {"message": message, "color": colors.get(new_state, "green")}
- headers = {"Content-Type": "application/json"}
- response = requests.post(
- options["url"], data=json_dumps(data), headers=headers, timeout=5.0
- )
-
- if response.status_code != 204:
- logging.error(
- "Bad status code received from HipChat: %d", response.status_code
- )
- except Exception:
- logging.exception("HipChat Send ERROR.")
-
-
-register(HipChat)
diff --git a/redash/destinations/mattermost.py b/redash/destinations/mattermost.py
index 106e1184ef..6254ecf71e 100644
--- a/redash/destinations/mattermost.py
+++ b/redash/destinations/mattermost.py
@@ -1,7 +1,8 @@
import logging
+
import requests
-from redash.destinations import *
+from redash.destinations import BaseDestination, register
from redash.utils import json_dumps
@@ -16,14 +17,14 @@ def configuration_schema(cls):
"icon_url": {"type": "string", "title": "Icon (URL)"},
"channel": {"type": "string", "title": "Channel"},
},
- "secret": "url"
+ "secret": "url",
}
@classmethod
def icon(cls):
return "fa-bolt"
- def notify(self, alert, query, user, new_state, app, host, options):
+ def notify(self, alert, query, user, new_state, app, host, metadata, options):
if alert.custom_subject:
text = alert.custom_subject
elif new_state == "triggered":
@@ -33,9 +34,7 @@ def notify(self, alert, query, user, new_state, app, host, options):
payload = {"text": text}
if alert.custom_body:
- payload["attachments"] = [
- {"fields": [{"title": "Description", "value": alert.custom_body}]}
- ]
+ payload["attachments"] = [{"fields": [{"title": "Description", "value": alert.custom_body}]}]
if options.get("username"):
payload["username"] = options.get("username")
@@ -45,17 +44,11 @@ def notify(self, alert, query, user, new_state, app, host, options):
payload["channel"] = options.get("channel")
try:
- resp = requests.post(
- options.get("url"), data=json_dumps(payload), timeout=5.0
- )
+ resp = requests.post(options.get("url"), data=json_dumps(payload), timeout=5.0)
logging.warning(resp.text)
if resp.status_code != 200:
- logging.error(
- "Mattermost webhook send ERROR. status_code => {status}".format(
- status=resp.status_code
- )
- )
+ logging.error("Mattermost webhook send ERROR. status_code => {status}".format(status=resp.status_code))
except Exception:
logging.exception("Mattermost webhook send ERROR.")
diff --git a/redash/destinations/microsoft_teams_webhook.py b/redash/destinations/microsoft_teams_webhook.py
new file mode 100644
index 0000000000..8c64d93534
--- /dev/null
+++ b/redash/destinations/microsoft_teams_webhook.py
@@ -0,0 +1,114 @@
+import logging
+from string import Template
+
+import requests
+
+from redash.destinations import BaseDestination, register
+from redash.utils import json_dumps
+
+
+def json_string_substitute(j, substitutions):
+ """
+ Alternative to string.format when the string has braces.
+ :param j: json string that will have substitutions
+ :type j: str
+ :param substitutions: dictionary of values to be replaced
+ :type substitutions: dict
+ """
+ if substitutions:
+ substitution_candidate = j.replace("{", "${")
+ string_template = Template(substitution_candidate)
+ substituted = string_template.safe_substitute(substitutions)
+ out_str = substituted.replace("${", "{")
+ return out_str
+ else:
+ return j
+
+
+class MicrosoftTeamsWebhook(BaseDestination):
+ ALERTS_DEFAULT_MESSAGE_TEMPLATE = json_dumps(
+ {
+ "@type": "MessageCard",
+ "@context": "http://schema.org/extensions",
+ "themeColor": "0076D7",
+ "summary": "A Redash Alert was Triggered",
+ "sections": [
+ {
+ "activityTitle": "A Redash Alert was Triggered",
+ "facts": [
+ {"name": "Alert Name", "value": "{alert_name}"},
+ {"name": "Alert URL", "value": "{alert_url}"},
+ {"name": "Query", "value": "{query_text}"},
+ {"name": "Query URL", "value": "{query_url}"},
+ ],
+ "markdown": True,
+ }
+ ],
+ }
+ )
+
+ @classmethod
+ def name(cls):
+ return "Microsoft Teams Webhook"
+
+ @classmethod
+ def type(cls):
+ return "microsoft_teams_webhook"
+
+ @classmethod
+ def configuration_schema(cls):
+ return {
+ "type": "object",
+ "properties": {
+ "url": {"type": "string", "title": "Microsoft Teams Webhook URL"},
+ "message_template": {
+ "type": "string",
+ "default": MicrosoftTeamsWebhook.ALERTS_DEFAULT_MESSAGE_TEMPLATE,
+ "title": "Message Template",
+ },
+ },
+ "required": ["url"],
+ }
+
+ @classmethod
+ def icon(cls):
+ return "fa-bolt"
+
+ def notify(self, alert, query, user, new_state, app, host, metadata, options):
+ """
+ :type app: redash.Redash
+ """
+ try:
+ alert_url = "{host}/alerts/{alert_id}".format(host=host, alert_id=alert.id)
+
+ query_url = "{host}/queries/{query_id}".format(host=host, query_id=query.id)
+
+ message_template = options.get("message_template", MicrosoftTeamsWebhook.ALERTS_DEFAULT_MESSAGE_TEMPLATE)
+
+ # Doing a string Template substitution here because the template contains braces, which
+            # result in KeyErrors when attempting str.format
+ payload = json_string_substitute(
+ message_template,
+ {
+ "alert_name": alert.name,
+ "alert_url": alert_url,
+ "query_text": query.query_text,
+ "query_url": query_url,
+ },
+ )
+
+ headers = {"Content-Type": "application/json"}
+
+ resp = requests.post(
+ options.get("url"),
+ data=payload,
+ headers=headers,
+ timeout=5.0,
+ )
+ if resp.status_code != 200:
+ logging.error("MS Teams Webhook send ERROR. status_code => {status}".format(status=resp.status_code))
+ except Exception:
+ logging.exception("MS Teams Webhook send ERROR.")
+
+
+register(MicrosoftTeamsWebhook)
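
A quick illustration of why the template goes through `json_string_substitute` rather than `str.format`: the MessageCard JSON is full of literal braces, which `str.format` would treat as replacement fields. The helper is repeated inline here so the sketch runs without a Redash install (values are made up):

    from string import Template

    def json_string_substitute(j, substitutions):
        # Same approach as above: string.Template only touches ${name} markers,
        # so literal JSON braces survive without having to be doubled.
        if substitutions:
            candidate = j.replace("{", "${")
            substituted = Template(candidate).safe_substitute(substitutions)
            return substituted.replace("${", "{")
        return j

    template = '{"facts": [{"name": "Alert Name", "value": "{alert_name}"}]}'
    print(json_string_substitute(template, {"alert_name": "Daily revenue drop"}))
    # {"facts": [{"name": "Alert Name", "value": "Daily revenue drop"}]}

    try:
        template.format(alert_name="Daily revenue drop")
    except KeyError as exc:
        print("str.format chokes on the literal braces:", exc)
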
diff --git a/redash/destinations/pagerduty.py b/redash/destinations/pagerduty.py
index 9ffb5fbc65..570c876da3 100644
--- a/redash/destinations/pagerduty.py
+++ b/redash/destinations/pagerduty.py
@@ -1,5 +1,6 @@
import logging
-from redash.destinations import *
+
+from redash.destinations import BaseDestination, register
enabled = True
@@ -10,7 +11,6 @@
class PagerDuty(BaseDestination):
-
KEY_STRING = "{alert_id}_{query_id}"
DESCRIPTION_STR = "Alert: {alert_name}"
@@ -40,8 +40,7 @@ def configuration_schema(cls):
def icon(cls):
return "creative-commons-pd-alt"
- def notify(self, alert, query, user, new_state, app, host, options):
-
+ def notify(self, alert, query, user, new_state, app, host, metadata, options):
if alert.custom_subject:
default_desc = alert.custom_subject
elif options.get("description"):
@@ -73,7 +72,6 @@ def notify(self, alert, query, user, new_state, app, host, options):
data["event_action"] = "resolve"
try:
-
ev = pypd.EventV2.create(data=data)
logging.warning(ev)
diff --git a/redash/destinations/slack.py b/redash/destinations/slack.py
index a5fd834283..a7e44b6a7c 100644
--- a/redash/destinations/slack.py
+++ b/redash/destinations/slack.py
@@ -1,7 +1,8 @@
import logging
+
import requests
-from redash.destinations import *
+from redash.destinations import BaseDestination, register
from redash.utils import json_dumps
@@ -12,34 +13,26 @@ def configuration_schema(cls):
"type": "object",
"properties": {
"url": {"type": "string", "title": "Slack Webhook URL"},
- "username": {"type": "string", "title": "Username"},
- "icon_emoji": {"type": "string", "title": "Icon (Emoji)"},
- "icon_url": {"type": "string", "title": "Icon (URL)"},
- "channel": {"type": "string", "title": "Channel"},
},
- "secret": ["url"]
+ "secret": ["url"],
}
@classmethod
def icon(cls):
return "fa-slack"
- def notify(self, alert, query, user, new_state, app, host, options):
+ def notify(self, alert, query, user, new_state, app, host, metadata, options):
# Documentation: https://api.slack.com/docs/attachments
fields = [
{
"title": "Query",
- "value": "{host}/queries/{query_id}".format(
- host=host, query_id=query.id
- ),
- "short": True,
+ "type": "mrkdwn",
+ "value": "{host}/queries/{query_id}".format(host=host, query_id=query.id),
},
{
"title": "Alert",
- "value": "{host}/alerts/{alert_id}".format(
- host=host, alert_id=alert.id
- ),
- "short": True,
+ "type": "mrkdwn",
+ "value": "{host}/alerts/{alert_id}".format(host=host, alert_id=alert.id),
},
]
if alert.custom_body:
@@ -56,26 +49,11 @@ def notify(self, alert, query, user, new_state, app, host, options):
payload = {"attachments": [{"text": text, "color": color, "fields": fields}]}
- if options.get("username"):
- payload["username"] = options.get("username")
- if options.get("icon_emoji"):
- payload["icon_emoji"] = options.get("icon_emoji")
- if options.get("icon_url"):
- payload["icon_url"] = options.get("icon_url")
- if options.get("channel"):
- payload["channel"] = options.get("channel")
-
try:
- resp = requests.post(
- options.get("url"), data=json_dumps(payload), timeout=5.0
- )
+ resp = requests.post(options.get("url"), data=json_dumps(payload).encode("utf-8"), timeout=5.0)
logging.warning(resp.text)
if resp.status_code != 200:
- logging.error(
- "Slack send ERROR. status_code => {status}".format(
- status=resp.status_code
- )
- )
+ logging.error("Slack send ERROR. status_code => {status}".format(status=resp.status_code))
except Exception:
logging.exception("Slack send ERROR.")
diff --git a/redash/destinations/webex.py b/redash/destinations/webex.py
new file mode 100644
index 0000000000..16d8ed05a6
--- /dev/null
+++ b/redash/destinations/webex.py
@@ -0,0 +1,138 @@
+import logging
+from copy import deepcopy
+
+import requests
+
+from redash.destinations import BaseDestination, register
+from redash.models import Alert
+
+
+class Webex(BaseDestination):
+ @classmethod
+ def configuration_schema(cls):
+ return {
+ "type": "object",
+ "properties": {
+ "webex_bot_token": {"type": "string", "title": "Webex Bot Token"},
+ "to_person_emails": {
+ "type": "string",
+ "title": "People (comma-separated)",
+ },
+ "to_room_ids": {
+ "type": "string",
+ "title": "Rooms (comma-separated)",
+ },
+ },
+ "secret": ["webex_bot_token"],
+ "required": ["webex_bot_token"],
+ }
+
+ @classmethod
+ def icon(cls):
+ return "fa-webex"
+
+ @property
+ def api_base_url(self):
+ return "https://webexapis.com/v1/messages"
+
+ @staticmethod
+ def formatted_attachments_template(subject, description, query_link, alert_link):
+ return [
+ {
+ "contentType": "application/vnd.microsoft.card.adaptive",
+ "content": {
+ "$schema": "http://adaptivecards.io/schemas/adaptive-card.json",
+ "type": "AdaptiveCard",
+ "version": "1.0",
+ "body": [
+ {
+ "type": "ColumnSet",
+ "columns": [
+ {
+ "type": "Column",
+ "width": 4,
+ "items": [
+ {
+ "type": "TextBlock",
+                                    "text": subject,
+ "weight": "bolder",
+ "size": "medium",
+ "wrap": True,
+ },
+ {
+ "type": "TextBlock",
+                                    "text": description,
+ "isSubtle": True,
+ "wrap": True,
+ },
+ {
+ "type": "TextBlock",
+ "text": f"Click [here]({query_link}) to check your query!",
+ "wrap": True,
+ "isSubtle": True,
+ },
+ {
+ "type": "TextBlock",
+ "text": f"Click [here]({alert_link}) to check your alert!",
+ "wrap": True,
+ "isSubtle": True,
+ },
+ ],
+ },
+ ],
+ }
+ ],
+ },
+ }
+ ]
+
+ def notify(self, alert, query, user, new_state, app, host, metadata, options):
+ # Documentation: https://developer.webex.com/docs/api/guides/cards
+
+ query_link = f"{host}/queries/{query.id}"
+ alert_link = f"{host}/alerts/{alert.id}"
+
+ if new_state == Alert.TRIGGERED_STATE:
+ subject = alert.custom_subject or f"{alert.name} just triggered"
+ else:
+ subject = f"{alert.name} went back to normal"
+
+ attachments = self.formatted_attachments_template(
+ subject=subject, description=alert.custom_body, query_link=query_link, alert_link=alert_link
+ )
+
+ template_payload = {"markdown": subject + "\n" + alert.custom_body, "attachments": attachments}
+
+ headers = {"Authorization": f"Bearer {options['webex_bot_token']}"}
+
+ api_destinations = {
+ "toPersonEmail": options.get("to_person_emails"),
+ "roomId": options.get("to_room_ids"),
+ }
+
+ for payload_tag, destinations in api_destinations.items():
+ if destinations is None:
+ continue
+
+ # destinations is guaranteed to be a comma-separated string
+ for destination_id in destinations.split(","):
+ payload = deepcopy(template_payload)
+ payload[payload_tag] = destination_id
+ self.post_message(payload, headers)
+
+ def post_message(self, payload, headers):
+ try:
+ resp = requests.post(
+ self.api_base_url,
+ json=payload,
+ headers=headers,
+ timeout=5.0,
+ )
+ logging.warning(resp.text)
+ if resp.status_code != 200:
+ logging.error("Webex send ERROR. status_code => {status}".format(status=resp.status_code))
+ except Exception as e:
+ logging.exception(f"Webex send ERROR: {e}")
+
+
+register(Webex)
diff --git a/redash/destinations/webhook.py b/redash/destinations/webhook.py
index ad5ccb1e9a..b98b6f18a3 100644
--- a/redash/destinations/webhook.py
+++ b/redash/destinations/webhook.py
@@ -1,10 +1,11 @@
import logging
+
import requests
from requests.auth import HTTPBasicAuth
-from redash.destinations import *
-from redash.utils import json_dumps
+from redash.destinations import BaseDestination, register
from redash.serializers import serialize_alert
+from redash.utils import json_dumps
class Webhook(BaseDestination):
@@ -25,23 +26,20 @@ def configuration_schema(cls):
def icon(cls):
return "fa-bolt"
- def notify(self, alert, query, user, new_state, app, host, options):
+ def notify(self, alert, query, user, new_state, app, host, metadata, options):
try:
data = {
"event": "alert_state_change",
"alert": serialize_alert(alert, full=False),
"url_base": host,
+ "metadata": metadata,
}
data["alert"]["description"] = alert.custom_body
data["alert"]["title"] = alert.custom_subject
headers = {"Content-Type": "application/json"}
- auth = (
- HTTPBasicAuth(options.get("username"), options.get("password"))
- if options.get("username")
- else None
- )
+ auth = HTTPBasicAuth(options.get("username"), options.get("password")) if options.get("username") else None
resp = requests.post(
options.get("url"),
data=json_dumps(data),
@@ -50,11 +48,7 @@ def notify(self, alert, query, user, new_state, app, host, options):
timeout=5.0,
)
if resp.status_code != 200:
- logging.error(
- "webhook send ERROR. status_code => {status}".format(
- status=resp.status_code
- )
- )
+ logging.error("webhook send ERROR. status_code => {status}".format(status=resp.status_code))
except Exception:
logging.exception("webhook send ERROR.")
diff --git a/redash/extensions.py b/redash/extensions.py
deleted file mode 100644
index 56452d1d13..0000000000
--- a/redash/extensions.py
+++ /dev/null
@@ -1,107 +0,0 @@
-import logging
-from collections import OrderedDict as odict
-
-from importlib_metadata import entry_points
-
-# The global Redash extension registry
-extensions = odict()
-
-# The periodic RQ jobs as provided by Redash extensions.
-# This is separate from the internal periodic RQ jobs
-# since the extension job discovery phase is
-# after the configuration has already happened.
-periodic_jobs = odict()
-
-extension_logger = logging.getLogger(__name__)
-
-
-def entry_point_loader(group_name, mapping, logger=None, *args, **kwargs):
- """
- Loads the list Python entry points with the given entry point group name
- (e.g. "redash.extensions"), calls each with the provided *args/**kwargs
- arguments and stores the results in the provided mapping under the name
- of the entry point.
-
- If provided, the logger is used for error and debugging statements.
- """
- if logger is None:
- logger = extension_logger
-
- for entry_point in entry_points().get(group_name, []):
- logger.info('Loading entry point "%s".', entry_point.name)
- try:
- # Then try to load the entry point (import and getattr)
- obj = entry_point.load()
- except (ImportError, AttributeError):
- # or move on
- logger.error(
- 'Entry point "%s" could not be found.', entry_point.name, exc_info=True
- )
- continue
-
- if not callable(obj):
- logger.error('Entry point "%s" is not a callable.', entry_point.name)
- continue
-
- try:
- # then simply call the loaded entry point.
- mapping[entry_point.name] = obj(*args, **kwargs)
- except AssertionError:
- logger.error(
- 'Entry point "%s" cound not be loaded.', entry_point.name, exc_info=True
- )
- continue
-
-
-def load_extensions(app):
- """Load the Redash extensions for the given Redash Flask app.
-
- The extension entry point can return any type of value but
- must take a Flask application object.
-
- E.g.::
-
- def extension(app):
- app.logger.info("Loading the Foobar extenions")
- Foobar(app)
-
- """
- entry_point_loader("redash.extensions", extensions, logger=app.logger, app=app)
-
-
-def load_periodic_jobs(logger=None):
- """Load the periodic jobs as defined in Redash extensions.
-
- The periodic task entry point needs to return a set of parameters
- that can be passed to RQ Scheduler API:
-
- https://github.com/rq/rq-scheduler#periodic--repeated-jobs
-
- E.g.::
-
- def add_two_and_two():
- return {
- "func": add,
- "args": [2, 2]
- "interval": 10, # in seconds or as a timedelta
- }
-
- and then registered with an entry point under the "redash.periodic_jobs"
- group, e.g. in your setup.py::
-
- setup(
- # ...
- entry_points={
- "redash.periodic_jobs": [
- "add_two_and_two = calculus.addition:add_two_and_two",
- ]
- # ...
- },
- # ...
- )
- """
- entry_point_loader("redash.periodic_jobs", periodic_jobs, logger=logger)
-
-
-def init_app(app):
- load_extensions(app)
diff --git a/redash/handlers/__init__.py b/redash/handlers/__init__.py
index a7a05ed0f7..8c6e61d8fe 100644
--- a/redash/handlers/__init__.py
+++ b/redash/handlers/__init__.py
@@ -24,13 +24,13 @@ def status_api():
def init_app(app):
from redash.handlers import (
+ admin,
+ authentication,
embed,
+ organization,
queries,
- static,
- authentication,
- admin,
setup,
- organization,
+ static,
)
app.register_blueprint(routes)
diff --git a/redash/handlers/admin.py b/redash/handlers/admin.py
index 753494d341..b376beec25 100644
--- a/redash/handlers/admin.py
+++ b/redash/handlers/admin.py
@@ -1,14 +1,13 @@
-from flask import request
-from flask_login import login_required, current_user
+from flask_login import current_user, login_required
from redash import models, redis_connection
from redash.authentication import current_org
from redash.handlers import routes
from redash.handlers.base import json_response, record_event
+from redash.monitor import rq_status
from redash.permissions import require_super_admin
from redash.serializers import QuerySerializer
from redash.utils import json_loads
-from redash.monitor import rq_status
@routes.route("/api/admin/queries/outdated", methods=["GET"])
@@ -29,13 +28,14 @@ def outdated_queries():
record_event(
current_org,
current_user._get_current_object(),
- {"action": "list", "object_type": "outdated_queries",},
+ {
+ "action": "list",
+ "object_type": "outdated_queries",
+ },
)
response = {
- "queries": QuerySerializer(
- outdated_queries, with_stats=True, with_last_modified_by=False
- ).serialize(),
+ "queries": QuerySerializer(outdated_queries, with_stats=True, with_last_modified_by=False).serialize(),
"updated_at": manager_status["last_refresh_at"],
}
return json_response(response)
diff --git a/redash/handlers/alerts.py b/redash/handlers/alerts.py
index 6efa7d4652..5e107ebb41 100644
--- a/redash/handlers/alerts.py
+++ b/redash/handlers/alerts.py
@@ -1,83 +1,85 @@
-import time
-
from flask import request
from funcy import project
-from redash import models
-from redash.serializers import serialize_alert
-from redash.handlers.base import BaseResource, get_object_or_404, require_fields
+from redash import models, utils
+from redash.handlers.base import (
+ BaseResource,
+ get_object_or_404,
+ require_fields,
+)
from redash.permissions import (
require_access,
require_admin_or_owner,
require_permission,
view_only,
)
-from redash.utils import json_dumps
+from redash.serializers import serialize_alert
+from redash.tasks.alerts import (
+ notify_subscriptions,
+ should_notify,
+)
class AlertResource(BaseResource):
def get(self, alert_id):
- alert = get_object_or_404(
- models.Alert.get_by_id_and_org, alert_id, self.current_org
- )
+ alert = get_object_or_404(models.Alert.get_by_id_and_org, alert_id, self.current_org)
require_access(alert, self.current_user, view_only)
- self.record_event(
- {"action": "view", "object_id": alert.id, "object_type": "alert"}
- )
+ self.record_event({"action": "view", "object_id": alert.id, "object_type": "alert"})
return serialize_alert(alert)
def post(self, alert_id):
req = request.get_json(True)
params = project(req, ("options", "name", "query_id", "rearm"))
- alert = get_object_or_404(
- models.Alert.get_by_id_and_org, alert_id, self.current_org
- )
+ alert = get_object_or_404(models.Alert.get_by_id_and_org, alert_id, self.current_org)
require_admin_or_owner(alert.user.id)
self.update_model(alert, params)
models.db.session.commit()
- self.record_event(
- {"action": "edit", "object_id": alert.id, "object_type": "alert"}
- )
+ self.record_event({"action": "edit", "object_id": alert.id, "object_type": "alert"})
return serialize_alert(alert)
def delete(self, alert_id):
- alert = get_object_or_404(
- models.Alert.get_by_id_and_org, alert_id, self.current_org
- )
+ alert = get_object_or_404(models.Alert.get_by_id_and_org, alert_id, self.current_org)
require_admin_or_owner(alert.user_id)
models.db.session.delete(alert)
models.db.session.commit()
+class AlertEvaluateResource(BaseResource):
+ def post(self, alert_id):
+ alert = get_object_or_404(models.Alert.get_by_id_and_org, alert_id, self.current_org)
+ require_admin_or_owner(alert.user.id)
+
+ new_state = alert.evaluate()
+ if should_notify(alert, new_state):
+ alert.state = new_state
+ alert.last_triggered_at = utils.utcnow()
+ models.db.session.commit()
+
+ notify_subscriptions(alert, new_state, {})
+ self.record_event({"action": "evaluate", "object_id": alert.id, "object_type": "alert"})
+
+
class AlertMuteResource(BaseResource):
def post(self, alert_id):
- alert = get_object_or_404(
- models.Alert.get_by_id_and_org, alert_id, self.current_org
- )
+ alert = get_object_or_404(models.Alert.get_by_id_and_org, alert_id, self.current_org)
require_admin_or_owner(alert.user.id)
alert.options["muted"] = True
models.db.session.commit()
- self.record_event(
- {"action": "mute", "object_id": alert.id, "object_type": "alert"}
- )
+ self.record_event({"action": "mute", "object_id": alert.id, "object_type": "alert"})
def delete(self, alert_id):
- alert = get_object_or_404(
- models.Alert.get_by_id_and_org, alert_id, self.current_org
- )
+ alert = get_object_or_404(models.Alert.get_by_id_and_org, alert_id, self.current_org)
require_admin_or_owner(alert.user.id)
alert.options["muted"] = False
models.db.session.commit()
- self.record_event(
- {"action": "unmute", "object_id": alert.id, "object_type": "alert"}
- )
+ self.record_event({"action": "unmute", "object_id": alert.id, "object_type": "alert"})
class AlertListResource(BaseResource):
@@ -100,19 +102,14 @@ def post(self):
models.db.session.flush()
models.db.session.commit()
- self.record_event(
- {"action": "create", "object_id": alert.id, "object_type": "alert"}
- )
+ self.record_event({"action": "create", "object_id": alert.id, "object_type": "alert"})
return serialize_alert(alert)
@require_permission("list_alerts")
def get(self):
self.record_event({"action": "list", "object_type": "alert"})
- return [
- serialize_alert(alert)
- for alert in models.Alert.all(group_ids=self.current_user.group_ids)
- ]
+ return [serialize_alert(alert) for alert in models.Alert.all(group_ids=self.current_user.group_ids)]
class AlertSubscriptionListResource(BaseResource):
@@ -124,9 +121,7 @@ def post(self, alert_id):
kwargs = {"alert": alert, "user": self.current_user}
if "destination_id" in req:
- destination = models.NotificationDestination.get_by_id_and_org(
- req["destination_id"], self.current_org
- )
+ destination = models.NotificationDestination.get_by_id_and_org(req["destination_id"], self.current_org)
kwargs["destination"] = destination
subscription = models.AlertSubscription(**kwargs)
@@ -160,6 +155,4 @@ def delete(self, alert_id, subscriber_id):
models.db.session.delete(subscription)
models.db.session.commit()
- self.record_event(
- {"action": "unsubscribe", "object_id": alert_id, "object_type": "alert"}
- )
+ self.record_event({"action": "unsubscribe", "object_id": alert_id, "object_type": "alert"})
diff --git a/redash/handlers/api.py b/redash/handlers/api.py
index f4ba56421e..1898460512 100644
--- a/redash/handlers/api.py
+++ b/redash/handlers/api.py
@@ -3,20 +3,22 @@
from werkzeug.wrappers import Response
from redash.handlers.alerts import (
+ AlertEvaluateResource,
AlertListResource,
- AlertResource,
AlertMuteResource,
+ AlertResource,
AlertSubscriptionListResource,
AlertSubscriptionResource,
)
from redash.handlers.base import org_scoped_rule
from redash.handlers.dashboards import (
- MyDashboardsResource,
DashboardFavoriteListResource,
+ DashboardForkResource,
DashboardListResource,
DashboardResource,
DashboardShareResource,
DashboardTagsResource,
+ MyDashboardsResource,
PublicDashboardResource,
)
from redash.handlers.data_sources import (
@@ -38,7 +40,10 @@
DestinationTypeListResource,
)
from redash.handlers.events import EventsResource
-from redash.handlers.favorites import DashboardFavoriteResource, QueryFavoriteResource
+from redash.handlers.favorites import (
+ DashboardFavoriteResource,
+ QueryFavoriteResource,
+)
from redash.handlers.groups import (
GroupDataSourceListResource,
GroupDataSourceResource,
@@ -59,15 +64,15 @@
QueryListResource,
QueryRecentResource,
QueryRefreshResource,
+ QueryRegenerateApiKeyResource,
QueryResource,
QuerySearchResource,
QueryTagsResource,
- QueryRegenerateApiKeyResource,
)
from redash.handlers.query_results import (
JobResource,
- QueryResultDropdownResource,
QueryDropdownsResource,
+ QueryResultDropdownResource,
QueryResultListResource,
QueryResultResource,
)
@@ -112,9 +117,8 @@ def json_representation(data, code, headers=None):
api.add_org_resource(AlertResource, "/api/alerts/<alert_id>", endpoint="alert")
-api.add_org_resource(
-    AlertMuteResource, "/api/alerts/<alert_id>/mute", endpoint="alert_mute"
-)
+api.add_org_resource(AlertMuteResource, "/api/alerts/<alert_id>/mute", endpoint="alert_mute")
+api.add_org_resource(AlertEvaluateResource, "/api/alerts/<alert_id>/eval", endpoint="alert_eval")
api.add_org_resource(
AlertSubscriptionListResource,
"/api/alerts//subscriptions",
@@ -128,9 +132,7 @@ def json_representation(data, code, headers=None):
api.add_org_resource(AlertListResource, "/api/alerts", endpoint="alerts")
api.add_org_resource(DashboardListResource, "/api/dashboards", endpoint="dashboards")
-api.add_org_resource(
-    DashboardResource, "/api/dashboards/<dashboard_id>", endpoint="dashboard"
-)
+api.add_org_resource(DashboardResource, "/api/dashboards/<dashboard_id>", endpoint="dashboard")
api.add_org_resource(
PublicDashboardResource,
"/api/dashboards/public/",
@@ -142,18 +144,10 @@ def json_representation(data, code, headers=None):
endpoint="dashboard_share",
)
-api.add_org_resource(
- DataSourceTypeListResource, "/api/data_sources/types", endpoint="data_source_types"
-)
-api.add_org_resource(
- DataSourceListResource, "/api/data_sources", endpoint="data_sources"
-)
-api.add_org_resource(
-    DataSourceSchemaResource, "/api/data_sources/<data_source_id>/schema"
-)
-api.add_org_resource(
-    DatabricksDatabaseListResource, "/api/databricks/databases/<data_source_id>"
-)
+api.add_org_resource(DataSourceTypeListResource, "/api/data_sources/types", endpoint="data_source_types")
+api.add_org_resource(DataSourceListResource, "/api/data_sources", endpoint="data_sources")
+api.add_org_resource(DataSourceSchemaResource, "/api/data_sources/<data_source_id>/schema")
+api.add_org_resource(DatabricksDatabaseListResource, "/api/databricks/databases/<data_source_id>")
api.add_org_resource(
DatabricksSchemaResource,
"/api/databricks/databases///tables",
@@ -162,19 +156,13 @@ def json_representation(data, code, headers=None):
DatabricksTableColumnListResource,
"/api/databricks/databases///columns/",
)
-api.add_org_resource(
-    DataSourcePauseResource, "/api/data_sources/<data_source_id>/pause"
-)
+api.add_org_resource(DataSourcePauseResource, "/api/data_sources/<data_source_id>/pause")
api.add_org_resource(DataSourceTestResource, "/api/data_sources/<data_source_id>/test")
-api.add_org_resource(
-    DataSourceResource, "/api/data_sources/<data_source_id>", endpoint="data_source"
-)
+api.add_org_resource(DataSourceResource, "/api/data_sources/<data_source_id>", endpoint="data_source")
api.add_org_resource(GroupListResource, "/api/groups", endpoint="groups")
api.add_org_resource(GroupResource, "/api/groups/<group_id>", endpoint="group")
-api.add_org_resource(
-    GroupMemberListResource, "/api/groups/<group_id>/members", endpoint="group_members"
-)
+api.add_org_resource(GroupMemberListResource, "/api/groups/<group_id>/members", endpoint="group_members")
api.add_org_resource(
GroupMemberResource,
"/api/groups//members/",
@@ -193,12 +181,8 @@ def json_representation(data, code, headers=None):
api.add_org_resource(EventsResource, "/api/events", endpoint="events")
-api.add_org_resource(
- QueryFavoriteListResource, "/api/queries/favorites", endpoint="query_favorites"
-)
-api.add_org_resource(
-    QueryFavoriteResource, "/api/queries/<query_id>/favorite", endpoint="query_favorite"
-)
+api.add_org_resource(QueryFavoriteListResource, "/api/queries/favorites", endpoint="query_favorites")
+api.add_org_resource(QueryFavoriteResource, "/api/queries/<query_id>/favorite", endpoint="query_favorite")
api.add_org_resource(
DashboardFavoriteListResource,
"/api/dashboards/favorites",
@@ -209,32 +193,21 @@ def json_representation(data, code, headers=None):
"/api/dashboards//favorite",
endpoint="dashboard_favorite",
)
+api.add_org_resource(DashboardForkResource, "/api/dashboards/<dashboard_id>/fork", endpoint="dashboard_fork")
api.add_org_resource(MyDashboardsResource, "/api/dashboards/my", endpoint="my_dashboards")
api.add_org_resource(QueryTagsResource, "/api/queries/tags", endpoint="query_tags")
-api.add_org_resource(
- DashboardTagsResource, "/api/dashboards/tags", endpoint="dashboard_tags"
-)
+api.add_org_resource(DashboardTagsResource, "/api/dashboards/tags", endpoint="dashboard_tags")
-api.add_org_resource(
- QuerySearchResource, "/api/queries/search", endpoint="queries_search"
-)
-api.add_org_resource(
- QueryRecentResource, "/api/queries/recent", endpoint="recent_queries"
-)
-api.add_org_resource(
- QueryArchiveResource, "/api/queries/archive", endpoint="queries_archive"
-)
+api.add_org_resource(QuerySearchResource, "/api/queries/search", endpoint="queries_search")
+api.add_org_resource(QueryRecentResource, "/api/queries/recent", endpoint="recent_queries")
+api.add_org_resource(QueryArchiveResource, "/api/queries/archive", endpoint="queries_archive")
api.add_org_resource(QueryListResource, "/api/queries", endpoint="queries")
api.add_org_resource(MyQueriesResource, "/api/queries/my", endpoint="my_queries")
-api.add_org_resource(
-    QueryRefreshResource, "/api/queries/<query_id>/refresh", endpoint="query_refresh"
-)
+api.add_org_resource(QueryRefreshResource, "/api/queries/<query_id>/refresh", endpoint="query_refresh")
api.add_org_resource(QueryResource, "/api/queries/<query_id>", endpoint="query")
-api.add_org_resource(
-    QueryForkResource, "/api/queries/<query_id>/fork", endpoint="query_fork"
-)
+api.add_org_resource(QueryForkResource, "/api/queries/<query_id>/fork", endpoint="query_fork")
api.add_org_resource(
QueryRegenerateApiKeyResource,
"/api/queries//regenerate_api_key",
@@ -252,9 +225,7 @@ def json_representation(data, code, headers=None):
endpoint="check_permissions",
)
-api.add_org_resource(
- QueryResultListResource, "/api/query_results", endpoint="query_results"
-)
+api.add_org_resource(QueryResultListResource, "/api/query_results", endpoint="query_results")
api.add_org_resource(
QueryResultDropdownResource,
"/api/queries//dropdown",
@@ -283,9 +254,7 @@ def json_representation(data, code, headers=None):
api.add_org_resource(UserListResource, "/api/users", endpoint="users")
api.add_org_resource(UserResource, "/api/users/<user_id>", endpoint="user")
-api.add_org_resource(
-    UserInviteResource, "/api/users/<user_id>/invite", endpoint="user_invite"
-)
+api.add_org_resource(UserInviteResource, "/api/users/<user_id>/invite", endpoint="user_invite")
api.add_org_resource(
UserResetPasswordResource,
"/api/users//reset_password",
@@ -296,13 +265,9 @@ def json_representation(data, code, headers=None):
"/api/users//regenerate_api_key",
endpoint="user_regenerate_api_key",
)
-api.add_org_resource(
-    UserDisableResource, "/api/users/<user_id>/disable", endpoint="user_disable"
-)
+api.add_org_resource(UserDisableResource, "/api/users/<user_id>/disable", endpoint="user_disable")
-api.add_org_resource(
- VisualizationListResource, "/api/visualizations", endpoint="visualizations"
-)
+api.add_org_resource(VisualizationListResource, "/api/visualizations", endpoint="visualizations")
api.add_org_resource(
VisualizationResource,
"/api/visualizations/",
@@ -312,22 +277,12 @@ def json_representation(data, code, headers=None):
api.add_org_resource(WidgetListResource, "/api/widgets", endpoint="widgets")
api.add_org_resource(WidgetResource, "/api/widgets/<int:widget_id>", endpoint="widget")
-api.add_org_resource(
- DestinationTypeListResource, "/api/destinations/types", endpoint="destination_types"
-)
-api.add_org_resource(
-    DestinationResource, "/api/destinations/<destination_id>", endpoint="destination"
-)
-api.add_org_resource(
- DestinationListResource, "/api/destinations", endpoint="destinations"
-)
+api.add_org_resource(DestinationTypeListResource, "/api/destinations/types", endpoint="destination_types")
+api.add_org_resource(DestinationResource, "/api/destinations/<destination_id>", endpoint="destination")
+api.add_org_resource(DestinationListResource, "/api/destinations", endpoint="destinations")
-api.add_org_resource(
-    QuerySnippetResource, "/api/query_snippets/<snippet_id>", endpoint="query_snippet"
-)
-api.add_org_resource(
- QuerySnippetListResource, "/api/query_snippets", endpoint="query_snippets"
-)
+api.add_org_resource(QuerySnippetResource, "/api/query_snippets/<snippet_id>", endpoint="query_snippet")
+api.add_org_resource(QuerySnippetListResource, "/api/query_snippets", endpoint="query_snippets")
api.add_org_resource(
OrganizationSettings, "/api/settings/organization", endpoint="organization_settings"
diff --git a/redash/handlers/authentication.py b/redash/handlers/authentication.py
index 948e3aa2c4..45db380083 100644
--- a/redash/handlers/authentication.py
+++ b/redash/handlers/authentication.py
@@ -1,13 +1,13 @@
import logging
from flask import abort, flash, redirect, render_template, request, url_for
-
from flask_login import current_user, login_required, login_user, logout_user
+from itsdangerous import BadSignature, SignatureExpired
+from sqlalchemy.orm.exc import NoResultFound
+
from redash import __version__, limiter, models, settings
from redash.authentication import current_org, get_login_url, get_next_path
from redash.authentication.account import (
- BadSignature,
- SignatureExpired,
send_password_reset_email,
send_user_disabled_email,
send_verify_email,
@@ -15,47 +15,44 @@
)
from redash.handlers import routes
from redash.handlers.base import json_response, org_scoped_rule
-from redash.version_check import get_latest_version
-from sqlalchemy.orm.exc import NoResultFound
logger = logging.getLogger(__name__)
def get_google_auth_url(next_path):
if settings.MULTI_ORG:
- google_auth_url = url_for(
- "google_oauth.authorize_org", next=next_path, org_slug=current_org.slug
- )
+ google_auth_url = url_for("google_oauth.authorize_org", next=next_path, org_slug=current_org.slug)
else:
google_auth_url = url_for("google_oauth.authorize", next=next_path)
return google_auth_url
def render_token_login_page(template, org_slug, token, invite):
+ error_message = None
try:
user_id = validate_token(token)
org = current_org._get_current_object()
user = models.User.get_by_id_and_org(user_id, org)
except NoResultFound:
logger.exception(
- "Bad user id in token. Token= , User id= %s, Org=%s",
- user_id,
+ "Bad user id in token. Token=%s , User id= %s, Org=%s",
token,
+ user_id,
org_slug,
)
+ error_message = "Your invite link is invalid. Bad user id in token. Please ask for a new one."
+ except SignatureExpired:
+ logger.exception("Token signature has expired. Token: %s, org=%s", token, org_slug)
+ error_message = "Your invite link has expired. Please ask for a new one."
+ except BadSignature:
+ logger.exception("Bad signature for the token: %s, org=%s", token, org_slug)
+ error_message = "Your invite link is invalid. Bad signature. Please double-check the token."
+
+ if error_message:
return (
render_template(
"error.html",
- error_message="Invalid invite link. Please ask for a new one.",
- ),
- 400,
- )
- except (SignatureExpired, BadSignature):
- logger.exception("Failed to verify invite token: %s, org=%s", token, org_slug)
- return (
- render_template(
- "error.html",
- error_message="Your invite link has expired. Please ask for a new one.",
+ error_message=error_message,
),
400,
)
@@ -65,8 +62,7 @@ def render_token_login_page(template, org_slug, token, invite):
render_template(
"error.html",
error_message=(
- "This invitation has already been accepted. "
- "Please try resetting your password instead."
+ "This invitation has already been accepted. Please try resetting your password instead."
),
),
400,
@@ -126,9 +122,7 @@ def verify(token, org_slug=None):
org = current_org._get_current_object()
user = models.User.get_by_id_and_org(user_id, org)
except (BadSignature, NoResultFound):
- logger.exception(
- "Failed to verify email verification token: %s, org=%s", token, org_slug
- )
+ logger.exception("Failed to verify email verification token: %s, org=%s", token, org_slug)
return (
render_template(
"error.html",
@@ -175,11 +169,7 @@ def verification_email(org_slug=None):
if not current_user.is_email_verified:
send_verify_email(current_user, current_org)
- return json_response(
- {
- "message": "Please check your email inbox in order to verify your email address."
- }
- )
+ return json_response({"message": "Please check your email inbox in order to verify your email address."})
@routes.route(org_scoped_rule("/login"), methods=["GET", "POST"])
@@ -187,9 +177,9 @@ def verification_email(org_slug=None):
def login(org_slug=None):
# We intentionally use == as otherwise it won't actually use the proxy. So weird :O
# noinspection PyComparisonWithNone
- if current_org == None and not settings.MULTI_ORG:
+ if current_org == None and not settings.MULTI_ORG: # noqa: E711
return redirect("/setup")
- elif current_org == None:
+ elif current_org == None: # noqa: E711
return redirect("/")
index_url = url_for("redash.index", org_slug=org_slug)
@@ -198,15 +188,11 @@ def login(org_slug=None):
if current_user.is_authenticated:
return redirect(next_path)
- if request.method == "POST":
+ if request.method == "POST" and current_org.get_setting("auth_password_login_enabled"):
try:
org = current_org._get_current_object()
user = models.User.get_by_email_and_org(request.form["email"], org)
- if (
- user
- and not user.is_disabled
- and user.verify_password(request.form["password"])
- ):
+ if user and not user.is_disabled and user.verify_password(request.form["password"]):
remember = "remember" in request.form
login_user(user, remember=remember)
return redirect(next_path)
@@ -214,6 +200,8 @@ def login(org_slug=None):
flash("Wrong email or password.")
except NoResultFound:
flash("Wrong email or password.")
+ elif request.method == "POST" and not current_org.get_setting("auth_password_login_enabled"):
+ flash("Password login is not enabled for your organization.")
google_auth_url = get_google_auth_url(next_path)
@@ -268,27 +256,16 @@ def number_format_config():
def client_config():
if not current_user.is_api_user() and current_user.is_authenticated:
- client_config = {
- "newVersionAvailable": bool(get_latest_version()),
+ client_config_inner = {
"version": __version__,
}
else:
- client_config = {}
-
- if (
- current_user.has_permission("admin")
- and current_org.get_setting("beacon_consent") is None
- ):
- client_config["showBeaconConsentMessage"] = True
+ client_config_inner = {}
defaults = {
"allowScriptsInUserInput": settings.ALLOW_SCRIPTS_IN_USER_INPUT,
- "showPermissionsControl": current_org.get_setting(
- "feature_show_permissions_control"
- ),
- "hidePlotlyModeBar": current_org.get_setting(
- "hide_plotly_mode_bar"
- ),
+ "showPermissionsControl": current_org.get_setting("feature_show_permissions_control"),
+ "hidePlotlyModeBar": current_org.get_setting("hide_plotly_mode_bar"),
"disablePublicUrls": current_org.get_setting("disable_public_urls"),
"allowCustomJSVisualizations": settings.FEATURE_ALLOW_CUSTOM_JS_VISUALIZATIONS,
"autoPublishNamedQueries": settings.FEATURE_AUTO_PUBLISH_NAMED_QUERIES,
@@ -303,12 +280,12 @@ def client_config():
"tableCellMaxJSONSize": settings.TABLE_CELL_MAX_JSON_SIZE,
}
- client_config.update(defaults)
- client_config.update({"basePath": base_href()})
- client_config.update(date_time_format_config())
- client_config.update(number_format_config())
+ client_config_inner.update(defaults)
+ client_config_inner.update({"basePath": base_href()})
+ client_config_inner.update(date_time_format_config())
+ client_config_inner.update(number_format_config())
- return client_config
+ return client_config_inner
def messages():
@@ -325,9 +302,7 @@ def messages():
@routes.route("/api/config", methods=["GET"])
def config(org_slug=None):
- return json_response(
- {"org_slug": current_org.slug, "client_config": client_config()}
- )
+ return json_response({"org_slug": current_org.slug, "client_config": client_config()})
@routes.route(org_scoped_rule("/api/session"), methods=["GET"])
diff --git a/redash/handlers/base.py b/redash/handlers/base.py
index dfe64f66ad..315bab536f 100644
--- a/redash/handlers/base.py
+++ b/redash/handlers/base.py
@@ -1,23 +1,21 @@
import time
-
from inspect import isclass
-from flask import Blueprint, current_app, request
+from flask import Blueprint, current_app, request
from flask_login import current_user, login_required
from flask_restful import Resource, abort
+from sqlalchemy import cast
+from sqlalchemy.dialects.postgresql import ARRAY
+from sqlalchemy.orm.exc import NoResultFound
+
from redash import settings
from redash.authentication import current_org
from redash.models import db
from redash.tasks import record_event as record_event_task
from redash.utils import json_dumps
-from sqlalchemy.orm.exc import NoResultFound
-from sqlalchemy import cast
-from sqlalchemy.dialects import postgresql
-from sqlalchemy_utils import sort_query
+from redash.utils.query_order import sort_query
-routes = Blueprint(
- "redash", __name__, template_folder=settings.fix_assets_path("templates")
-)
+routes = Blueprint("redash", __name__, template_folder=settings.fix_assets_path("templates"))
class BaseResource(Resource):
@@ -124,9 +122,7 @@ def json_response(response):
def filter_by_tags(result_set, column):
if request.args.getlist("tags"):
tags = request.args.getlist("tags")
- result_set = result_set.filter(
- cast(column, postgresql.ARRAY(db.Text)).contains(tags)
- )
+ result_set = result_set.filter(cast(column, ARRAY(db.Text)).contains(tags))
return result_set
diff --git a/redash/handlers/dashboards.py b/redash/handlers/dashboards.py
index 0e5524a6f0..297311f744 100644
--- a/redash/handlers/dashboards.py
+++ b/redash/handlers/dashboards.py
@@ -1,16 +1,17 @@
from flask import request, url_for
-from funcy import project, partial
-
from flask_restful import abort
+from funcy import partial, project
+from sqlalchemy.orm.exc import StaleDataError
+
from redash import models
from redash.handlers.base import (
BaseResource,
+ filter_by_tags,
get_object_or_404,
get_object,
paginate,
- filter_by_tags,
- order_results as _order_results,
)
+from redash.handlers.base import order_results as _order_results
from redash.permissions import (
can_modify,
require_admin_or_owner,
@@ -43,9 +44,7 @@
"-created_at": "-created_at",
}
-order_results = partial(
- _order_results, default_order="-created_at", allowed_orders=order_map
-)
+order_results = partial(_order_results, default_order="-created_at", allowed_orders=order_map)
class DashboardListResource(BaseResource):
@@ -72,9 +71,7 @@ def get(self):
search_term,
)
else:
- results = models.Dashboard.all(
- self.current_org, self.current_user.group_ids, self.current_user.id
- )
+ results = models.Dashboard.all(self.current_org, self.current_user.group_ids, self.current_user.id)
results = filter_by_tags(results, models.Dashboard.tags)
@@ -94,9 +91,7 @@ def get(self):
)
if search_term:
- self.record_event(
- {"action": "search", "object_type": "dashboard", "term": search_term}
- )
+ self.record_event({"action": "search", "object_type": "dashboard", "term": search_term})
else:
self.record_event({"action": "list", "object_type": "dashboard"})
@@ -117,7 +112,7 @@ def post(self):
org=self.current_org,
user=self.current_user,
is_draft=True,
- layout="[]",
+ layout=[],
)
models.db.session.add(dashboard)
models.db.session.commit()
@@ -153,12 +148,7 @@ def get(self):
page = request.args.get("page", 1, type=int)
page_size = request.args.get("page_size", 25, type=int)
- return paginate(
- ordered_results,
- page,
- page_size,
- DashboardSerializer
- )
+ return paginate(ordered_results, page, page_size, DashboardSerializer)
class DashboardResource(BaseResource):
@@ -204,9 +194,7 @@ def get(self, dashboard_id=None):
fn = models.Dashboard.get_by_id_and_org
return_dynamic_key = request.args.get("return_dynamic_key", "false").lower() == 'true'
dashboard = get_object_or_404(fn, dashboard_id, self.current_org)
- response = DashboardSerializer(
- dashboard, with_widgets=True, user=self.current_user
- ).serialize()
+ response = DashboardSerializer(dashboard, with_widgets=True, user=self.current_user).serialize()
api_key = models.ApiKey.get_by_object(dashboard)
# If the dashboard has api_key then use the default settings; else create a new token with salt.
@@ -241,9 +229,7 @@ def get(self, dashboard_id=None):
response["can_edit"] = can_modify(dashboard, self.current_user)
- self.record_event(
- {"action": "view", "object_id": dashboard.id, "object_type": "dashboard"}
- )
+ self.record_event({"action": "view", "object_id": dashboard.id, "object_type": "dashboard"})
return response
@@ -294,13 +280,9 @@ def post(self, dashboard_id):
except StaleDataError:
abort(409)
- result = DashboardSerializer(
- dashboard, with_widgets=True, user=self.current_user
- ).serialize()
+ result = DashboardSerializer(dashboard, with_widgets=True, user=self.current_user).serialize()
- self.record_event(
- {"action": "edit", "object_id": dashboard.id, "object_type": "dashboard"}
- )
+ self.record_event({"action": "edit", "object_id": dashboard.id, "object_type": "dashboard"})
return result
@@ -317,14 +299,10 @@ def delete(self, dashboard_id):
dashboard.is_archived = True
dashboard.record_changes(changed_by=self.current_user)
models.db.session.add(dashboard)
- d = DashboardSerializer(
- dashboard, with_widgets=True, user=self.current_user
- ).serialize()
+ d = DashboardSerializer(dashboard, with_widgets=True, user=self.current_user).serialize()
models.db.session.commit()
- self.record_event(
- {"action": "archive", "object_id": dashboard.id, "object_type": "dashboard"}
- )
+ self.record_event({"action": "archive", "object_id": dashboard.id, "object_type": "dashboard"})
return d
@@ -432,9 +410,7 @@ def get(self):
self.current_user.id,
search_term,
)
- favorites = models.Dashboard.favorites(
- self.current_user, base_query=base_query
- )
+ favorites = models.Dashboard.favorites(self.current_user, base_query=base_query)
else:
favorites = models.Dashboard.favorites(self.current_user)
@@ -462,4 +438,4 @@ def get(self):
}
)
- return response
\ No newline at end of file
+ return response
diff --git a/redash/handlers/data_sources.py b/redash/handlers/data_sources.py
index 5ff03da140..49a728152b 100644
--- a/redash/handlers/data_sources.py
+++ b/redash/handlers/data_sources.py
@@ -7,7 +7,11 @@
from sqlalchemy.exc import IntegrityError
from redash import models
-from redash.handlers.base import BaseResource, get_object_or_404, require_fields
+from redash.handlers.base import (
+ BaseResource,
+ get_object_or_404,
+ require_fields,
+)
from redash.permissions import (
require_access,
require_admin,
@@ -17,8 +21,9 @@
from redash.query_runner import (
get_configuration_schema_for_query_runner_type,
query_runners,
- NotSupported,
)
+from redash.serializers import serialize_job
+from redash.tasks.general import get_schema, test_connection
from redash.utils import filter_none
from redash.utils.configuration import ConfigurationContainer, ValidationError
from redash.tasks.general import test_connection, get_schema
@@ -31,17 +36,12 @@
class DataSourceTypeListResource(BaseResource):
@require_admin
def get(self):
- return [
- q.to_dict()
- for q in sorted(query_runners.values(), key=lambda q: q.name().lower())
- ]
+ return [q.to_dict() for q in sorted(query_runners.values(), key=lambda q: q.name().lower())]
class DataSourceResource(BaseResource):
def get(self, data_source_id):
- data_source = get_object_or_404(
- models.DataSource.get_by_id_and_org, data_source_id, self.current_org
- )
+ data_source = get_object_or_404(models.DataSource.get_by_id_and_org, data_source_id, self.current_org)
require_access(data_source, self.current_user, view_only)
ds = {}
@@ -50,19 +50,13 @@ def get(self, data_source_id):
ds = data_source.to_dict(all=self.current_user.has_permission("admin"))
# add view_only info, required for frontend permissions
- ds["view_only"] = all(
- project(data_source.groups, self.current_user.group_ids).values()
- )
- self.record_event(
- {"action": "view", "object_id": data_source_id, "object_type": "datasource"}
- )
+ ds["view_only"] = all(project(data_source.groups, self.current_user.group_ids).values())
+ self.record_event({"action": "view", "object_id": data_source_id, "object_type": "datasource"})
return ds
@require_admin
def post(self, data_source_id):
- data_source = models.DataSource.get_by_id_and_org(
- data_source_id, self.current_org
- )
+ data_source = models.DataSource.get_by_id_and_org(data_source_id, self.current_org)
req = request.get_json(True)
schema = get_configuration_schema_for_query_runner_type(req["type"])
@@ -84,24 +78,18 @@ def post(self, data_source_id):
if req["name"] in str(e):
abort(
400,
- message="Data source with the name {} already exists.".format(
- req["name"]
- ),
+ message="Data source with the name {} already exists.".format(req["name"]),
)
abort(400)
- self.record_event(
- {"action": "edit", "object_id": data_source.id, "object_type": "datasource"}
- )
+ self.record_event({"action": "edit", "object_id": data_source.id, "object_type": "datasource"})
return data_source.to_dict(all=True)
@require_admin
def delete(self, data_source_id):
- data_source = models.DataSource.get_by_id_and_org(
- data_source_id, self.current_org
- )
+ data_source = models.DataSource.get_by_id_and_org(data_source_id, self.current_org)
data_source.delete()
self.record_event(
@@ -121,9 +109,7 @@ def get(self):
if self.current_user.has_permission("admin"):
data_sources = models.DataSource.all(self.current_org)
else:
- data_sources = models.DataSource.all(
- self.current_org, group_ids=self.current_user.group_ids
- )
+ data_sources = models.DataSource.all(self.current_org, group_ids=self.current_user.group_ids)
response = {}
for ds in data_sources:
@@ -132,14 +118,10 @@ def get(self):
try:
d = ds.to_dict()
- d["view_only"] = all(
- project(ds.groups, self.current_user.group_ids).values()
- )
+ d["view_only"] = all(project(ds.groups, self.current_user.group_ids).values())
response[ds.id] = d
except AttributeError:
- logging.exception(
- "Error with DataSource#to_dict (data source id: %d)", ds.id
- )
+ logging.exception("Error with DataSource#to_dict (data source id: %d)", ds.id)
self.record_event(
{
@@ -177,9 +159,7 @@ def post(self):
if req["name"] in str(e):
abort(
400,
- message="Data source with the name {} already exists.".format(
- req["name"]
- ),
+ message="Data source with the name {} already exists.".format(req["name"]),
)
abort(400)
@@ -197,9 +177,7 @@ def post(self):
class DataSourceSchemaResource(BaseResource):
def get(self, data_source_id):
- data_source = get_object_or_404(
- models.DataSource.get_by_id_and_org, data_source_id, self.current_org
- )
+ data_source = get_object_or_404(models.DataSource.get_by_id_and_org, data_source_id, self.current_org)
require_access(data_source, self.current_user, view_only)
refresh = request.args.get("refresh") is not None
@@ -217,9 +195,7 @@ def get(self, data_source_id):
class DataSourcePauseResource(BaseResource):
@require_admin
def post(self, data_source_id):
- data_source = get_object_or_404(
- models.DataSource.get_by_id_and_org, data_source_id, self.current_org
- )
+ data_source = get_object_or_404(models.DataSource.get_by_id_and_org, data_source_id, self.current_org)
data = request.get_json(force=True, silent=True)
if data:
reason = data.get("reason")
@@ -239,9 +215,7 @@ def post(self, data_source_id):
@require_admin
def delete(self, data_source_id):
- data_source = get_object_or_404(
- models.DataSource.get_by_id_and_org, data_source_id, self.current_org
- )
+ data_source = get_object_or_404(models.DataSource.get_by_id_and_org, data_source_id, self.current_org)
data_source.resume()
self.record_event(
@@ -257,9 +231,7 @@ def delete(self, data_source_id):
class DataSourceTestResource(BaseResource):
@require_admin
def post(self, data_source_id):
- data_source = get_object_or_404(
- models.DataSource.get_by_id_and_org, data_source_id, self.current_org
- )
+ data_source = get_object_or_404(models.DataSource.get_by_id_and_org, data_source_id, self.current_org)
response = {}
diff --git a/redash/handlers/databricks.py b/redash/handlers/databricks.py
index 0c36bc74be..3b8b43305f 100644
--- a/redash/handlers/databricks.py
+++ b/redash/handlers/databricks.py
@@ -1,25 +1,21 @@
-from flask_restful import abort
from flask import request
+from flask_restful import abort
+
from redash import models, redis_connection
from redash.handlers.base import BaseResource, get_object_or_404
-from redash.permissions import (
- require_access,
- view_only,
-)
+from redash.permissions import require_access, view_only
+from redash.serializers import serialize_job
from redash.tasks.databricks import (
- get_databricks_databases,
- get_databricks_tables,
get_database_tables_with_columns,
+ get_databricks_databases,
get_databricks_table_columns,
+ get_databricks_tables,
)
-from redash.serializers import serialize_job
-from redash.utils import json_loads, json_dumps
+from redash.utils import json_loads
def _get_databricks_data_source(data_source_id, user, org):
- data_source = get_object_or_404(
- models.DataSource.get_by_id_and_org, data_source_id, org
- )
+ data_source = get_object_or_404(models.DataSource.get_by_id_and_org, data_source_id, org)
require_access(data_source, user, view_only)
if not data_source.type == "databricks":
@@ -48,9 +44,7 @@ def _get_tables_from_cache(data_source_id, database_name):
class DatabricksDatabaseListResource(BaseResource):
def get(self, data_source_id):
- data_source = _get_databricks_data_source(
- data_source_id, user=self.current_user, org=self.current_org
- )
+ data_source = _get_databricks_data_source(data_source_id, user=self.current_user, org=self.current_org)
refresh = request.args.get("refresh") is not None
if not refresh:
@@ -59,17 +53,13 @@ def get(self, data_source_id):
if cached_databases is not None:
return cached_databases
- job = get_databricks_databases.delay(
- data_source.id, redis_key=_databases_key(data_source_id)
- )
+ job = get_databricks_databases.delay(data_source.id, redis_key=_databases_key(data_source_id))
return serialize_job(job)
class DatabricksSchemaResource(BaseResource):
def get(self, data_source_id, database_name):
- data_source = _get_databricks_data_source(
- data_source_id, user=self.current_user, org=self.current_org
- )
+ data_source = _get_databricks_data_source(data_source_id, user=self.current_user, org=self.current_org)
refresh = request.args.get("refresh") is not None
if not refresh:
@@ -89,9 +79,7 @@ def get(self, data_source_id, database_name):
class DatabricksTableColumnListResource(BaseResource):
def get(self, data_source_id, database_name, table_name):
- data_source = _get_databricks_data_source(
- data_source_id, user=self.current_user, org=self.current_org
- )
+ data_source = _get_databricks_data_source(data_source_id, user=self.current_user, org=self.current_org)
job = get_databricks_table_columns.delay(data_source.id, database_name, table_name)
return serialize_job(job)
diff --git a/redash/handlers/destinations.py b/redash/handlers/destinations.py
index e935ed84bb..15a80865d1 100644
--- a/redash/handlers/destinations.py
+++ b/redash/handlers/destinations.py
@@ -21,9 +21,7 @@ def get(self):
class DestinationResource(BaseResource):
@require_admin
def get(self, destination_id):
- destination = models.NotificationDestination.get_by_id_and_org(
- destination_id, self.current_org
- )
+ destination = models.NotificationDestination.get_by_id_and_org(destination_id, self.current_org)
d = destination.to_dict(all=True)
self.record_event(
{
@@ -36,9 +34,7 @@ def get(self, destination_id):
@require_admin
def post(self, destination_id):
- destination = models.NotificationDestination.get_by_id_and_org(
- destination_id, self.current_org
- )
+ destination = models.NotificationDestination.get_by_id_and_org(destination_id, self.current_org)
req = request.get_json(True)
schema = get_configuration_schema_for_destination_type(req["type"])
@@ -58,9 +54,7 @@ def post(self, destination_id):
if "name" in str(e):
abort(
400,
- message="Alert Destination with the name {} already exists.".format(
- req["name"]
- ),
+ message="Alert Destination with the name {} already exists.".format(req["name"]),
)
abort(500)
@@ -68,9 +62,7 @@ def post(self, destination_id):
@require_admin
def delete(self, destination_id):
- destination = models.NotificationDestination.get_by_id_and_org(
- destination_id, self.current_org
- )
+ destination = models.NotificationDestination.get_by_id_and_org(destination_id, self.current_org)
models.db.session.delete(destination)
models.db.session.commit()
@@ -135,9 +127,7 @@ def post(self):
if "name" in str(e):
abort(
400,
- message="Alert Destination with the name {} already exists.".format(
- req["name"]
- ),
+ message="Alert Destination with the name {} already exists.".format(req["name"]),
)
abort(500)
diff --git a/redash/handlers/embed.py b/redash/handlers/embed.py
index 190c3b6c54..826281b092 100644
--- a/redash/handlers/embed.py
+++ b/redash/handlers/embed.py
@@ -2,6 +2,7 @@
from .authentication import current_org
from flask_login import current_user, login_required
+
from redash import models
from redash.handlers import routes
@@ -13,6 +14,8 @@
logger = get_job_logger(__name__)
+from .authentication import current_org
+
@routes.route(
org_scoped_rule("/embed/query//visualization/"),
diff --git a/redash/handlers/events.py b/redash/handlers/events.py
index 6ecbe84758..f77a72e830 100644
--- a/redash/handlers/events.py
+++ b/redash/handlers/events.py
@@ -1,6 +1,6 @@
-from flask import request
import geolite2
import maxminddb
+from flask import request
from user_agents import parse as parse_ua
from redash.handlers.base import BaseResource, paginate
@@ -44,9 +44,7 @@ def serialize_event(event):
}
if event.user_id:
- d["user_name"] = event.additional_properties.get(
- "user_name", "User {}".format(event.user_id)
- )
+ d["user_name"] = event.additional_properties.get("user_name", "User {}".format(event.user_id))
if not event.user_id:
d["user_name"] = event.additional_properties.get("api_key", "Unknown")
diff --git a/redash/handlers/favorites.py b/redash/handlers/favorites.py
index 71a0ac3db1..796dfb6ab1 100644
--- a/redash/handlers/favorites.py
+++ b/redash/handlers/favorites.py
@@ -1,21 +1,16 @@
-from flask import request
from sqlalchemy.exc import IntegrityError
from redash import models
-from redash.handlers.base import BaseResource, get_object_or_404, paginate
+from redash.handlers.base import BaseResource, get_object_or_404
from redash.permissions import require_access, view_only
class QueryFavoriteResource(BaseResource):
def post(self, query_id):
- query = get_object_or_404(
- models.Query.get_by_id_and_org, query_id, self.current_org
- )
+ query = get_object_or_404(models.Query.get_by_id_and_org, query_id, self.current_org)
require_access(query, self.current_user, view_only)
- fav = models.Favorite(
- org_id=self.current_org.id, object=query, user=self.current_user
- )
+ fav = models.Favorite(org_id=self.current_org.id, object=query, user=self.current_user)
models.db.session.add(fav)
try:
@@ -26,14 +21,10 @@ def post(self, query_id):
else:
raise e
- self.record_event(
- {"action": "favorite", "object_id": query.id, "object_type": "query"}
- )
+ self.record_event({"action": "favorite", "object_id": query.id, "object_type": "query"})
def delete(self, query_id):
- query = get_object_or_404(
- models.Query.get_by_id_and_org, query_id, self.current_org
- )
+ query = get_object_or_404(models.Query.get_by_id_and_org, query_id, self.current_org)
require_access(query, self.current_user, view_only)
models.Favorite.query.filter(
@@ -43,19 +34,13 @@ def delete(self, query_id):
).delete()
models.db.session.commit()
- self.record_event(
- {"action": "favorite", "object_id": query.id, "object_type": "query"}
- )
+ self.record_event({"action": "favorite", "object_id": query.id, "object_type": "query"})
class DashboardFavoriteResource(BaseResource):
def post(self, object_id):
- dashboard = get_object_or_404(
- models.Dashboard.get_by_id_and_org, object_id, self.current_org
- )
- fav = models.Favorite(
- org_id=self.current_org.id, object=dashboard, user=self.current_user
- )
+ dashboard = get_object_or_404(models.Dashboard.get_by_id_and_org, object_id, self.current_org)
+ fav = models.Favorite(org_id=self.current_org.id, object=dashboard, user=self.current_user)
models.db.session.add(fav)
try:
@@ -75,9 +60,7 @@ def post(self, object_id):
)
def delete(self, object_id):
- dashboard = get_object_or_404(
- models.Dashboard.get_by_id_and_org, object_id, self.current_org
- )
+ dashboard = get_object_or_404(models.Dashboard.get_by_id_and_org, object_id, self.current_org)
models.Favorite.query.filter(
models.Favorite.object == dashboard,
models.Favorite.user == self.current_user,
diff --git a/redash/handlers/groups.py b/redash/handlers/groups.py
index 40839e0345..200c31507f 100644
--- a/redash/handlers/groups.py
+++ b/redash/handlers/groups.py
@@ -1,9 +1,9 @@
-import time
from flask import request
from flask_restful import abort
+
from redash import models
-from redash.permissions import require_admin, require_permission
from redash.handlers.base import BaseResource, get_object_or_404
+from redash.permissions import require_admin, require_permission
class GroupListResource(BaseResource):
@@ -14,9 +14,7 @@ def post(self):
models.db.session.add(group)
models.db.session.commit()
- self.record_event(
- {"action": "create", "object_id": group.id, "object_type": "group"}
- )
+ self.record_event({"action": "create", "object_id": group.id, "object_type": "group"})
return group.to_dict()
@@ -24,13 +22,9 @@ def get(self):
if self.current_user.has_permission("admin"):
groups = models.Group.all(self.current_org)
else:
- groups = models.Group.query.filter(
- models.Group.id.in_(self.current_user.group_ids)
- )
+ groups = models.Group.query.filter(models.Group.id.in_(self.current_user.group_ids))
- self.record_event(
- {"action": "list", "object_id": "groups", "object_type": "group"}
- )
+ self.record_event({"action": "list", "object_id": "groups", "object_type": "group"})
return [g.to_dict() for g in groups]
@@ -46,24 +40,17 @@ def post(self, group_id):
group.name = request.json["name"]
models.db.session.commit()
- self.record_event(
- {"action": "edit", "object_id": group.id, "object_type": "group"}
- )
+ self.record_event({"action": "edit", "object_id": group.id, "object_type": "group"})
return group.to_dict()
def get(self, group_id):
- if not (
- self.current_user.has_permission("admin")
- or int(group_id) in self.current_user.group_ids
- ):
+ if not (self.current_user.has_permission("admin") or int(group_id) in self.current_user.group_ids):
abort(403)
group = models.Group.get_by_id_and_org(group_id, self.current_org)
- self.record_event(
- {"action": "view", "object_id": group_id, "object_type": "group"}
- )
+ self.record_event({"action": "view", "object_id": group_id, "object_type": "group"})
return group.to_dict()
@@ -103,10 +90,7 @@ def post(self, group_id):
@require_permission("list_users")
def get(self, group_id):
- if not (
- self.current_user.has_permission("admin")
- or int(group_id) in self.current_user.group_ids
- ):
+ if not (self.current_user.has_permission("admin") or int(group_id) in self.current_user.group_ids):
abort(403)
members = models.Group.members(group_id)
@@ -140,9 +124,7 @@ class GroupDataSourceListResource(BaseResource):
@require_admin
def post(self, group_id):
data_source_id = request.json["data_source_id"]
- data_source = models.DataSource.get_by_id_and_org(
- data_source_id, self.current_org
- )
+ data_source = models.DataSource.get_by_id_and_org(data_source_id, self.current_org)
group = models.Group.get_by_id_and_org(group_id, self.current_org)
data_source_group = data_source.add_group(group)
@@ -161,18 +143,14 @@ def post(self, group_id):
@require_admin
def get(self, group_id):
- group = get_object_or_404(
- models.Group.get_by_id_and_org, group_id, self.current_org
- )
+ group = get_object_or_404(models.Group.get_by_id_and_org, group_id, self.current_org)
# TOOD: move to models
data_sources = models.DataSource.query.join(models.DataSourceGroup).filter(
models.DataSourceGroup.group == group
)
- self.record_event(
- {"action": "list", "object_id": group_id, "object_type": "group"}
- )
+ self.record_event({"action": "list", "object_id": group_id, "object_type": "group"})
return [ds.to_dict(with_permissions_for=group) for ds in data_sources]
@@ -180,9 +158,7 @@ def get(self, group_id):
class GroupDataSourceResource(BaseResource):
@require_admin
def post(self, group_id, data_source_id):
- data_source = models.DataSource.get_by_id_and_org(
- data_source_id, self.current_org
- )
+ data_source = models.DataSource.get_by_id_and_org(data_source_id, self.current_org)
group = models.Group.get_by_id_and_org(group_id, self.current_org)
view_only = request.json["view_only"]
@@ -203,9 +179,7 @@ def post(self, group_id, data_source_id):
@require_admin
def delete(self, group_id, data_source_id):
- data_source = models.DataSource.get_by_id_and_org(
- data_source_id, self.current_org
- )
+ data_source = models.DataSource.get_by_id_and_org(data_source_id, self.current_org)
group = models.Group.get_by_id_and_org(group_id, self.current_org)
data_source.remove_group(group)
diff --git a/redash/handlers/organization.py b/redash/handlers/organization.py
index f39548f8ce..4fa004f5f6 100644
--- a/redash/handlers/organization.py
+++ b/redash/handlers/organization.py
@@ -1,9 +1,9 @@
from flask_login import current_user, login_required
from redash import models
+from redash.authentication import current_org
from redash.handlers import routes
from redash.handlers.base import json_response, org_scoped_rule
-from redash.authentication import current_org
@routes.route(org_scoped_rule("/api/organization/status"), methods=["GET"])
@@ -12,14 +12,10 @@ def organization_status(org_slug=None):
counters = {
"users": models.User.all(current_org).count(),
"alerts": models.Alert.all(group_ids=current_user.group_ids).count(),
- "data_sources": models.DataSource.all(
- current_org, group_ids=current_user.group_ids
- ).count(),
- "queries": models.Query.all_queries(
- current_user.group_ids, current_user.id, include_drafts=True
- ).count(),
+ "data_sources": models.DataSource.all(current_org, group_ids=current_user.group_ids).count(),
+ "queries": models.Query.all_queries(current_user.group_ids, current_user.id, include_drafts=True).count(),
"dashboards": models.Dashboard.query.filter(
- models.Dashboard.org == current_org, models.Dashboard.is_archived == False
+ models.Dashboard.org == current_org, models.Dashboard.is_archived.is_(False)
).count(),
}
diff --git a/redash/handlers/permissions.py b/redash/handlers/permissions.py
index 94bf111eb6..731ddcc3f5 100644
--- a/redash/handlers/permissions.py
+++ b/redash/handlers/permissions.py
@@ -1,12 +1,12 @@
from collections import defaultdict
-from redash.handlers.base import BaseResource, get_object_or_404
-from redash.models import AccessPermission, Query, Dashboard, User, db
-from redash.permissions import require_admin_or_owner, ACCESS_TYPES
from flask import request
from flask_restful import abort
from sqlalchemy.orm.exc import NoResultFound
+from redash.handlers.base import BaseResource, get_object_or_404
+from redash.models import AccessPermission, Dashboard, Query, User, db
+from redash.permissions import ACCESS_TYPES, require_admin_or_owner
model_to_types = {"queries": Query, "dashboards": Dashboard}
@@ -51,9 +51,7 @@ def post(self, object_type, object_id):
except NoResultFound:
abort(400, message="User not found.")
- permission = AccessPermission.grant(
- obj, access_type, grantee, self.current_user
- )
+ permission = AccessPermission.grant(obj, access_type, grantee, self.current_user)
db.session.commit()
self.record_event(
diff --git a/redash/handlers/queries.py b/redash/handlers/queries.py
index e9ee245828..71ae418da8 100644
--- a/redash/handlers/queries.py
+++ b/redash/handlers/queries.py
@@ -2,8 +2,8 @@
from flask import jsonify, request, url_for
from flask_login import login_required
from flask_restful import abort
-from sqlalchemy.orm.exc import StaleDataError
from funcy import partial
+from sqlalchemy.orm.exc import StaleDataError
from redash import models, settings
from redash.authentication.org_resolving import current_org
@@ -14,9 +14,10 @@
org_scoped_rule,
paginate,
routes,
- order_results as _order_results,
)
+from redash.handlers.base import order_results as _order_results
from redash.handlers.query_results import run_query
+from redash.models.parameterized_query import ParameterizedQuery
from redash.permissions import (
can_modify,
not_view_only,
@@ -26,10 +27,8 @@
require_permission,
view_only,
)
-from redash.utils import collect_parameters_from_request
from redash.serializers import QuerySerializer
-from redash.models.parameterized_query import ParameterizedQuery
-
+from redash.utils import collect_parameters_from_request
# Ordering map for relationships
order_map = {
@@ -47,9 +46,7 @@
"-created_by": "-users-name",
}
-order_results = partial(
- _order_results, default_order="-created_at", allowed_orders=order_map
-)
+order_results = partial(_order_results, default_order="-created_at", allowed_orders=order_map)
@routes.route(org_scoped_rule("/api/queries/format"), methods=["POST"])
@@ -64,9 +61,7 @@ def format_sql_query(org_slug=None):
arguments = request.get_json(force=True)
query = arguments.get("query", "")
- return jsonify(
- {"query": sqlparse.format(query, **settings.SQLPARSE_FORMAT_OPTIONS)}
- )
+ return jsonify({"query": sqlparse.format(query, **settings.SQLPARSE_FORMAT_OPTIONS)})
class QuerySearchResource(BaseResource):
@@ -107,14 +102,8 @@ def get(self):
        Responds with a list of :ref:`query <query-response-label>` objects.
"""
- results = (
- models.Query.by_user(self.current_user)
- .order_by(models.Query.updated_at.desc())
- .limit(10)
- )
- return QuerySerializer(
- results, with_last_modified_by=False, with_user=False
- ).serialize()
+ results = models.Query.by_user(self.current_user).order_by(models.Query.updated_at.desc()).limit(10)
+ return QuerySerializer(results, with_last_modified_by=False, with_user=False).serialize()
class BaseQueryListResource(BaseResource):
@@ -128,9 +117,7 @@ def get_queries(self, search_term):
multi_byte_search=current_org.get_setting("multi_byte_search_enabled"),
)
else:
- results = models.Query.all_queries(
- self.current_user.group_ids, self.current_user.id, include_drafts=True
- )
+ results = models.Query.all_queries(self.current_user.group_ids, self.current_user.id, include_drafts=True)
return filter_by_tags(results, models.Query.tags)
@require_permission("view_query")
@@ -170,9 +157,7 @@ def get(self):
)
if search_term:
- self.record_event(
- {"action": "search", "object_type": "query", "term": search_term}
- )
+ self.record_event({"action": "search", "object_type": "query", "term": search_term})
else:
self.record_event({"action": "list", "object_type": "query"})
@@ -181,9 +166,7 @@ def get(self):
def require_access_to_dropdown_queries(user, query_def):
parameters = query_def.get("options", {}).get("parameters", [])
- dropdown_query_ids = set(
- [str(p["queryId"]) for p in parameters if p["type"] == "query"]
- )
+ dropdown_query_ids = set([str(p["queryId"]) for p in parameters if p["type"] == "query"])
if dropdown_query_ids:
groups = models.Query.all_groups_for_query_ids(dropdown_query_ids)
@@ -234,9 +217,7 @@ def post(self):
:>json number runtime: Runtime of last query execution, in seconds (may be null)
"""
query_def = request.get_json(force=True)
- data_source = models.DataSource.get_by_id_and_org(
- query_def.pop("data_source_id"), self.current_org
- )
+ data_source = models.DataSource.get_by_id_and_org(query_def.pop("data_source_id"), self.current_org)
require_access(data_source, self.current_user, not_view_only)
require_access_to_dropdown_queries(self.current_user, query_def)
@@ -259,9 +240,7 @@ def post(self):
models.db.session.add(query)
models.db.session.commit()
- self.record_event(
- {"action": "create", "object_id": query.id, "object_type": "query"}
- )
+ self.record_event({"action": "create", "object_id": query.id, "object_type": "query"})
return QuerySerializer(query, with_visualizations=True).serialize()
@@ -301,7 +280,11 @@ def get(self):
"""
search_term = request.args.get("q", "")
if search_term:
- results = models.Query.search_by_user(search_term, self.current_user)
+ results = models.Query.search_by_user(
+ search_term,
+ self.current_user,
+ multi_byte_search=current_org.get_setting("multi_byte_search_enabled"),
+ )
else:
results = models.Query.by_user(self.current_user)
@@ -340,9 +323,7 @@ def post(self, query_id):
Responds with the updated :ref:`query ` object.
"""
- query = get_object_or_404(
- models.Query.get_by_id_and_org, query_id, self.current_org
- )
+ query = get_object_or_404(models.Query.get_by_id_and_org, query_id, self.current_org)
query_def = request.get_json(force=True)
require_object_modify_permission(query, self.current_user)
@@ -367,9 +348,7 @@ def post(self, query_id):
query_def["tags"] = [tag for tag in query_def["tags"] if tag]
if "data_source_id" in query_def:
- data_source = models.DataSource.get_by_id_and_org(
- query_def["data_source_id"], self.current_org
- )
+ data_source = models.DataSource.get_by_id_and_org(query_def["data_source_id"], self.current_org)
require_access(data_source, self.current_user, not_view_only)
query_def["last_modified_by"] = self.current_user
@@ -397,17 +376,13 @@ def get(self, query_id):
Responds with the :ref:`query ` contents.
"""
- q = get_object_or_404(
- models.Query.get_by_id_and_org, query_id, self.current_org
- )
+ q = get_object_or_404(models.Query.get_by_id_and_org, query_id, self.current_org)
require_access(q, self.current_user, view_only)
result = QuerySerializer(q, with_visualizations=True).serialize()
result["can_edit"] = can_modify(q, self.current_user)
- self.record_event(
- {"action": "view", "object_id": query_id, "object_type": "query"}
- )
+ self.record_event({"action": "view", "object_id": query_id, "object_type": "query"})
return result
@@ -418,9 +393,7 @@ def delete(self, query_id):
:param query_id: ID of query to archive
"""
- query = get_object_or_404(
- models.Query.get_by_id_and_org, query_id, self.current_org
- )
+ query = get_object_or_404(models.Query.get_by_id_and_org, query_id, self.current_org)
require_admin_or_owner(query.user_id)
query.archive(self.current_user)
models.db.session.commit()
@@ -429,9 +402,7 @@ def delete(self, query_id):
class QueryRegenerateApiKeyResource(BaseResource):
@require_permission("edit_query")
def post(self, query_id):
- query = get_object_or_404(
- models.Query.get_by_id_and_org, query_id, self.current_org
- )
+ query = get_object_or_404(models.Query.get_by_id_and_org, query_id, self.current_org)
require_admin_or_owner(query.user_id)
query.regenerate_api_key()
models.db.session.commit()
@@ -458,16 +429,12 @@ def post(self, query_id):
Responds with created :ref:`query ` object.
"""
- query = get_object_or_404(
- models.Query.get_by_id_and_org, query_id, self.current_org
- )
+ query = get_object_or_404(models.Query.get_by_id_and_org, query_id, self.current_org)
require_access(query.data_source, self.current_user, not_view_only)
forked_query = query.fork(self.current_user)
models.db.session.commit()
- self.record_event(
- {"action": "fork", "object_id": query_id, "object_type": "query"}
- )
+ self.record_event({"action": "fork", "object_id": query_id, "object_type": "query"})
return QuerySerializer(forked_query, with_visualizations=True).serialize()
@@ -487,17 +454,13 @@ def post(self, query_id):
if self.current_user.is_api_user():
abort(403, message="Please use a user API key.")
- query = get_object_or_404(
- models.Query.get_by_id_and_org, query_id, self.current_org
- )
+ query = get_object_or_404(models.Query.get_by_id_and_org, query_id, self.current_org)
require_access(query, self.current_user, not_view_only)
parameter_values = collect_parameters_from_request(request.args)
parameterized_query = ParameterizedQuery(query.query_text, org=self.current_org)
should_apply_auto_limit = query.options.get("apply_auto_limit", False)
- return run_query(
- parameterized_query, parameter_values, query.data_source, query.id, should_apply_auto_limit
- )
+ return run_query(parameterized_query, parameter_values, query.data_source, query.id, should_apply_auto_limit)
class QueryTagsResource(BaseResource):
@@ -519,6 +482,7 @@ def get(self):
self.current_user.group_ids,
include_drafts=True,
limit=None,
+ multi_byte_search=current_org.get_setting("multi_byte_search_enabled"),
)
favorites = models.Query.favorites(self.current_user, base_query=base_query)
else:
diff --git a/redash/handlers/query_results.py b/redash/handlers/query_results.py
index f4fe90fd3c..bfc4371d08 100644
--- a/redash/handlers/query_results.py
+++ b/redash/handlers/query_results.py
@@ -1,41 +1,40 @@
-import logging
-import time
-
import unicodedata
+from urllib.parse import quote
+
+import regex
from flask import make_response, request
from flask_login import current_user
from flask_restful import abort
-from werkzeug.urls import url_quote
+
from redash import models, settings
from redash.handlers.base import BaseResource, get_object_or_404, record_event
+from redash.models.parameterized_query import (
+ InvalidParameterError,
+ ParameterizedQuery,
+ QueryDetachedFromDataSourceError,
+ dropdown_values,
+)
from redash.permissions import (
has_access,
not_view_only,
require_access,
- require_permission,
require_any_of_permission,
+ require_permission,
view_only,
)
+from redash.serializers import (
+ serialize_job,
+ serialize_query_result,
+ serialize_query_result_to_dsv,
+ serialize_query_result_to_xlsx,
+)
from redash.tasks import Job
from redash.tasks.queries import enqueue_query
from redash.utils import (
collect_parameters_from_request,
json_dumps,
- utcnow,
to_filename,
)
-from redash.models.parameterized_query import (
- ParameterizedQuery,
- InvalidParameterError,
- QueryDetachedFromDataSourceError,
- dropdown_values,
-)
-from redash.serializers import (
- serialize_query_result,
- serialize_query_result_to_dsv,
- serialize_query_result_to_xlsx,
- serialize_job,
-)
def error_response(message, http_status=400):
@@ -51,23 +50,19 @@ def error_response(message, http_status=400):
"This query contains potentially unsafe parameters and cannot be executed with read-only access to this data source.",
403,
),
- "no_permission": error_response(
- "You do not have permission to run queries with this data source.", 403
- ),
- "select_data_source": error_response(
- "Please select data source to run this query.", 401
- ),
+ "no_permission": error_response("You do not have permission to run queries with this data source.", 403),
+ "select_data_source": error_response("Please select data source to run this query.", 401),
+ "no_data_source": error_response("Target data source not available.", 401),
}
-def run_query(
- query, parameters, data_source, query_id, should_apply_auto_limit, max_age=0
-):
+def run_query(query, parameters, data_source, query_id, should_apply_auto_limit, max_age=0):
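+ # A query can be detached from its data source (e.g. the source was deleted); fail fast with an explicit error.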
+ if not data_source:
+ return error_messages["no_data_source"]
+
if data_source.paused:
if data_source.pause_reason:
- message = "{} is paused ({}). Please try later.".format(
- data_source.name, data_source.pause_reason
- )
+ message = "{} is paused ({}). Please try later.".format(data_source.name, data_source.pause_reason)
else:
message = "{} is paused. Please try later.".format(data_source.name)
@@ -78,14 +73,10 @@ def run_query(
except (InvalidParameterError, QueryDetachedFromDataSourceError) as e:
abort(400, message=str(e))
- query_text = data_source.query_runner.apply_auto_limit(
- query.text, should_apply_auto_limit
- )
+ query_text = data_source.query_runner.apply_auto_limit(query.text, should_apply_auto_limit)
if query.missing_params:
- return error_response(
- "Missing parameter value for: {}".format(", ".join(query.missing_params))
- )
+ return error_response("Missing parameter value for: {}".format(", ".join(query.missing_params)))
if max_age == 0:
query_result = None
@@ -107,11 +98,7 @@ def run_query(
)
if query_result:
- return {
- "query_result": serialize_query_result(
- query_result, current_user.is_api_user()
- )
- }
+ return {"query_result": serialize_query_result(query_result, current_user.is_api_user())}
else:
job = enqueue_query(
query_text,
@@ -119,9 +106,7 @@ def run_query(
current_user.id,
current_user.is_api_user(),
metadata={
- "Username": repr(current_user)
- if current_user.is_api_user()
- else current_user.email,
+ "Username": current_user.get_actual_user(),
"query_id": query_id,
},
)
@@ -131,7 +116,8 @@ def run_query(
def get_download_filename(query_result, query, filetype):
retrieved_at = query_result.retrieved_at.strftime("%Y_%m_%d")
if query:
- filename = to_filename(query.name) if query.name != "" else str(query.id)
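+ # Strip non-printable characters (Unicode category C) from the query name before building the download filename.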
+ query_name = regex.sub(r"\p{C}", "", query.name)
+ filename = to_filename(query_name) if query_name != "" else str(query.id)
else:
filename = str(query_result.id)
return "{}_{}.{}".format(filename, retrieved_at, filetype)
@@ -145,10 +131,8 @@ def content_disposition_filenames(attachment_filename):
attachment_filename = attachment_filename.encode("ascii")
except UnicodeEncodeError:
filenames = {
- "filename": unicodedata.normalize("NFKD", attachment_filename).encode(
- "ascii", "ignore"
- ),
- "filename*": "UTF-8''%s" % url_quote(attachment_filename, safe=b""),
+ "filename": unicodedata.normalize("NFKD", attachment_filename).encode("ascii", "ignore"),
+ "filename*": "UTF-8''%s" % quote(attachment_filename, safe=b""),
}
else:
filenames = {"filename": attachment_filename}
@@ -180,18 +164,14 @@ def post(self):
max_age = -1
max_age = int(max_age)
query_id = params.get("query_id", "adhoc")
- parameters = params.get(
- "parameters", collect_parameters_from_request(request.args)
- )
+ parameters = params.get("parameters", collect_parameters_from_request(request.args))
parameterized_query = ParameterizedQuery(query, org=self.current_org)
should_apply_auto_limit = params.get("apply_auto_limit", False)
data_source_id = params.get("data_source_id")
if data_source_id:
- data_source = models.DataSource.get_by_id_and_org(
- params.get("data_source_id"), self.current_org
- )
+ data_source = models.DataSource.get_by_id_and_org(params.get("data_source_id"), self.current_org)
else:
return error_messages["select_data_source"]
@@ -213,9 +193,7 @@ def post(self):
class QueryResultDropdownResource(BaseResource):
def get(self, query_id):
- query = get_object_or_404(
- models.Query.get_by_id_and_org, query_id, self.current_org
- )
+ query = get_object_or_404(models.Query.get_by_id_and_org, query_id, self.current_org)
require_access(query.data_source, current_user, view_only)
try:
return dropdown_values(query_id, self.current_org)
@@ -225,18 +203,12 @@ def get(self, query_id):
class QueryDropdownsResource(BaseResource):
def get(self, query_id, dropdown_query_id):
- query = get_object_or_404(
- models.Query.get_by_id_and_org, query_id, self.current_org
- )
+ query = get_object_or_404(models.Query.get_by_id_and_org, query_id, self.current_org)
require_access(query, current_user, view_only)
- related_queries_ids = [
- p["queryId"] for p in query.parameters if p["type"] == "query"
- ]
+ related_queries_ids = [p["queryId"] for p in query.parameters if p["type"] == "query"]
if int(dropdown_query_id) not in related_queries_ids:
- dropdown_query = get_object_or_404(
- models.Query.get_by_id_and_org, dropdown_query_id, self.current_org
- )
+ dropdown_query = get_object_or_404(models.Query.get_by_id_and_org, dropdown_query_id, self.current_org)
require_access(dropdown_query.data_source, current_user, view_only)
return dropdown_values(dropdown_query_id, self.current_org)
@@ -250,9 +222,7 @@ def add_cors_headers(headers):
if set(["*", origin]) & settings.ACCESS_CONTROL_ALLOW_ORIGIN:
headers["Access-Control-Allow-Origin"] = origin
- headers["Access-Control-Allow-Credentials"] = str(
- settings.ACCESS_CONTROL_ALLOW_CREDENTIALS
- ).lower()
+ headers["Access-Control-Allow-Credentials"] = str(settings.ACCESS_CONTROL_ALLOW_CREDENTIALS).lower()
@require_any_of_permission(("view_query", "execute_query"))
def options(self, query_id=None, query_result_id=None, filetype="json"):
@@ -260,14 +230,10 @@ def options(self, query_id=None, query_result_id=None, filetype="json"):
self.add_cors_headers(headers)
if settings.ACCESS_CONTROL_REQUEST_METHOD:
- headers[
- "Access-Control-Request-Method"
- ] = settings.ACCESS_CONTROL_REQUEST_METHOD
+ headers["Access-Control-Request-Method"] = settings.ACCESS_CONTROL_REQUEST_METHOD
if settings.ACCESS_CONTROL_ALLOW_HEADERS:
- headers[
- "Access-Control-Allow-Headers"
- ] = settings.ACCESS_CONTROL_ALLOW_HEADERS
+ headers["Access-Control-Allow-Headers"] = settings.ACCESS_CONTROL_ALLOW_HEADERS
return make_response("", 200, headers)
@@ -292,16 +258,15 @@ def post(self, query_id):
max_age = -1
max_age = int(max_age)
- query = get_object_or_404(
- models.Query.get_by_id_and_org, query_id, self.current_org
- )
+ query = get_object_or_404(models.Query.get_by_id_and_org, query_id, self.current_org)
allow_executing_with_view_only_permissions = query.parameterized.is_safe
- should_apply_auto_limit = params.get("apply_auto_limit", False)
+ if "apply_auto_limit" in params:
+ should_apply_auto_limit = params.get("apply_auto_limit", False)
+ else:
+ should_apply_auto_limit = query.options.get("apply_auto_limit", False)
- if has_access(
- query, self.current_user, allow_executing_with_view_only_permissions
- ):
+ if has_access(query, self.current_user, allow_executing_with_view_only_permissions):
return run_query(
query.parameterized,
parameter_values,
@@ -342,38 +307,23 @@ def get(self, query_id=None, query_result_id=None, filetype="json"):
# should check for query parameters and shouldn't cache the result).
should_cache = query_result_id is not None
- parameter_values = collect_parameters_from_request(request.args)
- max_age = int(request.args.get("maxAge", 0))
-
query_result = None
query = None
if query_result_id:
- query_result = get_object_or_404(
- models.QueryResult.get_by_id_and_org, query_result_id, self.current_org
- )
+ query_result = get_object_or_404(models.QueryResult.get_by_id_and_org, query_result_id, self.current_org)
if query_id is not None:
- query = get_object_or_404(
- models.Query.get_by_id_and_org, query_id, self.current_org
- )
+ query = get_object_or_404(models.Query.get_by_id_and_org, query_id, self.current_org)
- if (
- query_result is None
- and query is not None
- and query.latest_query_data_id is not None
- ):
+ if query_result is None and query is not None and query.latest_query_data_id is not None:
query_result = get_object_or_404(
models.QueryResult.get_by_id_and_org,
query.latest_query_data_id,
self.current_org,
)
- if (
- query is not None
- and query_result is not None
- and self.current_user.is_api_user()
- ):
+ if query is not None and query_result is not None and self.current_user.is_api_user():
if query.query_hash != query_result.query_hash:
abort(404, message="No cached result found for this query.")
@@ -412,9 +362,7 @@ def get(self, query_id=None, query_result_id=None, filetype="json"):
self.add_cors_headers(response.headers)
if should_cache:
- response.headers.add_header(
- "Cache-Control", "private,max-age=%d" % ONE_YEAR
- )
+ response.headers.add_header("Cache-Control", "private,max-age=%d" % ONE_YEAR)
filename = get_download_filename(query_result, query, filetype)
@@ -435,22 +383,16 @@ def make_json_response(query_result):
@staticmethod
def make_csv_response(query_result):
headers = {"Content-Type": "text/csv; charset=UTF-8"}
- return make_response(
- serialize_query_result_to_dsv(query_result, ","), 200, headers
- )
+ return make_response(serialize_query_result_to_dsv(query_result, ","), 200, headers)
@staticmethod
def make_tsv_response(query_result):
headers = {"Content-Type": "text/tab-separated-values; charset=UTF-8"}
- return make_response(
- serialize_query_result_to_dsv(query_result, "\t"), 200, headers
- )
+ return make_response(serialize_query_result_to_dsv(query_result, "\t"), 200, headers)
@staticmethod
def make_excel_response(query_result):
- headers = {
- "Content-Type": "application/vnd.openxmlformats-officedocument.spreadsheetml.sheet"
- }
+ headers = {"Content-Type": "application/vnd.openxmlformats-officedocument.spreadsheetml.sheet"}
return make_response(serialize_query_result_to_xlsx(query_result), 200, headers)
diff --git a/redash/handlers/query_snippets.py b/redash/handlers/query_snippets.py
index 64808de522..b02eb74425 100644
--- a/redash/handlers/query_snippets.py
+++ b/redash/handlers/query_snippets.py
@@ -2,42 +2,36 @@
from funcy import project
from redash import models
+from redash.handlers.base import (
+ BaseResource,
+ get_object_or_404,
+ require_fields,
+)
from redash.permissions import require_admin_or_owner
-from redash.handlers.base import BaseResource, require_fields, get_object_or_404
class QuerySnippetResource(BaseResource):
def get(self, snippet_id):
- snippet = get_object_or_404(
- models.QuerySnippet.get_by_id_and_org, snippet_id, self.current_org
- )
+ snippet = get_object_or_404(models.QuerySnippet.get_by_id_and_org, snippet_id, self.current_org)
- self.record_event(
- {"action": "view", "object_id": snippet_id, "object_type": "query_snippet"}
- )
+ self.record_event({"action": "view", "object_id": snippet_id, "object_type": "query_snippet"})
return snippet.to_dict()
def post(self, snippet_id):
req = request.get_json(True)
params = project(req, ("trigger", "description", "snippet"))
- snippet = get_object_or_404(
- models.QuerySnippet.get_by_id_and_org, snippet_id, self.current_org
- )
+ snippet = get_object_or_404(models.QuerySnippet.get_by_id_and_org, snippet_id, self.current_org)
require_admin_or_owner(snippet.user.id)
self.update_model(snippet, params)
models.db.session.commit()
- self.record_event(
- {"action": "edit", "object_id": snippet.id, "object_type": "query_snippet"}
- )
+ self.record_event({"action": "edit", "object_id": snippet.id, "object_type": "query_snippet"})
return snippet.to_dict()
def delete(self, snippet_id):
- snippet = get_object_or_404(
- models.QuerySnippet.get_by_id_and_org, snippet_id, self.current_org
- )
+ snippet = get_object_or_404(models.QuerySnippet.get_by_id_and_org, snippet_id, self.current_org)
require_admin_or_owner(snippet.user.id)
models.db.session.delete(snippet)
models.db.session.commit()
@@ -79,7 +73,4 @@ def post(self):
def get(self):
self.record_event({"action": "list", "object_type": "query_snippet"})
- return [
- snippet.to_dict()
- for snippet in models.QuerySnippet.all(org=self.current_org)
- ]
+ return [snippet.to_dict() for snippet in models.QuerySnippet.all(org=self.current_org)]
diff --git a/redash/handlers/settings.py b/redash/handlers/settings.py
index d684f42c35..ecf2796e57 100644
--- a/redash/handlers/settings.py
+++ b/redash/handlers/settings.py
@@ -1,7 +1,7 @@
from flask import request
-from redash.models import db, Organization
-from redash.handlers.base import BaseResource, record_event
+from redash.handlers.base import BaseResource
+from redash.models import Organization, db
from redash.permissions import require_admin
from redash.settings.organization import settings as org_settings
@@ -45,9 +45,7 @@ def post(self):
previous_values[k] = self.current_org.google_apps_domains
self.current_org.settings[Organization.SETTING_GOOGLE_APPS_DOMAINS] = v
else:
- previous_values[k] = self.current_org.get_setting(
- k, raise_on_missing=False
- )
+ previous_values[k] = self.current_org.get_setting(k, raise_on_missing=False)
self.current_org.set_setting(k, v)
db.session.add(self.current_org)
diff --git a/redash/handlers/setup.py b/redash/handlers/setup.py
index caa9be8641..ac4bef5f46 100644
--- a/redash/handlers/setup.py
+++ b/redash/handlers/setup.py
@@ -1,13 +1,12 @@
from flask import g, redirect, render_template, request, url_for
-
from flask_login import login_user
+from wtforms import Form, PasswordField, StringField, validators
+from wtforms.fields.html5 import EmailField
+
from redash import settings
from redash.authentication.org_resolving import current_org
from redash.handlers.base import routes
from redash.models import Group, Organization, User, db
-from redash.tasks.general import subscribe
-from wtforms import BooleanField, Form, PasswordField, StringField, validators
-from wtforms.fields.html5 import EmailField
class SetupForm(Form):
@@ -15,15 +14,13 @@ class SetupForm(Form):
email = EmailField("Email Address", validators=[validators.Email()])
password = PasswordField("Password", validators=[validators.Length(6)])
org_name = StringField("Organization Name", validators=[validators.InputRequired()])
- security_notifications = BooleanField()
- newsletter = BooleanField()
def create_org(org_name, user_name, email, password):
default_org = Organization(name=org_name, slug="default", settings={})
admin_group = Group(
name="admin",
- permissions=["admin", "super_admin"],
+ permissions=Group.ADMIN_PERMISSIONS,
org=default_org,
type=Group.BUILTIN_GROUP,
)
@@ -53,25 +50,17 @@ def create_org(org_name, user_name, email, password):
@routes.route("/setup", methods=["GET", "POST"])
def setup():
- if current_org != None or settings.MULTI_ORG:
+ if current_org != None or settings.MULTI_ORG: # noqa: E711
return redirect("/")
form = SetupForm(request.form)
- form.newsletter.data = True
- form.security_notifications.data = True
if request.method == "POST" and form.validate():
- default_org, user = create_org(
- form.org_name.data, form.name.data, form.email.data, form.password.data
- )
+ default_org, user = create_org(form.org_name.data, form.name.data, form.email.data, form.password.data)
g.org = default_org
login_user(user)
- # signup to newsletter if needed
- if form.newsletter.data or form.security_notifications:
- subscribe.delay(form.data)
-
return redirect(url_for("redash.index", org_slug=None))
return render_template("setup.html", form=form)
diff --git a/redash/handlers/static.py b/redash/handlers/static.py
index 1a02b66379..71f10fedb4 100644
--- a/redash/handlers/static.py
+++ b/redash/handlers/static.py
@@ -1,6 +1,7 @@
-from flask import render_template, safe_join, send_file
-
+from flask import render_template, send_file
from flask_login import login_required
+from werkzeug.utils import safe_join
+
from redash import settings
from redash.handlers import routes
from redash.handlers.authentication import base_href
@@ -13,7 +14,7 @@ def render_index():
response = render_template("multi_org.html", base_href=base_href())
else:
full_path = safe_join(settings.STATIC_ASSETS_PATH, "index.html")
- response = send_file(full_path, **dict(cache_timeout=0, conditional=True))
+ response = send_file(full_path, **dict(max_age=0, conditional=True))
return response
diff --git a/redash/handlers/users.py b/redash/handlers/users.py
index 425e0ea4f7..0c5b305c82 100644
--- a/redash/handlers/users.py
+++ b/redash/handlers/users.py
@@ -1,39 +1,33 @@
-import re
-import time
+from disposable_email_domains import blacklist
from flask import request
-from flask_restful import abort
from flask_login import current_user, login_user
-from funcy import project
-from sqlalchemy.orm.exc import NoResultFound
+from flask_restful import abort
+from funcy import partial, project
from sqlalchemy.exc import IntegrityError
-from disposable_email_domains import blacklist
-from funcy import partial
+from sqlalchemy.orm.exc import NoResultFound
-from redash import models, limiter
-from redash.permissions import (
- require_permission,
- require_admin_or_owner,
- is_admin_or_owner,
- require_permission_or_owner,
- require_admin,
+from redash import limiter, models, settings
+from redash.authentication.account import (
+ invite_link_for_user,
+ send_invite_email,
+ send_password_reset_email,
+ send_verify_email,
)
from redash.handlers.base import (
BaseResource,
- require_fields,
get_object_or_404,
paginate,
- order_results as _order_results,
+ require_fields,
)
-
-from redash.authentication.account import (
- invite_link_for_user,
- send_invite_email,
- send_password_reset_email,
- send_verify_email,
+from redash.handlers.base import order_results as _order_results
+from redash.permissions import (
+ is_admin_or_owner,
+ require_admin,
+ require_admin_or_owner,
+ require_permission,
+ require_permission_or_owner,
)
from redash.settings import parse_boolean
-from redash import settings
-
# Ordering map for relationships
order_map = {
@@ -47,9 +41,7 @@
"-groups": "-group_ids",
}
-order_results = partial(
- _order_results, default_order="-created_at", allowed_orders=order_map
-)
+order_results = partial(_order_results, default_order="-created_at", allowed_orders=order_map)
def invite_user(org, inviter, user, send_email=True):
@@ -73,9 +65,7 @@ def require_allowed_email(email):
class UserListResource(BaseResource):
- decorators = BaseResource.decorators + [
- limiter.limit("200/day;50/hour", methods=["POST"])
- ]
+ decorators = BaseResource.decorators + [limiter.limit("200/day;50/hour", methods=["POST"])]
def get_users(self, disabled, pending, search_term):
if disabled:
@@ -97,9 +87,7 @@ def get_users(self, disabled, pending, search_term):
}
)
else:
- self.record_event(
- {"action": "list", "object_type": "user", "pending": pending}
- )
+ self.record_event({"action": "list", "object_type": "user", "pending": pending})
# order results according to passed order parameter,
# special-casing search queries where the database
@@ -131,9 +119,7 @@ def serialize_user(user):
disabled = request.args.get("disabled", "false") # get enabled users by default
disabled = parse_boolean(disabled)
- pending = request.args.get(
- "pending", None
- ) # get both active and pending by default
+ pending = request.args.get("pending", None) # get both active and pending by default
if pending is not None:
pending = parse_boolean(pending)
@@ -166,14 +152,10 @@ def post(self):
abort(400, message="Email already taken.")
abort(500)
- self.record_event(
- {"action": "create", "object_id": user.id, "object_type": "user"}
- )
+ self.record_event({"action": "create", "object_id": user.id, "object_type": "user"})
should_send_invitation = "no_invite" not in request.args
- return invite_user(
- self.current_org, self.current_user, user, send_email=should_send_invitation
- )
+ return invite_user(self.current_org, self.current_user, user, send_email=should_send_invitation)
class UserInviteResource(BaseResource):
@@ -205,9 +187,7 @@ def post(self, user_id):
user.regenerate_api_key()
models.db.session.commit()
- self.record_event(
- {"action": "regnerate_api_key", "object_id": user.id, "object_type": "user"}
- )
+ self.record_event({"action": "regnerate_api_key", "object_id": user.id, "object_type": "user"})
return user.to_dict(with_api_key=True)
@@ -217,32 +197,24 @@ class UserResource(BaseResource):
def get(self, user_id):
require_permission_or_owner("list_users", user_id)
- user = get_object_or_404(
- models.User.get_by_id_and_org, user_id, self.current_org
- )
+ user = get_object_or_404(models.User.get_by_id_and_org, user_id, self.current_org)
- self.record_event(
- {"action": "view", "object_id": user_id, "object_type": "user"}
- )
+ self.record_event({"action": "view", "object_id": user_id, "object_type": "user"})
return user.to_dict(with_api_key=is_admin_or_owner(user_id))
- def post(self, user_id):
+ def post(self, user_id): # noqa: C901
require_admin_or_owner(user_id)
user = models.User.get_by_id_and_org(user_id, self.current_org)
req = request.get_json(True)
- params = project(
- req, ("email", "name", "password", "old_password", "group_ids")
- )
+ params = project(req, ("email", "name", "password", "old_password", "group_ids"))
if "password" in params and "old_password" not in params:
abort(403, message="Must provide current password to update password.")
- if "old_password" in params and not user.verify_password(
- params["old_password"]
- ):
+ if "old_password" in params and not user.verify_password(params["old_password"]):
abort(403, message="Incorrect current password.")
if "password" in params:
@@ -266,9 +238,7 @@ def post(self, user_id):
require_allowed_email(params["email"])
email_address_changed = "email" in params and params["email"] != user.email
- needs_to_verify_email = (
- email_address_changed and settings.email_server_is_configured()
- )
+ needs_to_verify_email = email_address_changed and settings.email_server_is_configured()
if needs_to_verify_email:
user.is_email_verified = False
@@ -312,13 +282,13 @@ def delete(self, user_id):
abort(
403,
message="You cannot delete your own account. "
- "Please ask another admin to do this for you.",
+ "Please ask another admin to do this for you.", # fmt: skip
)
elif not user.is_invitation_pending:
abort(
403,
message="You cannot delete activated users. "
- "Please disable the user instead.",
+ "Please disable the user instead.", # fmt: skip
)
models.db.session.delete(user)
models.db.session.commit()
@@ -336,7 +306,7 @@ def post(self, user_id):
abort(
403,
message="You cannot disable your own account. "
- "Please ask another admin to do this for you.",
+ "Please ask another admin to do this for you.", # fmt: skip
)
user.disable()
models.db.session.commit()
diff --git a/redash/handlers/visualizations.py b/redash/handlers/visualizations.py
index 1621ea50cd..f29a1fb36c 100644
--- a/redash/handlers/visualizations.py
+++ b/redash/handlers/visualizations.py
@@ -2,9 +2,11 @@
from redash import models
from redash.handlers.base import BaseResource, get_object_or_404
+from redash.permissions import (
+ require_object_modify_permission,
+ require_permission,
+)
from redash.serializers import serialize_visualization
-from redash.permissions import require_object_modify_permission, require_permission
-from redash.utils import json_dumps
class VisualizationListResource(BaseResource):
@@ -12,12 +14,9 @@ class VisualizationListResource(BaseResource):
def post(self):
kwargs = request.get_json(force=True)
- query = get_object_or_404(
- models.Query.get_by_id_and_org, kwargs.pop("query_id"), self.current_org
- )
+ query = get_object_or_404(models.Query.get_by_id_and_org, kwargs.pop("query_id"), self.current_org)
require_object_modify_permission(query, self.current_user)
- kwargs["options"] = json_dumps(kwargs["options"])
kwargs["query_rel"] = query
vis = models.Visualization(**kwargs)
@@ -29,14 +28,10 @@ def post(self):
class VisualizationResource(BaseResource):
@require_permission("edit_query")
def post(self, visualization_id):
- vis = get_object_or_404(
- models.Visualization.get_by_id_and_org, visualization_id, self.current_org
- )
+ vis = get_object_or_404(models.Visualization.get_by_id_and_org, visualization_id, self.current_org)
require_object_modify_permission(vis.query_rel, self.current_user)
kwargs = request.get_json(force=True)
- if "options" in kwargs:
- kwargs["options"] = json_dumps(kwargs["options"])
kwargs.pop("id", None)
kwargs.pop("query_id", None)
@@ -48,9 +43,7 @@ def post(self, visualization_id):
@require_permission("edit_query")
def delete(self, visualization_id):
- vis = get_object_or_404(
- models.Visualization.get_by_id_and_org, visualization_id, self.current_org
- )
+ vis = get_object_or_404(models.Visualization.get_by_id_and_org, visualization_id, self.current_org)
require_object_modify_permission(vis.query_rel, self.current_user)
self.record_event(
{
diff --git a/redash/handlers/webpack.py b/redash/handlers/webpack.py
index 01a0342549..8b7cb2dc50 100644
--- a/redash/handlers/webpack.py
+++ b/redash/handlers/webpack.py
@@ -1,10 +1,9 @@
+import json
import os
-import simplejson
+
from flask import url_for
-WEBPACK_MANIFEST_PATH = os.path.join(
- os.path.dirname(__file__), "../../client/dist/", "asset-manifest.json"
-)
+WEBPACK_MANIFEST_PATH = os.path.join(os.path.dirname(__file__), "../../client/dist/", "asset-manifest.json")
def configure_webpack(app):
@@ -16,7 +15,7 @@ def get_asset(path):
if assets is None or app.debug:
try:
with open(WEBPACK_MANIFEST_PATH) as fp:
- assets = simplejson.load(fp)
+ assets = json.load(fp)
except IOError:
app.logger.exception("Unable to load webpack manifest")
assets = {}
diff --git a/redash/handlers/widgets.py b/redash/handlers/widgets.py
index 6907943405..051b6e386c 100644
--- a/redash/handlers/widgets.py
+++ b/redash/handlers/widgets.py
@@ -2,14 +2,13 @@
from redash import models
from redash.handlers.base import BaseResource
-from redash.serializers import serialize_widget
from redash.permissions import (
require_access,
require_object_modify_permission,
require_permission,
view_only,
)
-from redash.utils import json_dumps
+from redash.serializers import serialize_widget
class WidgetListResource(BaseResource):
@@ -27,19 +26,14 @@ def post(self):
:>json object widget: The created widget
"""
widget_properties = request.get_json(force=True)
- dashboard = models.Dashboard.get_by_id_and_org(
- widget_properties.get("dashboard_id"), self.current_org
- )
+ dashboard = models.Dashboard.get_by_id_and_org(widget_properties.get("dashboard_id"), self.current_org)
require_object_modify_permission(dashboard, self.current_user)
- widget_properties["options"] = json_dumps(widget_properties["options"])
widget_properties.pop("id", None)
visualization_id = widget_properties.pop("visualization_id")
if visualization_id:
- visualization = models.Visualization.get_by_id_and_org(
- visualization_id, self.current_org
- )
+ visualization = models.Visualization.get_by_id_and_org(visualization_id, self.current_org)
require_access(visualization.query_rel, self.current_user, view_only)
else:
visualization = None
@@ -48,7 +42,6 @@ def post(self):
widget = models.Widget(**widget_properties)
models.db.session.add(widget)
- models.db.session.commit()
models.db.session.commit()
return serialize_widget(widget)
@@ -69,7 +62,7 @@ def post(self, widget_id):
require_object_modify_permission(widget.dashboard, self.current_user)
widget_properties = request.get_json(force=True)
widget.text = widget_properties["text"]
- widget.options = json_dumps(widget_properties["options"])
+ widget.options = widget_properties["options"]
models.db.session.commit()
return serialize_widget(widget)
@@ -82,8 +75,6 @@ def delete(self, widget_id):
"""
widget = models.Widget.get_by_id_and_org(widget_id, self.current_org)
require_object_modify_permission(widget.dashboard, self.current_user)
- self.record_event(
- {"action": "delete", "object_id": widget_id, "object_type": "widget"}
- )
+ self.record_event({"action": "delete", "object_id": widget_id, "object_type": "widget"})
models.db.session.delete(widget)
models.db.session.commit()
diff --git a/redash/metrics/database.py b/redash/metrics/database.py
index 8b12765fe9..152427b2e5 100644
--- a/redash/metrics/database.py
+++ b/redash/metrics/database.py
@@ -2,13 +2,13 @@
import time
from flask import g, has_request_context
-
-from redash import statsd_client
from sqlalchemy.engine import Engine
from sqlalchemy.event import listens_for
from sqlalchemy.orm.util import _ORMJoin
from sqlalchemy.sql.selectable import Alias
+from redash import statsd_client
+
metrics_logger = logging.getLogger("metrics")
diff --git a/redash/metrics/request.py b/redash/metrics/request.py
index 7f94da4ad9..9dc169f3b7 100644
--- a/redash/metrics/request.py
+++ b/redash/metrics/request.py
@@ -9,7 +9,7 @@
metrics_logger = logging.getLogger("metrics")
-def record_requets_start_time():
+def record_request_start_time():
g.start_time = time.time()
@@ -35,16 +35,12 @@ def calculate_metrics(response):
queries_duration,
)
- statsd_client.timing(
- "requests.{}.{}".format(endpoint, request.method.lower()), request_duration
- )
+ statsd_client.timing("requests.{}.{}".format(endpoint, request.method.lower()), request_duration)
return response
-MockResponse = namedtuple(
- "MockResponse", ["status_code", "content_type", "content_length"]
-)
+MockResponse = namedtuple("MockResponse", ["status_code", "content_type", "content_length"])
def calculate_metrics_on_exception(error):
@@ -53,6 +49,6 @@ def calculate_metrics_on_exception(error):
def init_app(app):
- app.before_request(record_requets_start_time)
+ app.before_request(record_request_start_time)
app.after_request(calculate_metrics)
app.teardown_request(calculate_metrics_on_exception)
diff --git a/redash/models/__init__.py b/redash/models/__init__.py
index 1f9d8a0fd7..a0543ff5a0 100644
--- a/redash/models/__init__.py
+++ b/redash/models/__init__.py
@@ -1,65 +1,90 @@
-import datetime
import calendar
+import datetime
import logging
-import time
import numbers
-import pytz
+import time
-from sqlalchemy import distinct, or_, and_, UniqueConstraint, cast
-from sqlalchemy.dialects import postgresql
+import pytz
+from sqlalchemy import UniqueConstraint, and_, cast, distinct, func, or_
+from sqlalchemy.dialects.postgresql import ARRAY, DOUBLE_PRECISION, JSONB
from sqlalchemy.event import listens_for
from sqlalchemy.ext.hybrid import hybrid_property
-from sqlalchemy.orm import backref, contains_eager, joinedload, subqueryload, load_only
+from sqlalchemy.orm import (
+ backref,
+ contains_eager,
+ joinedload,
+ load_only,
+ subqueryload,
+)
from sqlalchemy.orm.exc import NoResultFound # noqa: F401
-from sqlalchemy import func
from sqlalchemy_utils import generic_relationship
-from sqlalchemy_utils.types import TSVectorType
from sqlalchemy_utils.models import generic_repr
+from sqlalchemy_utils.types import TSVectorType
from sqlalchemy_utils.types.encrypted.encrypted_type import FernetEngine
-from redash import redis_connection, utils, settings
+from redash import redis_connection, settings, utils
from redash.destinations import (
get_configuration_schema_for_destination_type,
get_destination,
)
from redash.metrics import database # noqa: F401
+from redash.models.base import (
+ Column,
+ GFKBase,
+ SearchBaseQuery,
+ db,
+ gfk_type,
+ key_type,
+ primary_key,
+)
+from redash.models.changes import Change, ChangeTrackingMixin # noqa
+from redash.models.mixins import BelongsToOrgMixin, TimestampMixin
+from redash.models.organizations import Organization
+from redash.models.parameterized_query import (
+ InvalidParameterError,
+ ParameterizedQuery,
+ QueryDetachedFromDataSourceError,
+)
+from redash.models.types import (
+ Configuration,
+ EncryptedConfiguration,
+ JSONText,
+ MutableDict,
+ MutableList,
+ json_cast_property,
+)
+from redash.models.users import ( # noqa
+ AccessPermission,
+ AnonymousUser,
+ ApiUser,
+ Group,
+ User,
+)
from redash.query_runner import (
- with_ssh_tunnel,
- get_configuration_schema_for_query_runner_type,
- get_query_runner,
TYPE_BOOLEAN,
TYPE_DATE,
TYPE_DATETIME,
- BaseQueryRunner)
+ BaseQueryRunner,
+ get_configuration_schema_for_query_runner_type,
+ get_query_runner,
+ with_ssh_tunnel,
+)
from redash.utils import (
+ base_url,
+ gen_query_hash,
generate_token,
json_dumps,
json_loads,
mustache_render,
- base_url,
+ mustache_render_escape,
sentry,
- gen_query_hash)
-from redash.utils.configuration import ConfigurationContainer
-from redash.models.parameterized_query import ParameterizedQuery
-
-from .base import db, gfk_type, Column, GFKBase, SearchBaseQuery, key_type, primary_key
-from .changes import ChangeTrackingMixin, Change # noqa
-from .mixins import BelongsToOrgMixin, TimestampMixin
-from .organizations import Organization
-from .types import (
- EncryptedConfiguration,
- Configuration,
- MutableDict,
- MutableList,
- PseudoJSON,
- pseudo_json_cast_property
)
-from .users import AccessPermission, AnonymousUser, ApiUser, Group, User # noqa
+from redash.utils.configuration import ConfigurationContainer
logger = logging.getLogger(__name__)
-class ScheduledQueriesExecutions(object):
+class ScheduledQueriesExecutions:
KEY_NAME = "sq:executed_at"
def __init__(self):
@@ -69,7 +94,7 @@ def refresh(self):
self.executions = redis_connection.hgetall(self.KEY_NAME)
def update(self, query_id):
- redis_connection.hmset(self.KEY_NAME, {query_id: time.time()})
+ redis_connection.hset(self.KEY_NAME, mapping={query_id: time.time()})
def get(self, query_id):
timestamp = self.executions.get(str(query_id))
@@ -93,20 +118,19 @@ class DataSource(BelongsToOrgMixin, db.Model):
options = Column(
"encrypted_options",
ConfigurationContainer.as_mutable(
- EncryptedConfiguration(
- db.Text, settings.DATASOURCE_SECRET_KEY, FernetEngine
- )
+ EncryptedConfiguration(db.Text, settings.DATASOURCE_SECRET_KEY, FernetEngine)
),
)
queue_name = Column(db.String(255), default="queries")
scheduled_queue_name = Column(db.String(255), default="scheduled_queries")
created_at = Column(db.DateTime(True), default=db.func.now())
- data_source_groups = db.relationship(
- "DataSourceGroup", back_populates="data_source", cascade="all"
- )
+ data_source_groups = db.relationship("DataSourceGroup", back_populates="data_source", cascade="all")
__tablename__ = "data_sources"
- __table_args__ = (db.Index("data_sources_org_id_name", "org_id", "name"),)
+ __table_args__ = (
+ db.Index("data_sources_org_id_name", "org_id", "name"),
+ {"extend_existing": True},
+ )
def __eq__(self, other):
return self.id == other.id
@@ -122,7 +146,7 @@ def to_dict(self, all=False, with_permissions_for=None):
"syntax": self.query_runner.syntax,
"paused": self.paused,
"pause_reason": self.pause_reason,
- "supports_auto_limit": self.query_runner.supports_auto_limit
+ "supports_auto_limit": self.query_runner.supports_auto_limit,
}
if all:
@@ -151,9 +175,7 @@ def __str__(self):
@classmethod
def create_with_group(cls, *args, **kwargs):
data_source = cls(*args, **kwargs)
- data_source_group = DataSourceGroup(
- data_source=data_source, group=data_source.org.default_group
- )
+ data_source_group = DataSourceGroup(data_source=data_source, group=data_source.org.default_group)
db.session.add_all([data_source, data_source_group])
return data_source
@@ -162,9 +184,7 @@ def all(cls, org, group_ids=None):
data_sources = cls.query.filter(cls.org == org).order_by(cls.id.asc())
if group_ids:
- data_sources = data_sources.join(DataSourceGroup).filter(
- DataSourceGroup.group_id.in_(group_ids)
- )
+ data_sources = data_sources.join(DataSourceGroup).filter(DataSourceGroup.group_id.in_(group_ids))
return data_sources.distinct()
@@ -173,9 +193,7 @@ def get_by_id(cls, _id):
return cls.query.filter(cls.id == _id).one()
def delete(self):
- Query.query.filter(Query.data_source == self).update(
- dict(data_source_id=None, latest_query_data_id=None)
- )
+ Query.query.filter(Query.data_source == self).update(dict(data_source_id=None, latest_query_data_id=None))
QueryResult.query.filter(QueryResult.data_source == self).delete()
res = db.session.delete(self)
db.session.commit()
@@ -200,12 +218,11 @@ def get_schema(self, refresh=False):
try:
out_schema = self._sort_schema(schema)
except Exception:
- logging.exception(
- "Error sorting schema columns for data_source {}".format(self.id)
- )
+ logging.exception("Error sorting schema columns for data_source {}".format(self.id))
out_schema = schema
finally:
- redis_connection.set(self._schema_key, json_dumps(out_schema))
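+ # Cache the schema with an expiry: the refresh interval plus a seven-day grace period.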
+ ttl = int(datetime.timedelta(minutes=settings.SCHEMAS_REFRESH_SCHEDULE, days=7).total_seconds())
+ redis_connection.set(self._schema_key, json_dumps(out_schema), ex=ttl)
return out_schema
@@ -243,22 +260,18 @@ def add_group(self, group, view_only=False):
return dsg
def remove_group(self, group):
- DataSourceGroup.query.filter(
- DataSourceGroup.group == group, DataSourceGroup.data_source == self
- ).delete()
+ DataSourceGroup.query.filter(DataSourceGroup.group == group, DataSourceGroup.data_source == self).delete()
db.session.commit()
def update_group_permission(self, group, view_only):
- dsg = DataSourceGroup.query.filter(
- DataSourceGroup.group == group, DataSourceGroup.data_source == self
- ).one()
+ dsg = DataSourceGroup.query.filter(DataSourceGroup.group == group, DataSourceGroup.data_source == self).one()
dsg.view_only = view_only
db.session.add(dsg)
return dsg
@property
def uses_ssh_tunnel(self):
- return "ssh_tunnel" in self.options
+ return self.options and "ssh_tunnel" in self.options
@property
def query_runner(self):
@@ -291,36 +304,11 @@ class DataSourceGroup(db.Model):
view_only = Column(db.Boolean, default=False)
__tablename__ = "data_source_groups"
-
-
-DESERIALIZED_DATA_ATTR = "_deserialized_data"
-
-
-class DBPersistence(object):
- @property
- def data(self):
- if self._data is None:
- return None
-
- if not hasattr(self, DESERIALIZED_DATA_ATTR):
- setattr(self, DESERIALIZED_DATA_ATTR, json_loads(self._data))
-
- return self._deserialized_data
-
- @data.setter
- def data(self, data):
- if hasattr(self, DESERIALIZED_DATA_ATTR):
- delattr(self, DESERIALIZED_DATA_ATTR)
- self._data = data
-
-
-QueryResultPersistence = (
- settings.dynamic_settings.QueryResultPersistence or DBPersistence
-)
+ __table_args__ = ({"extend_existing": True},)
@generic_repr("id", "org_id", "data_source_id", "query_hash", "runtime", "retrieved_at")
-class QueryResult(db.Model, QueryResultPersistence, BelongsToOrgMixin):
+class QueryResult(db.Model, BelongsToOrgMixin):
id = primary_key("QueryResult")
org_id = Column(key_type("Organization"), db.ForeignKey("organizations.id"))
org = db.relationship(Organization)
@@ -328,8 +316,8 @@ class QueryResult(db.Model, QueryResultPersistence, BelongsToOrgMixin):
data_source = db.relationship(DataSource, backref=backref("query_results"))
query_hash = Column(db.String(32), index=True)
query_text = Column("query", db.Text)
- _data = Column("data", db.Text)
- runtime = Column(postgresql.DOUBLE_PRECISION)
+ data = Column(JSONText, nullable=True)
+ runtime = Column(DOUBLE_PRECISION)
retrieved_at = Column(db.DateTime(True))
__tablename__ = "query_results"
@@ -351,27 +339,25 @@ def to_dict(self):
@classmethod
def unused(cls, days=7):
age_threshold = datetime.datetime.now() - datetime.timedelta(days=days)
- return (
- cls.query.filter(
- Query.id.is_(None), cls.retrieved_at < age_threshold
- ).outerjoin(Query)
- ).options(load_only("id"))
+ return (cls.query.filter(Query.id.is_(None), cls.retrieved_at < age_threshold).outerjoin(Query)).options(
+ load_only("id")
+ )
@classmethod
def get_latest(cls, data_source, query, max_age=0):
query_hash = gen_query_hash(query)
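+ # max_age == -1 normally accepts any cached result; with the expired-TTL setting enabled, fall back to the configured TTL.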
+ if max_age == -1 and settings.QUERY_RESULTS_EXPIRED_TTL_ENABLED:
+ max_age = settings.QUERY_RESULTS_EXPIRED_TTL
+
if max_age == -1:
- query = cls.query.filter(
- cls.query_hash == query_hash, cls.data_source == data_source
- )
+ query = cls.query.filter(cls.query_hash == query_hash, cls.data_source == data_source)
else:
query = cls.query.filter(
cls.query_hash == query_hash,
cls.data_source == data_source,
(
- db.func.timezone("utc", cls.retrieved_at)
- + datetime.timedelta(seconds=max_age)
+ db.func.timezone("utc", cls.retrieved_at) + datetime.timedelta(seconds=max_age)
>= db.func.timezone("utc", db.func.now())
),
)
@@ -379,9 +365,7 @@ def get_latest(cls, data_source, query, max_age=0):
return query.order_by(cls.retrieved_at.desc()).first()
@classmethod
- def store_result(
- cls, org, data_source, query_hash, query, data, run_time, retrieved_at
- ):
+ def store_result(cls, org, data_source, query_hash, query, data, run_time, retrieved_at):
query_result = cls(
org_id=org,
query_hash=query_hash,
@@ -402,9 +386,7 @@ def groups(self):
return self.data_source.groups
-def should_schedule_next(
- previous_iteration, now, interval, time=None, day_of_week=None, failures=0
-):
+def should_schedule_next(previous_iteration, now, interval, time=None, day_of_week=None, failures=0):
# if time exists then interval > 23 hours (82800s)
# if day_of_week exists then interval > 6 days (518400s)
if time is None:
@@ -418,32 +400,23 @@ def should_schedule_next(
# - The query scheduled to run at 23:59.
# - The scheduler wakes up at 00:01.
# - Using naive implementation of comparing timestamps, it will skip the execution.
- normalized_previous_iteration = previous_iteration.replace(
- hour=hour, minute=minute
- )
+ normalized_previous_iteration = previous_iteration.replace(hour=hour, minute=minute)
if normalized_previous_iteration > previous_iteration:
- previous_iteration = normalized_previous_iteration - datetime.timedelta(
- days=1
- )
+ previous_iteration = normalized_previous_iteration - datetime.timedelta(days=1)
days_delay = int(interval) / 60 / 60 / 24
days_to_add = 0
if day_of_week is not None:
- days_to_add = (
- list(calendar.day_name).index(day_of_week)
- - normalized_previous_iteration.weekday()
- )
+ days_to_add = list(calendar.day_name).index(day_of_week) - normalized_previous_iteration.weekday()
next_iteration = (
- previous_iteration
- + datetime.timedelta(days=days_delay)
- + datetime.timedelta(days=days_to_add)
+ previous_iteration + datetime.timedelta(days=days_delay) + datetime.timedelta(days=days_to_add)
).replace(hour=hour, minute=minute)
if failures:
try:
- next_iteration += datetime.timedelta(minutes=2 ** failures)
+ next_iteration += datetime.timedelta(minutes=2**failures)
except OverflowError:
return False
return now > next_iteration
@@ -472,9 +445,7 @@ class Query(ChangeTrackingMixin, TimestampMixin, BelongsToOrgMixin, db.Model):
org = db.relationship(Organization, backref="queries")
data_source_id = Column(key_type("DataSource"), db.ForeignKey("data_sources.id"), nullable=True)
data_source = db.relationship(DataSource, backref="queries")
- latest_query_data_id = Column(
- key_type("QueryResult"), db.ForeignKey("query_results.id"), nullable=True
- )
+ latest_query_data_id = Column(key_type("QueryResult"), db.ForeignKey("query_results.id"), nullable=True)
latest_query_data = db.relationship(QueryResult)
name = Column(db.String(255))
description = Column(db.String(4096), nullable=True)
@@ -484,16 +455,14 @@ class Query(ChangeTrackingMixin, TimestampMixin, BelongsToOrgMixin, db.Model):
user_id = Column(key_type("User"), db.ForeignKey("users.id"))
user = db.relationship(User, foreign_keys=[user_id])
last_modified_by_id = Column(key_type("User"), db.ForeignKey("users.id"), nullable=True)
- last_modified_by = db.relationship(
- User, backref="modified_queries", foreign_keys=[last_modified_by_id]
- )
+ last_modified_by = db.relationship(User, backref="modified_queries", foreign_keys=[last_modified_by_id])
is_archived = Column(db.Boolean, default=False, index=True)
is_draft = Column(db.Boolean, default=True, index=True)
- schedule = Column(MutableDict.as_mutable(PseudoJSON), nullable=True)
- interval = pseudo_json_cast_property(db.Integer, "schedule", "interval", default=0)
+ schedule = Column(MutableDict.as_mutable(JSONB), nullable=True)
+ interval = json_cast_property(db.Integer, "schedule", "interval", default=0)
schedule_failures = Column(db.Integer, default=0)
visualizations = db.relationship("Visualization", cascade="all, delete-orphan")
- options = Column(MutableDict.as_mutable(PseudoJSON), default={})
+ options = Column(MutableDict.as_mutable(JSONB), default={})
search_vector = Column(
TSVectorType(
"id",
@@ -504,9 +473,7 @@ class Query(ChangeTrackingMixin, TimestampMixin, BelongsToOrgMixin, db.Model):
),
nullable=True,
)
- tags = Column(
- "tags", MutableList.as_mutable(postgresql.ARRAY(db.Unicode)), nullable=True
- )
+ tags = Column("tags", MutableList.as_mutable(ARRAY(db.Unicode)), nullable=True)
query_class = SearchBaseQuery
__tablename__ = "queries"
@@ -542,43 +509,33 @@ def create(cls, **kwargs):
name="Table",
description="",
type="TABLE",
- options="{}",
+ options={},
)
)
return query
@classmethod
- def all_queries(
- cls, group_ids, user_id=None, include_drafts=False, include_archived=False
- ):
+ def all_queries(cls, group_ids, user_id=None, include_drafts=False, include_archived=False):
query_ids = (
db.session.query(distinct(cls.id))
- .join(
- DataSourceGroup, Query.data_source_id == DataSourceGroup.data_source_id
- )
+ .join(DataSourceGroup, Query.data_source_id == DataSourceGroup.data_source_id)
.filter(Query.is_archived.is_(include_archived))
.filter(DataSourceGroup.group_id.in_(group_ids))
)
queries = (
cls.query.options(
joinedload(Query.user),
- joinedload(Query.latest_query_data).load_only(
- "runtime", "retrieved_at"
- ),
+ joinedload(Query.latest_query_data).load_only("runtime", "retrieved_at"),
)
.filter(cls.id.in_(query_ids))
# Adding outer joins to be able to order by relationship
.outerjoin(User, User.id == Query.user_id)
.outerjoin(QueryResult, QueryResult.id == Query.latest_query_data_id)
- .options(
- contains_eager(Query.user), contains_eager(Query.latest_query_data)
- )
+ .options(contains_eager(Query.user), contains_eager(Query.latest_query_data))
)
if not include_drafts:
- queries = queries.filter(
- or_(Query.is_draft.is_(False), Query.user_id == user_id)
- )
+ queries = queries.filter(or_(Query.is_draft.is_(False), Query.user_id == user_id))
return queries
@classmethod
@@ -594,9 +551,7 @@ def favorites(cls, user, base_query=None):
@classmethod
def all_tags(cls, user, include_drafts=False):
- queries = cls.all_queries(
- group_ids=user.group_ids, user_id=user.id, include_drafts=include_drafts
- )
+ queries = cls.all_queries(group_ids=user.group_ids, user_id=user.id, include_drafts=include_drafts)
tag_column = func.unnest(cls.tags).label("tag")
usage_count = func.count(1).label("usage_count")
@@ -620,24 +575,20 @@ def by_api_key(cls, api_key):
@classmethod
def past_scheduled_queries(cls):
now = utils.utcnow()
- queries = Query.query.filter(Query.schedule.isnot(None)).order_by(Query.id)
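+ # schedule is stored as JSONB, so unscheduled queries hold a JSON null; filter those out by type.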
+ queries = Query.query.filter(func.jsonb_typeof(Query.schedule) != "null").order_by(Query.id)
return [
query
for query in queries
- if query.schedule["until"] is not None
- and pytz.utc.localize(
- datetime.datetime.strptime(query.schedule["until"], "%Y-%m-%d")
- )
- <= now
+ if "until" in query.schedule
+ and query.schedule["until"] is not None
+ and pytz.utc.localize(datetime.datetime.strptime(query.schedule["until"], "%Y-%m-%d")) <= now
]
@classmethod
def outdated_queries(cls):
queries = (
- Query.query.options(
- joinedload(Query.latest_query_data).load_only("retrieved_at")
- )
- .filter(Query.schedule.isnot(None))
+ Query.query.options(joinedload(Query.latest_query_data).load_only("retrieved_at"))
+ .filter(func.jsonb_typeof(Query.schedule) != "null")
.order_by(Query.id)
.all()
)
@@ -652,9 +603,7 @@ def outdated_queries(cls):
continue
if query.schedule["until"]:
- schedule_until = pytz.utc.localize(
- datetime.datetime.strptime(query.schedule["until"], "%Y-%m-%d")
- )
+ schedule_until = pytz.utc.localize(datetime.datetime.strptime(query.schedule["until"], "%Y-%m-%d"))
if schedule_until <= now:
continue
@@ -682,9 +631,7 @@ def outdated_queries(cls):
% (query.id, repr(e))
)
logging.info(message)
- sentry.capture_exception(
- type(e)(message).with_traceback(e.__traceback__)
- )
+ sentry.capture_exception(type(e)(message).with_traceback(e.__traceback__))
return list(outdated_queries.values())
@@ -710,9 +657,7 @@ def search(
# Since tsvector doesn't work well with CJK languages, use `ilike` too
pattern = "%{}%".format(term)
return (
- all_queries.filter(
- or_(cls.name.ilike(pattern), cls.description.ilike(pattern))
- )
+ all_queries.filter(or_(cls.name.ilike(pattern), cls.description.ilike(pattern)))
.order_by(Query.id)
.limit(limit)
)
@@ -721,7 +666,17 @@ def search(
return all_queries.search(term, sort=True).limit(limit)
@classmethod
- def search_by_user(cls, term, user, limit=None):
+ def search_by_user(cls, term, user, limit=None, multi_byte_search=False):
+ if multi_byte_search:
+ # Since tsvector doesn't work well with CJK languages, use `ilike` too
+ pattern = "%{}%".format(term)
+ return (
+ cls.by_user(user)
+ .filter(or_(cls.name.ilike(pattern), cls.description.ilike(pattern)))
+ .order_by(Query.id)
+ .limit(limit)
+ )
+
return cls.by_user(user).search(term, sort=True).limit(limit)
@classmethod
@@ -729,18 +684,14 @@ def recent(cls, group_ids, user_id=None, limit=20):
query = (
cls.query.filter(Event.created_at > (db.func.current_date() - 7))
.join(Event, Query.id == Event.object_id.cast(db.Integer))
- .join(
- DataSourceGroup, Query.data_source_id == DataSourceGroup.data_source_id
- )
+ .join(DataSourceGroup, Query.data_source_id == DataSourceGroup.data_source_id)
.filter(
- Event.action.in_(
- ["edit", "execute", "edit_name", "edit_description", "view_source"]
- ),
- Event.object_id != None,
+ Event.action.in_(["edit", "execute", "edit_name", "edit_description", "view_source"]),
+            Event.object_id.isnot(None),
Event.object_type == "query",
DataSourceGroup.group_id.in_(group_ids),
- or_(Query.is_draft == False, Query.user_id == user_id),
- Query.is_archived == False,
+            or_(Query.is_draft.is_(False), Query.user_id == user_id),
+ Query.is_archived.is_(False),
)
.group_by(Event.object_id, Query.id)
.order_by(db.desc(db.func.count(0)))
@@ -772,6 +723,7 @@ def update_latest_result(cls, query_result):
queries = Query.query.filter(
Query.query_hash == query_result.query_hash,
Query.data_source == query_result.data_source,
+ Query.is_archived.is_(False),
)
for q in queries:
@@ -803,16 +755,12 @@ def fork(self, user):
kwargs = {a: getattr(self, a) for a in forked_list}
# Query.create will add default TABLE visualization, so use constructor to create bare copy of query
- forked_query = Query(
- name="Copy of (#{}) {}".format(self.id, self.name), user=user, **kwargs
- )
+ forked_query = Query(name="Copy of (#{}) {}".format(self.id, self.name), user=user, **kwargs)
for v in sorted(self.visualizations, key=lambda v: v.id):
forked_v = v.copy()
forked_v["query_rel"] = forked_query
- fv = Visualization(
- **forked_v
- ) # it will magically add it to `forked_query.visualizations`
+ fv = Visualization(**forked_v) # it will magically add it to `forked_query.visualizations`
db.session.add(fv)
db.session.add(forked_query)
@@ -868,7 +816,20 @@ def dashboard_api_keys(self):
def update_query_hash(self):
should_apply_auto_limit = self.options.get("apply_auto_limit", False) if self.options else False
query_runner = self.data_source.query_runner if self.data_source else BaseQueryRunner({})
- self.query_hash = query_runner.gen_query_hash(self.query_text, should_apply_auto_limit)
+ query_text = self.query_text
+
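+        # Substitute the current parameter values into the query text before hashing, when any are set.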
+ parameters_dict = {p["name"]: p.get("value") for p in self.parameters} if self.options else {}
+ if any(parameters_dict):
+ try:
+ query_text = self.parameterized.apply(parameters_dict).query
+ except InvalidParameterError as e:
+ logging.info(f"Unable to update hash for query {self.id} because of invalid parameters: {str(e)}")
+ except QueryDetachedFromDataSourceError as e:
+ logging.info(
+ f"Unable to update hash for query {self.id} because of dropdown query {e.query_id} is unattached from datasource"
+ )
+
+ self.query_hash = query_runner.gen_query_hash(query_text, should_apply_auto_limit)
@listens_for(Query, "before_insert")
@@ -895,9 +856,7 @@ class Favorite(TimestampMixin, db.Model):
user = db.relationship(User, backref="favorites")
__tablename__ = "favorites"
- __table_args__ = (
- UniqueConstraint("object_type", "object_id", "user_id", name="unique_favorite"),
- )
+ __table_args__ = (UniqueConstraint("object_type", "object_id", "user_id", name="unique_favorite"),)
@classmethod
def is_favorite(cls, user, object):
@@ -957,19 +916,20 @@ def next_state(op, value, threshold):
if op(value, threshold):
new_state = Alert.TRIGGERED_STATE
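+    # Non-numeric values only support equality-style comparisons; any other comparison leaves the state unknown.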
+ elif not value_is_number and op not in [OPERATORS.get("!="), OPERATORS.get("=="), OPERATORS.get("equals")]:
+ new_state = Alert.UNKNOWN_STATE
else:
new_state = Alert.OK_STATE
return new_state
-@generic_repr(
- "id", "name", "query_id", "user_id", "state", "last_triggered_at", "rearm"
-)
+@generic_repr("id", "name", "query_id", "user_id", "state", "last_triggered_at", "rearm")
class Alert(TimestampMixin, BelongsToOrgMixin, db.Model):
UNKNOWN_STATE = "unknown"
OK_STATE = "ok"
TRIGGERED_STATE = "triggered"
+ TEST_STATE = "test"
id = primary_key("Alert")
name = Column(db.String(255))
@@ -977,7 +937,7 @@ class Alert(TimestampMixin, BelongsToOrgMixin, db.Model):
query_rel = db.relationship(Query, backref=backref("alerts", cascade="all"))
user_id = Column(key_type("User"), db.ForeignKey("users.id"))
user = db.relationship(User, backref="alerts")
- options = Column(MutableDict.as_mutable(PseudoJSON))
+ options = Column(MutableDict.as_mutable(JSONB), nullable=True)
state = Column(db.String(255), default=UNKNOWN_STATE)
subscriptions = db.relationship("AlertSubscription", cascade="all, delete-orphan")
last_triggered_at = Column(db.DateTime(True), nullable=True)
@@ -990,9 +950,7 @@ def all(cls, group_ids):
return (
cls.query.options(joinedload(Alert.user), joinedload(Alert.query_rel))
.join(Query)
- .join(
- DataSourceGroup, DataSourceGroup.data_source_id == Query.data_source_id
- )
+ .join(DataSourceGroup, DataSourceGroup.data_source_id == Query.data_source_id)
.filter(DataSourceGroup.group_id.in_(group_ids))
)
@@ -1006,7 +964,28 @@ def evaluate(self):
if data["rows"] and self.options["column"] in data["rows"][0]:
op = OPERATORS.get(self.options["op"], lambda v, t: False)
- value = data["rows"][0][self.options["column"]]
+ if "selector" not in self.options:
+ selector = "first"
+ else:
+ selector = self.options["selector"]
+
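+            # The min/max selectors scan every row and coerce values to float; a non-numeric value yields UNKNOWN_STATE.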
+ try:
+ if selector == "max":
+ max_val = float("-inf")
+ for i in range(len(data["rows"])):
+ max_val = max(max_val, float(data["rows"][i][self.options["column"]]))
+ value = max_val
+ elif selector == "min":
+ min_val = float("inf")
+ for i in range(len(data["rows"])):
+ min_val = min(min_val, float(data["rows"][i][self.options["column"]]))
+ value = min_val
+ else:
+ value = data["rows"][0][self.options["column"]]
+
+ except ValueError:
+ return self.UNKNOWN_STATE
+
threshold = self.options["value"]
new_state = next_state(op, value, threshold)
@@ -1016,9 +995,7 @@ def evaluate(self):
return new_state
def subscribers(self):
- return User.query.join(AlertSubscription).filter(
- AlertSubscription.alert == self
- )
+ return User.query.join(AlertSubscription).filter(AlertSubscription.alert == self)
def render_template(self, template):
if template is None:
@@ -1033,21 +1010,24 @@ def render_template(self, template):
else:
result_value = None
+        result_table = []  # A two-dimensional array which can be rendered as a table in Mustache
+ for row in data["rows"]:
+ result_table.append([row[col["name"]] for col in data["columns"]])
context = {
"ALERT_NAME": self.name,
"ALERT_URL": "{host}/alerts/{alert_id}".format(host=host, alert_id=self.id),
"ALERT_STATUS": self.state.upper(),
+ "ALERT_SELECTOR": self.options["selector"],
"ALERT_CONDITION": self.options["op"],
"ALERT_THRESHOLD": self.options["value"],
"QUERY_NAME": self.query_rel.name,
- "QUERY_URL": "{host}/queries/{query_id}".format(
- host=host, query_id=self.query_rel.id
- ),
+ "QUERY_URL": "{host}/queries/{query_id}".format(host=host, query_id=self.query_rel.id),
"QUERY_RESULT_VALUE": result_value,
"QUERY_RESULT_ROWS": data["rows"],
"QUERY_RESULT_COLS": data["columns"],
+ "QUERY_RESULT_TABLE": result_table,
}
- return mustache_render(template, context)
+ return mustache_render_escape(template, context)
@property
def custom_body(self):
@@ -1078,9 +1058,7 @@ def generate_slug(ctx):
@gfk_type
-@generic_repr(
- "id", "name", "slug", "user_id", "org_id", "version", "is_archived", "is_draft"
-)
+@generic_repr("id", "name", "slug", "user_id", "org_id", "version", "is_archived", "is_draft")
class Dashboard(ChangeTrackingMixin, TimestampMixin, BelongsToOrgMixin, db.Model):
id = primary_key("Dashboard")
version = Column(db.Integer)
@@ -1091,17 +1069,13 @@ class Dashboard(ChangeTrackingMixin, TimestampMixin, BelongsToOrgMixin, db.Model
user_id = Column(key_type("User"), db.ForeignKey("users.id"))
user = db.relationship(User)
# layout is no longer used, but kept so we know how to render old dashboards.
- layout = Column(db.Text)
+ layout = Column(MutableList.as_mutable(JSONB), default=[])
dashboard_filters_enabled = Column(db.Boolean, default=False)
is_archived = Column(db.Boolean, default=False, index=True)
is_draft = Column(db.Boolean, default=True, index=True)
widgets = db.relationship("Widget", backref="dashboard", lazy="dynamic")
- tags = Column(
- "tags", MutableList.as_mutable(postgresql.ARRAY(db.Unicode)), nullable=True
- )
- options = Column(
- MutableDict.as_mutable(postgresql.JSON), server_default="{}", default={}
- )
+ tags = Column("tags", MutableList.as_mutable(ARRAY(db.Unicode)), nullable=True)
+ options = Column(MutableDict.as_mutable(JSONB), default={})
__tablename__ = "dashboards"
__mapper_args__ = {"version_id_col": version}
@@ -1124,31 +1098,22 @@ def all(cls, org, group_ids, user_id):
.outerjoin(Widget)
.outerjoin(Visualization)
.outerjoin(Query)
- .outerjoin(
- DataSourceGroup, Query.data_source_id == DataSourceGroup.data_source_id
- )
+ .outerjoin(DataSourceGroup, Query.data_source_id == DataSourceGroup.data_source_id)
.filter(
- Dashboard.is_archived == False,
- (
- DataSourceGroup.group_id.in_(group_ids)
- | (Dashboard.user_id == user_id)
- ),
+ Dashboard.is_archived.is_(False),
+ (DataSourceGroup.group_id.in_(group_ids) | (Dashboard.user_id == user_id)),
Dashboard.org == org,
)
)
- query = query.filter(
- or_(Dashboard.user_id == user_id, Dashboard.is_draft == False)
- )
+ query = query.filter(or_(Dashboard.user_id == user_id, Dashboard.is_draft.is_(False)))
return query
@classmethod
def search(cls, org, groups_ids, user_id, search_term):
# TODO: switch to FTS
- return cls.all(org, groups_ids, user_id).filter(
- cls.name.ilike("%{}%".format(search_term))
- )
+ return cls.all(org, groups_ids, user_id).filter(cls.name.ilike("%{}%".format(search_term)))
@classmethod
def search_by_user(cls, term, user, limit=None):
@@ -1191,6 +1156,21 @@ def by_user(cls, user):
def get_by_slug_and_org(cls, slug, org):
return cls.query.filter(cls.slug == slug, cls.org == org).one()
+ def fork(self, user):
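+        # Copies the dashboard shell and its widgets; widgets keep referencing the original visualizations.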
+ forked_list = ["org", "layout", "dashboard_filters_enabled", "tags"]
+
+ kwargs = {a: getattr(self, a) for a in forked_list}
+ forked_dashboard = Dashboard(name="Copy of (#{}) {}".format(self.id, self.name), user=user, **kwargs)
+
+ for w in self.widgets:
+ forked_w = w.copy(forked_dashboard.id)
+ fw = Widget(**forked_w)
+ db.session.add(fw)
+
+ forked_dashboard.slug = forked_dashboard.id
+ db.session.add(forked_dashboard)
+ return forked_dashboard
+
@hybrid_property
def lowercase_name(self):
"Optional property useful for sorting purposes."
@@ -1215,7 +1195,7 @@ class Visualization(TimestampMixin, BelongsToOrgMixin, db.Model):
query_rel = db.relationship(Query, back_populates="visualizations")
name = Column(db.String(255))
description = Column(db.String(4096), nullable=True)
- options = Column(db.Text)
+ options = Column(MutableDict.as_mutable(JSONB), nullable=True)
__tablename__ = "visualizations"
@@ -1238,15 +1218,11 @@ def copy(self):
@generic_repr("id", "visualization_id", "dashboard_id")
class Widget(TimestampMixin, BelongsToOrgMixin, db.Model):
id = primary_key("Widget")
- visualization_id = Column(
- key_type("Visualization"), db.ForeignKey("visualizations.id"), nullable=True
- )
- visualization = db.relationship(
- Visualization, backref=backref("widgets", cascade="delete")
- )
+ visualization_id = Column(key_type("Visualization"), db.ForeignKey("visualizations.id"), nullable=True)
+ visualization = db.relationship(Visualization, backref=backref("widgets", cascade="delete"))
text = Column(db.Text, nullable=True)
width = Column(db.Integer)
- options = Column(db.Text)
+ options = Column(MutableDict.as_mutable(JSONB), default={})
dashboard_id = Column(key_type("Dashboard"), db.ForeignKey("dashboards.id"), index=True)
__tablename__ = "widgets"
@@ -1258,10 +1234,17 @@ def __str__(self):
def get_by_id_and_org(cls, object_id, org):
return super(Widget, cls).get_by_id_and_org(object_id, org, Dashboard)
+ def copy(self, dashboard_id):
+ return {
+ "options": self.options,
+ "width": self.width,
+ "text": self.text,
+ "visualization_id": self.visualization_id,
+ "dashboard_id": dashboard_id,
+ }
-@generic_repr(
- "id", "object_type", "object_id", "action", "user_id", "org_id", "created_at"
-)
+
+@generic_repr("id", "object_type", "object_id", "action", "user_id", "org_id", "created_at")
class Event(db.Model):
id = primary_key("Event")
org_id = Column(key_type("Organization"), db.ForeignKey("organizations.id"))
@@ -1271,9 +1254,7 @@ class Event(db.Model):
action = Column(db.String(255))
object_type = Column(db.String(255))
object_id = Column(db.String(255), nullable=True)
- additional_properties = Column(
- MutableDict.as_mutable(PseudoJSON), nullable=True, default={}
- )
+ additional_properties = Column(MutableDict.as_mutable(JSONB), nullable=True, default={})
created_at = Column(db.DateTime(True), default=db.func.now())
__tablename__ = "events"
@@ -1333,20 +1314,18 @@ class ApiKey(TimestampMixin, GFKBase, db.Model):
created_by = db.relationship(User)
__tablename__ = "api_keys"
- __table_args__ = (
- db.Index("api_keys_object_type_object_id", "object_type", "object_id"),
- )
+ __table_args__ = (db.Index("api_keys_object_type_object_id", "object_type", "object_id"),)
@classmethod
def get_by_api_key(cls, api_key):
- return cls.query.filter(cls.api_key == api_key, cls.active == True).one()
+ return cls.query.filter(cls.api_key == api_key, cls.active.is_(True)).one()
@classmethod
def get_by_object(cls, object):
return cls.query.filter(
cls.object_type == object.__class__.__tablename__,
cls.object_id == object.id,
- cls.active == True,
+ cls.active.is_(True),
).first()
@classmethod
@@ -1368,19 +1347,13 @@ class NotificationDestination(BelongsToOrgMixin, db.Model):
options = Column(
"encrypted_options",
ConfigurationContainer.as_mutable(
- EncryptedConfiguration(
- db.Text, settings.DATASOURCE_SECRET_KEY, FernetEngine
- )
+ EncryptedConfiguration(db.Text, settings.DATASOURCE_SECRET_KEY, FernetEngine)
),
)
created_at = Column(db.DateTime(True), default=db.func.now())
__tablename__ = "notification_destinations"
- __table_args__ = (
- db.Index(
- "notification_destinations_org_id_name", "org_id", "name", unique=True
- ),
- )
+ __table_args__ = (db.Index("notification_destinations_org_id_name", "org_id", "name", unique=True),)
def __str__(self):
return str(self.name)
@@ -1406,18 +1379,14 @@ def destination(self):
@classmethod
def all(cls, org):
- notification_destinations = cls.query.filter(cls.org == org).order_by(
- cls.id.asc()
- )
+ notification_destinations = cls.query.filter(cls.org == org).order_by(cls.id.asc())
return notification_destinations
- def notify(self, alert, query, user, new_state, app, host):
+ def notify(self, alert, query, user, new_state, app, host, metadata):
schema = get_configuration_schema_for_destination_type(self.type)
self.options.set_schema(schema)
- return self.destination.notify(
- alert, query, user, new_state, app, host, self.options
- )
+ return self.destination.notify(alert, query, user, new_state, app, host, metadata, self.options)
@generic_repr("id", "user_id", "destination_id", "alert_id")
@@ -1452,20 +1421,18 @@ def to_dict(self):
@classmethod
def all(cls, alert_id):
- return AlertSubscription.query.join(User).filter(
- AlertSubscription.alert_id == alert_id
- )
+ return AlertSubscription.query.join(User).filter(AlertSubscription.alert_id == alert_id)
- def notify(self, alert, query, user, new_state, app, host):
+ def notify(self, alert, query, user, new_state, app, host, metadata):
if self.destination:
- return self.destination.notify(alert, query, user, new_state, app, host)
+ return self.destination.notify(alert, query, user, new_state, app, host, metadata)
else:
# User email subscription, so create an email destination object
config = {"addresses": self.user.email}
schema = get_configuration_schema_for_destination_type("email")
options = ConfigurationContainer(config, schema)
destination = get_destination("email", options)
- return destination.notify(alert, query, user, new_state, app, host, options)
+ return destination.notify(alert, query, user, new_state, app, host, metadata, options)
@generic_repr("id", "trigger", "user_id", "org_id")
@@ -1503,7 +1470,7 @@ def init_db():
default_org = Organization(name="Default", slug="default", settings={})
admin_group = Group(
name="admin",
- permissions=["admin", "super_admin"],
+ permissions=Group.ADMIN_PERMISSIONS,
org=default_org,
type=Group.BUILTIN_GROUP,
)
diff --git a/redash/models/base.py b/redash/models/base.py
index e2b7e68277..2ed95c38fb 100644
--- a/redash/models/base.py
+++ b/redash/models/base.py
@@ -1,13 +1,13 @@
import functools
from flask_sqlalchemy import BaseQuery, SQLAlchemy
+from sqlalchemy.dialects.postgresql import UUID
from sqlalchemy.orm import object_session
from sqlalchemy.pool import NullPool
-from sqlalchemy_searchable import make_searchable, vectorizer, SearchQueryMixin
-from sqlalchemy.dialects import postgresql
+from sqlalchemy_searchable import SearchQueryMixin, make_searchable, vectorizer
from redash import settings
-from redash.utils import json_dumps
+from redash.utils import json_dumps, json_loads
class RedashSQLAlchemy(SQLAlchemy):
@@ -15,7 +15,7 @@ def apply_driver_hacks(self, app, info, options):
options.update(json_serializer=json_dumps)
if settings.SQLALCHEMY_ENABLE_POOL_PRE_PING:
options.update(pool_pre_ping=True)
- super(RedashSQLAlchemy, self).apply_driver_hacks(app, info, options)
+ return super(RedashSQLAlchemy, self).apply_driver_hacks(app, info, options)
def apply_pool_defaults(self, app, options):
super(RedashSQLAlchemy, self).apply_pool_defaults(app, options)
@@ -25,9 +25,13 @@ def apply_pool_defaults(self, app, options):
options["poolclass"] = NullPool
# Remove options NullPool does not support:
options.pop("max_overflow", None)
+ return options
-db = RedashSQLAlchemy(session_options={"expire_on_commit": False})
+db = RedashSQLAlchemy(
+ session_options={"expire_on_commit": False},
+ engine_options={"json_serializer": json_dumps, "json_deserializer": json_loads},
+)
# Make sure the SQLAlchemy mappers are all properly configured first.
# This is required by SQLAlchemy-Searchable as it adds DDL listeners
# on the configuration phase of models.
@@ -35,7 +39,7 @@ def apply_pool_defaults(self, app, options):
# listen to a few database events to set up functions, trigger updates
# and indexes for the full text search
-make_searchable(options={"regconfig": "pg_catalog.simple"})
+make_searchable(db.metadata, options={"regconfig": "pg_catalog.simple"})
class SearchBaseQuery(BaseQuery, SearchQueryMixin):
@@ -49,7 +53,7 @@ def integer_vectorizer(column):
return db.func.cast(column, db.Text)
-@vectorizer(postgresql.UUID)
+@vectorizer(UUID)
def uuid_vectorizer(column):
return db.func.cast(column, db.Text)
@@ -67,7 +71,7 @@ def gfk_type(cls):
return cls
-class GFKBase(object):
+class GFKBase:
"""
Compatibility with 'generic foreign key' approach Peewee used.
"""
@@ -84,11 +88,7 @@ def object(self):
return self._object
else:
object_class = _gfk_types[self.object_type]
- self._object = (
- session.query(object_class)
- .filter(object_class.id == self.object_id)
- .first()
- )
+ self._object = session.query(object_class).filter(object_class.id == self.object_id).first()
return self._object
@object.setter
diff --git a/redash/models/changes.py b/redash/models/changes.py
index cf8be934d0..3858f91415 100644
--- a/redash/models/changes.py
+++ b/redash/models/changes.py
@@ -1,8 +1,8 @@
+from sqlalchemy.dialects.postgresql import JSONB
from sqlalchemy.inspection import inspect
from sqlalchemy_utils.models import generic_repr
-from .base import GFKBase, db, Column, primary_key, key_type
-from .types import PseudoJSON
+from .base import Column, GFKBase, db, key_type, primary_key
@generic_repr("id", "object_type", "object_id", "created_at")
@@ -13,7 +13,7 @@ class Change(GFKBase, db.Model):
object_version = Column(db.Integer, default=0)
user_id = Column(key_type("User"), db.ForeignKey("users.id"))
user = db.relationship("User", backref="changes")
- change = Column(PseudoJSON)
+ change = Column(JSONB)
created_at = Column(db.DateTime(True), default=db.func.now())
__tablename__ = "changes"
@@ -39,15 +39,13 @@ def to_dict(self, full=True):
@classmethod
def last_change(cls, obj):
return (
- cls.query.filter(
- cls.object_id == obj.id, cls.object_type == obj.__class__.__tablename__
- )
+ cls.query.filter(cls.object_id == obj.id, cls.object_type == obj.__class__.__tablename__)
.order_by(cls.object_version.desc())
.first()
)
-class ChangeTrackingMixin(object):
+class ChangeTrackingMixin:
skipped_fields = ("id", "created_at", "updated_at", "version")
_clean_values = None
diff --git a/redash/models/mixins.py b/redash/models/mixins.py
index 9116fe46de..e721554906 100644
--- a/redash/models/mixins.py
+++ b/redash/models/mixins.py
@@ -1,9 +1,9 @@
from sqlalchemy.event import listens_for
-from .base import db, Column
+from .base import Column, db
-class TimestampMixin(object):
+class TimestampMixin:
updated_at = Column(db.DateTime(True), default=db.func.now(), nullable=False)
created_at = Column(db.DateTime(True), default=db.func.now(), nullable=False)
@@ -17,7 +17,7 @@ def timestamp_before_update(mapper, connection, target):
target.updated_at = db.func.now()
-class BelongsToOrgMixin(object):
+class BelongsToOrgMixin:
@classmethod
def get_by_id_and_org(cls, object_id, org, org_cls=None):
query = cls.query.filter(cls.id == object_id)
diff --git a/redash/models/organizations.py b/redash/models/organizations.py
index 8cf670b250..bff1908a79 100644
--- a/redash/models/organizations.py
+++ b/redash/models/organizations.py
@@ -1,12 +1,13 @@
+from sqlalchemy.dialects.postgresql import JSONB
from sqlalchemy.orm.attributes import flag_modified
from sqlalchemy_utils.models import generic_repr
from redash.settings.organization import settings as org_settings
-from .base import db, Column, primary_key
+from .base import Column, db, primary_key
from .mixins import TimestampMixin
-from .types import MutableDict, PseudoJSON
-from .users import User, Group
+from .types import MutableDict
+from .users import Group, User
@generic_repr("id", "name", "slug")
@@ -17,7 +18,7 @@ class Organization(TimestampMixin, db.Model):
id = primary_key("Organization")
name = Column(db.String(255))
slug = Column(db.String(255), unique=True)
- settings = Column(MutableDict.as_mutable(PseudoJSON))
+ settings = Column(MutableDict.as_mutable(JSONB), default={})
groups = db.relationship("Group", lazy="dynamic")
events = db.relationship("Event", lazy="dynamic", order_by="desc(Event.created_at)")
@@ -88,9 +89,7 @@ def get_setting(self, key, raise_on_missing=True):
@property
def admin_group(self):
- return self.groups.filter(
- Group.name == "admin", Group.type == Group.BUILTIN_GROUP
- ).first()
+ return self.groups.filter(Group.name == "admin", Group.type == Group.BUILTIN_GROUP).first()
def has_user(self, email):
return self.users.filter(User.email == email).count() == 1
diff --git a/redash/models/parameterized_query.py b/redash/models/parameterized_query.py
index 0094c0aa7d..6799296675 100644
--- a/redash/models/parameterized_query.py
+++ b/redash/models/parameterized_query.py
@@ -1,10 +1,12 @@
-import pystache
+import re
from functools import partial
from numbers import Number
-from redash.utils import mustache_render, json_loads
-from redash.permissions import require_access, view_only
-from funcy import distinct
+
+import pystache
from dateutil.parser import parse
+from funcy import distinct
+
+from redash.utils import mustache_render
def _pluck_name_and_value(default_column, row):
@@ -21,9 +23,7 @@ def _load_result(query_id, org):
query = models.Query.get_by_id_and_org(query_id, org)
if query.data_source:
- query_result = models.QueryResult.get_by_id_and_org(
- query.latest_query_data_id, org
- )
+ query_result = models.QueryResult.get_by_id_and_org(query.latest_query_data_id, org)
return query_result.data
else:
raise QueryDetachedFromDataSourceError(query_id)
@@ -38,18 +38,14 @@ def dropdown_values(query_id, org):
def join_parameter_list_values(parameters, schema):
updated_parameters = {}
- for (key, value) in parameters.items():
+ for key, value in parameters.items():
if isinstance(value, list):
- definition = next(
- (definition for definition in schema if definition["name"] == key), {}
- )
+ definition = next((definition for definition in schema if definition["name"] == key), {})
multi_values_options = definition.get("multiValuesOptions", {})
separator = str(multi_values_options.get("separator", ","))
prefix = str(multi_values_options.get("prefix", ""))
suffix = str(multi_values_options.get("suffix", ""))
- updated_parameters[key] = separator.join(
- [prefix + v + suffix for v in value]
- )
+ updated_parameters[key] = separator.join([prefix + v + suffix for v in value])
else:
updated_parameters[key] = value
return updated_parameters
@@ -89,26 +85,27 @@ def _is_number(string):
if isinstance(string, Number):
return True
else:
- try:
- float(string)
+ float(string)
+ return True
+
+
+def _is_regex_pattern(value, regex):
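+    # An invalid pattern and a value that does not fully match both fail validation.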
+ try:
+ if re.compile(regex).fullmatch(value):
return True
- except ValueError:
+ else:
return False
+ except re.error:
+ return False
def _is_date(string):
- try:
- parse(string)
- return True
- except (ValueError, TypeError):
- return False
+ parse(string)
+ return True
def _is_date_range(obj):
- try:
- return _is_date(obj["start"]) and _is_date(obj["end"])
- except (KeyError, TypeError):
- return False
+ return _is_date(obj["start"]) and _is_date(obj["end"])
def _is_value_within_options(value, dropdown_options, allow_list=False):
@@ -117,7 +114,7 @@ def _is_value_within_options(value, dropdown_options, allow_list=False):
return str(value) in dropdown_options
-class ParameterizedQuery(object):
+class ParameterizedQuery:
def __init__(self, template, schema=None, org=None):
self.schema = schema or []
self.org = org
@@ -126,16 +123,12 @@ def __init__(self, template, schema=None, org=None):
self.parameters = {}
def apply(self, parameters):
- invalid_parameter_names = [
- key for (key, value) in parameters.items() if not self._valid(key, value)
- ]
+ invalid_parameter_names = [key for (key, value) in parameters.items() if not self._valid(key, value)]
if invalid_parameter_names:
raise InvalidParameterError(invalid_parameter_names)
else:
self.parameters.update(parameters)
- self.query = mustache_render(
- self.template, join_parameter_list_values(parameters, self.schema)
- )
+ self.query = mustache_render(self.template, join_parameter_list_values(parameters, self.schema))
return self
@@ -153,6 +146,7 @@ def _valid(self, name, value):
enum_options = definition.get("enumOptions")
query_id = definition.get("queryId")
+ regex = definition.get("regex")
allow_multiple_values = isinstance(definition.get("multiValuesOptions"), dict)
if isinstance(enum_options, str):
@@ -160,10 +154,9 @@ def _valid(self, name, value):
validators = {
"text": lambda value: isinstance(value, str),
+ "text-pattern": lambda value: _is_regex_pattern(value, regex),
"number": _is_number,
- "enum": lambda value: _is_value_within_options(
- value, enum_options, allow_multiple_values
- ),
+ "enum": lambda value: _is_value_within_options(value, enum_options, allow_multiple_values),
"query": lambda value: _is_value_within_options(
value,
[v["value"] for v in dropdown_values(query_id, self.org)],
@@ -179,7 +172,14 @@ def _valid(self, name, value):
validate = validators.get(definition["type"], lambda x: False)
- return validate(value)
+ try:
+ # multiple error types can be raised here; but we want to convert
+ # all except QueryDetached to InvalidParameterError in `apply`
+ return validate(value)
+ except QueryDetachedFromDataSourceError:
+ raise
+ except Exception:
+ return False
@property
def is_safe(self):
@@ -199,9 +199,7 @@ def text(self):
class InvalidParameterError(Exception):
def __init__(self, parameters):
parameter_names = ", ".join(parameters)
- message = "The following parameter values are incompatible with their definitions: {}".format(
- parameter_names
- )
+ message = "The following parameter values are incompatible with their definitions: {}".format(parameter_names)
super(InvalidParameterError, self).__init__(message)
diff --git a/redash/models/types.py b/redash/models/types.py
index dd7873ab06..b3aa467dcf 100644
--- a/redash/models/types.py
+++ b/redash/models/types.py
@@ -1,10 +1,7 @@
-import pytz
-from sqlalchemy.types import TypeDecorator
from sqlalchemy.ext.indexable import index_property
from sqlalchemy.ext.mutable import Mutable
+from sqlalchemy.types import TypeDecorator
from sqlalchemy_utils import EncryptedType
-from sqlalchemy import cast
-from sqlalchemy.dialects.postgresql import JSON
from redash.utils import json_dumps, json_loads
from redash.utils.configuration import ConfigurationContainer
@@ -24,9 +21,7 @@ def process_result_value(self, value, dialect):
class EncryptedConfiguration(EncryptedType):
def process_bind_param(self, value, dialect):
- return super(EncryptedConfiguration, self).process_bind_param(
- value.to_json(), dialect
- )
+ return super(EncryptedConfiguration, self).process_bind_param(value.to_json(), dialect)
def process_result_value(self, value, dialect):
return ConfigurationContainer.from_json(
@@ -34,8 +29,8 @@ def process_result_value(self, value, dialect):
)
-# XXX replace PseudoJSON and MutableDict with real JSON field
-class PseudoJSON(TypeDecorator):
+# Used when the serialized JSON is bigger than the JSONB (255MB) or JSON (10MB) size limit
+class JSONText(TypeDecorator):
impl = db.Text
def process_bind_param(self, value, dialect):
@@ -110,17 +105,3 @@ def __init__(self, cast_type, *args, **kwargs):
def expr(self, model):
expr = super(json_cast_property, self).expr(model)
return expr.astext.cast(self.cast_type)
-
-
-class pseudo_json_cast_property(index_property):
- """
- A SQLAlchemy index property that is able to cast the
- entity attribute as the specified cast type. Useful
- for PseudoJSON colums for easier querying/filtering.
- """
- def __init__(self, cast_type, *args, **kwargs):
- super().__init__(*args, **kwargs)
- self.cast_type = cast_type
- def expr(self, model):
- expr = cast(getattr(model, self.attr_name), JSON)[self.index]
- return expr.astext.cast(self.cast_type)
diff --git a/redash/models/users.py b/redash/models/users.py
index 5ddd3e2594..ad51ba90c0 100644
--- a/redash/models/users.py
+++ b/redash/models/users.py
@@ -5,21 +5,19 @@
from functools import reduce
from operator import or_
-from flask import current_app as app, url_for, request_started
-from flask_login import current_user, AnonymousUserMixin, UserMixin
+from flask import current_app, request_started, url_for
+from flask_login import AnonymousUserMixin, UserMixin, current_user
from passlib.apps import custom_app_context as pwd_context
-from sqlalchemy.exc import DBAPIError
-from sqlalchemy.dialects import postgresql
-
+from sqlalchemy.dialects.postgresql import ARRAY, JSONB
from sqlalchemy_utils import EmailType
from sqlalchemy_utils.models import generic_repr
from redash import redis_connection
-from redash.utils import generate_token, utcnow, dt_from_timestamp
+from redash.utils import dt_from_timestamp, generate_token
-from .base import db, Column, GFKBase, key_type, primary_key
-from .mixins import TimestampMixin, BelongsToOrgMixin
-from .types import json_cast_property, MutableDict, MutableList
+from .base import Column, GFKBase, db, key_type, primary_key
+from .mixins import BelongsToOrgMixin, TimestampMixin
+from .types import MutableDict, MutableList, json_cast_property
logger = logging.getLogger(__name__)
@@ -62,7 +60,7 @@ def init_app(app):
request_started.connect(update_user_active_at, app)
-class PermissionsCheckMixin(object):
+class PermissionsCheckMixin:
def has_permission(self, permission):
return self.has_permissions((permission,))
@@ -77,37 +75,31 @@ def has_permissions(self, permissions):
@generic_repr("id", "name", "email")
-class User(
- TimestampMixin, db.Model, BelongsToOrgMixin, UserMixin, PermissionsCheckMixin
-):
+class User(TimestampMixin, db.Model, BelongsToOrgMixin, UserMixin, PermissionsCheckMixin):
id = primary_key("User")
org_id = Column(key_type("Organization"), db.ForeignKey("organizations.id"))
org = db.relationship("Organization", backref=db.backref("users", lazy="dynamic"))
name = Column(db.String(320))
email = Column(EmailType)
- _profile_image_url = Column("profile_image_url", db.String(320), nullable=True)
password_hash = Column(db.String(128), nullable=True)
group_ids = Column(
- "groups", MutableList.as_mutable(postgresql.ARRAY(key_type("Group"))), nullable=True
+ "groups",
+ MutableList.as_mutable(ARRAY(key_type("Group"))),
+ nullable=True,
)
api_key = Column(db.String(40), default=lambda: generate_token(40), unique=False)
disabled_at = Column(db.DateTime(True), default=None, nullable=True)
details = Column(
- MutableDict.as_mutable(postgresql.JSON),
+ MutableDict.as_mutable(JSONB),
nullable=True,
server_default="{}",
default={},
)
- active_at = json_cast_property(
- db.DateTime(True), "details", "active_at", default=None
- )
- is_invitation_pending = json_cast_property(
- db.Boolean(True), "details", "is_invitation_pending", default=False
- )
- is_email_verified = json_cast_property(
- db.Boolean(True), "details", "is_email_verified", default=True
- )
+ active_at = json_cast_property(db.DateTime(True), "details", "active_at", default=None)
+ _profile_image_url = json_cast_property(db.Text(), "details", "profile_image_url", default=None)
+ is_invitation_pending = json_cast_property(db.Boolean(True), "details", "is_invitation_pending", default=False)
+ is_email_verified = json_cast_property(db.Boolean(True), "details", "is_email_verified", default=True)
__tablename__ = "users"
__table_args__ = (db.Index("users_org_id_email", "org_id", "email", unique=True),)
@@ -136,7 +128,7 @@ def regenerate_api_key(self):
def to_dict(self, with_api_key=False):
profile_image_url = self.profile_image_url
if self.is_disabled:
- assets = app.extensions["webpack"]["assets"] or {}
+ assets = current_app.extensions["webpack"]["assets"] or {}
path = "images/avatar.svg"
profile_image_url = url_for("static", filename=assets.get(path, path))
@@ -165,28 +157,22 @@ def to_dict(self, with_api_key=False):
return d
- def is_api_user(self):
+ @staticmethod
+ def is_api_user():
return False
@property
def profile_image_url(self):
- if self._profile_image_url is not None:
+ if self._profile_image_url:
return self._profile_image_url
- email_md5 = hashlib.md5(self.email.lower().encode()).hexdigest()
+ email_md5 = hashlib.md5(self.email.lower().encode(), usedforsecurity=False).hexdigest()
return "https://www.gravatar.com/avatar/{}?s=40&d=identicon".format(email_md5)
@property
def permissions(self):
# TODO: this should be cached.
- return list(
- itertools.chain(
- *[
- g.permissions
- for g in Group.query.filter(Group.id.in_(self.group_ids))
- ]
- )
- )
+ return list(itertools.chain(*[g.permissions for g in Group.query.filter(Group.id.in_(self.group_ids))]))
@classmethod
def get_by_org(cls, org):
@@ -224,16 +210,14 @@ def pending(cls, base_query, pending):
if pending:
return base_query.filter(cls.is_invitation_pending.is_(True))
else:
- return base_query.filter(
- cls.is_invitation_pending.isnot(True)
- ) # check for both `false`/`null`
+ return base_query.filter(cls.is_invitation_pending.isnot(True)) # check for both `false`/`null`
@classmethod
def find_by_email(cls, email):
return cls.query.filter(cls.email == email)
def hash_password(self, password):
- self.password_hash = pwd_context.encrypt(password)
+ self.password_hash = pwd_context.hash(password)
def verify_password(self, password):
return self.password_hash and pwd_context.verify(password, self.password_hash)
@@ -250,10 +234,13 @@ def has_access(self, obj, access_type):
def get_id(self):
identity = hashlib.md5(
- "{},{}".format(self.email, self.password_hash).encode()
+ "{},{}".format(self.email, self.password_hash).encode(), usedforsecurity=False
).hexdigest()
return "{0}-{1}".format(self.id, identity)
+ def get_actual_user(self):
+ return repr(self) if self.is_api_user() else self.email
+
@generic_repr("id", "name", "type", "org_id")
class Group(db.Model, BelongsToOrgMixin):
@@ -271,19 +258,18 @@ class Group(db.Model, BelongsToOrgMixin):
"list_alerts",
"list_data_sources",
]
+ ADMIN_PERMISSIONS = ["admin", "super_admin"]
BUILTIN_GROUP = "builtin"
REGULAR_GROUP = "regular"
id = primary_key("Group")
- data_sources = db.relationship(
- "DataSourceGroup", back_populates="group", cascade="all"
- )
+ data_sources = db.relationship("DataSourceGroup", back_populates="group", cascade="all")
org_id = Column(key_type("Organization"), db.ForeignKey("organizations.id"))
org = db.relationship("Organization", back_populates="groups")
type = Column(db.String(255), default=REGULAR_GROUP)
name = Column(db.String(100))
- permissions = Column(postgresql.ARRAY(db.String(255)), default=DEFAULT_PERMISSIONS)
+ permissions = Column(ARRAY(db.String(255)), default=DEFAULT_PERMISSIONS)
created_at = Column(db.DateTime(True), default=db.func.now())
__tablename__ = "groups"
@@ -314,9 +300,7 @@ def find_by_name(cls, org, group_names):
return list(result)
-@generic_repr(
- "id", "object_type", "object_id", "access_type", "grantor_id", "grantee_id"
-)
+@generic_repr("id", "object_type", "object_id", "access_type", "grantor_id", "grantee_id")
class AccessPermission(GFKBase, db.Model):
id = primary_key("AccessPermission")
# 'object' defined in GFKBase
@@ -365,9 +349,7 @@ def exists(cls, obj, access_type, grantee):
@classmethod
def _query(cls, obj, access_type=None, grantee=None, grantor=None):
- q = cls.query.filter(
- cls.object_id == obj.id, cls.object_type == obj.__tablename__
- )
+ q = cls.query.filter(cls.object_id == obj.id, cls.object_type == obj.__tablename__)
if access_type:
q = q.filter(AccessPermission.access_type == access_type)
@@ -397,7 +379,8 @@ class AnonymousUser(AnonymousUserMixin, PermissionsCheckMixin):
def permissions(self):
return []
- def is_api_user(self):
+ @staticmethod
+ def is_api_user():
return False
@@ -417,7 +400,8 @@ def __init__(self, api_key, org, groups, name=None):
def __repr__(self):
return "<{}>".format(self.name)
- def is_api_user(self):
+ @staticmethod
+ def is_api_user():
return True
@property
@@ -430,5 +414,9 @@ def org_id(self):
def permissions(self):
return ["view_query"]
- def has_access(self, obj, access_type):
+ @staticmethod
+ def has_access(obj, access_type):
return False
+
+ def get_actual_user(self):
+ return repr(self)
diff --git a/redash/monitor.py b/redash/monitor.py
index 41f3dca122..77521975c5 100644
--- a/redash/monitor.py
+++ b/redash/monitor.py
@@ -1,14 +1,11 @@
-from __future__ import absolute_import
-import itertools
from funcy import flatten
-from sqlalchemy import union_all
-from redash import redis_connection, rq_redis_connection, __version__, settings
-from redash.models import db, DataSource, Query, QueryResult, Dashboard, Widget
-from redash.utils import json_loads
from rq import Queue, Worker
from rq.job import Job
from rq.registry import StartedJobRegistry
+from redash import __version__, redis_connection, rq_redis_connection, settings
+from redash.models import Dashboard, Query, QueryResult, Widget, db
+
def get_redis_status():
info = redis_connection.info()
@@ -23,7 +20,7 @@ def get_object_counts():
status["queries_count"] = Query.query.count()
if settings.FEATURE_SHOW_QUERY_RESULTS_COUNT:
status["query_results_count"] = QueryResult.query.count()
- status["unused_query_results_count"] = QueryResult.unused().count()
+ status["unused_query_results_count"] = QueryResult.unused(settings.QUERY_RESULTS_CLEANUP_MAX_AGE).count()
status["dashboards_count"] = Dashboard.query.count()
status["widgets_count"] = Widget.query.count()
return status
diff --git a/redash/permissions.py b/redash/permissions.py
index 92c34dc157..cca017d19a 100644
--- a/redash/permissions.py
+++ b/redash/permissions.py
@@ -54,7 +54,7 @@ def require_access(obj, user, need_view_only):
abort(403)
-class require_permissions(object):
+class require_permissions:
def __init__(self, permissions, allow_one=False):
self.permissions = permissions
self.allow_one = allow_one
@@ -92,9 +92,7 @@ def require_super_admin(fn):
def has_permission_or_owner(permission, object_owner_id):
- return int(object_owner_id) == current_user.id or current_user.has_permission(
- permission
- )
+ return int(object_owner_id) == current_user.id or current_user.has_permission(permission)
def is_admin_or_owner(object_owner_id):
diff --git a/redash/query_runner/__init__.py b/redash/query_runner/__init__.py
index 6f8ef389b1..7e51eec9c1 100644
--- a/redash/query_runner/__init__.py
+++ b/redash/query_runner/__init__.py
@@ -1,17 +1,12 @@
import logging
-
+from collections import defaultdict
from contextlib import ExitStack
-from dateutil import parser
from functools import wraps
-import socket
-import ipaddress
-from urllib.parse import urlparse
-from six import text_type
-from sshtunnel import open_tunnel
-from redash import settings, utils
-from redash.utils import json_loads, query_is_select_no_limit, add_limit_to_query
+import sqlparse
+from dateutil import parser
from rq.timeouts import JobTimeoutException
+from sshtunnel import open_tunnel
+
+from redash import settings
from redash.utils.requests_session import requests_or_advocate, requests_session, UnacceptableAddressException
@@ -45,9 +40,65 @@
TYPE_DATETIME = "datetime"
TYPE_DATE = "date"
-SUPPORTED_COLUMN_TYPES = set(
- [TYPE_INTEGER, TYPE_FLOAT, TYPE_BOOLEAN, TYPE_STRING, TYPE_DATETIME, TYPE_DATE]
-)
+SUPPORTED_COLUMN_TYPES = set([TYPE_INTEGER, TYPE_FLOAT, TYPE_BOOLEAN, TYPE_STRING, TYPE_DATETIME, TYPE_DATE])
+
+
+def split_sql_statements(query):
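+    # Split a SQL string into individual statements, stripping trailing comments/semicolons and dropping empty ones.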
+ def strip_trailing_comments(stmt):
+ idx = len(stmt.tokens) - 1
+ while idx >= 0:
+ tok = stmt.tokens[idx]
+ if tok.is_whitespace or sqlparse.utils.imt(tok, i=sqlparse.sql.Comment, t=sqlparse.tokens.Comment):
+ stmt.tokens[idx] = sqlparse.sql.Token(sqlparse.tokens.Whitespace, " ")
+ else:
+ break
+ idx -= 1
+ return stmt
+
+ def strip_trailing_semicolon(stmt):
+ idx = len(stmt.tokens) - 1
+ while idx >= 0:
+ tok = stmt.tokens[idx]
+ # we expect that trailing comments already are removed
+ if not tok.is_whitespace:
+ if sqlparse.utils.imt(tok, t=sqlparse.tokens.Punctuation) and tok.value == ";":
+ stmt.tokens[idx] = sqlparse.sql.Token(sqlparse.tokens.Whitespace, " ")
+ break
+ idx -= 1
+ return stmt
+
+ def is_empty_statement(stmt):
+ # copy statement object. `copy.deepcopy` fails to do this, so just re-parse it
+ st = sqlparse.engine.FilterStack()
+ st.stmtprocess.append(sqlparse.filters.StripCommentsFilter())
+ stmt = next(st.run(str(stmt)), None)
+ if stmt is None:
+ return True
+
+ return str(stmt).strip() == ""
+
+ stack = sqlparse.engine.FilterStack()
+
+ result = [stmt for stmt in stack.run(query)]
+ result = [strip_trailing_comments(stmt) for stmt in result]
+ result = [strip_trailing_semicolon(stmt) for stmt in result]
+ result = [str(stmt).strip() for stmt in result if not is_empty_statement(stmt)]
+
+ if len(result) > 0:
+ return result
+
+ return [""] # if all statements were empty - return a single empty statement
+
+
+def combine_sql_statements(queries):
+ return ";\n".join(queries)
+
+
+def find_last_keyword_idx(parsed_query):
+ for i in reversed(range(len(parsed_query.tokens))):
+ if parsed_query.tokens[i].ttype in sqlparse.tokens.Keyword:
+ return i
+ return -1
class InterruptException(Exception):
@@ -58,10 +109,13 @@ class NotSupported(Exception):
pass
-class BaseQueryRunner(object):
+class BaseQueryRunner:
deprecated = False
should_annotate_query = True
noop_query = None
+ limit_query = " LIMIT 1000"
+ limit_keywords = ["LIMIT", "OFFSET"]
+ limit_after_select = False
def __init__(self, configuration):
self.syntax = "sql"
@@ -155,32 +209,37 @@ def run_query(self, query, user):
raise NotImplementedError()
def fetch_columns(self, columns):
- column_names = []
- duplicates_counter = 1
+ column_names = set()
+ duplicates_counters = defaultdict(int)
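+        # Duplicate column names get a per-name numeric suffix: col, col1, col2, ...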
new_columns = []
for col in columns:
column_name = col[0]
- if column_name in column_names:
- column_name = "{}{}".format(column_name, duplicates_counter)
- duplicates_counter += 1
+ while column_name in column_names:
+ duplicates_counters[col[0]] += 1
+ column_name = "{}{}".format(col[0], duplicates_counters[col[0]])
- column_names.append(column_name)
- new_columns.append(
- {"name": column_name, "friendly_name": column_name, "type": col[1]}
- )
+ column_names.add(column_name)
+ new_columns.append({"name": column_name, "friendly_name": column_name, "type": col[1]})
return new_columns
def get_schema(self, get_stats=False):
raise NotSupported()
+ def _handle_run_query_error(self, error):
+ if error is None:
+ return
+
+ logger.error(error)
+ raise Exception(f"Error during query execution. Reason: {error}")
+
def _run_query_internal(self, query):
results, error = self.run_query(query, None)
if error is not None:
raise Exception("Failed running query [%s]." % query)
- return json_loads(results)["rows"]
+ return results["rows"]
@classmethod
def to_dict(cls):
@@ -216,7 +275,7 @@ def _get_tables(self, schema_dict):
def _get_tables_stats(self, tables_dict):
for t in tables_dict.keys():
- if type(tables_dict[t]) == dict:
+ if isinstance(tables_dict[t], dict):
res = self._run_query_internal("select count(*) as cnt from %s" % t)
tables_dict[t]["size"] = res[0]["cnt"]
@@ -224,10 +283,39 @@ def _get_tables_stats(self, tables_dict):
def supports_auto_limit(self):
return True
+ def query_is_select_no_limit(self, query):
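+        # True only for SELECT statements whose final keyword is not already one of limit_keywords.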
+ parsed_query = sqlparse.parse(query)[0]
+ last_keyword_idx = find_last_keyword_idx(parsed_query)
+ # Either invalid query or query that is not select
+ if last_keyword_idx == -1 or parsed_query.tokens[0].value.upper() != "SELECT":
+ return False
+
+ no_limit = parsed_query.tokens[last_keyword_idx].value.upper() not in self.limit_keywords
+
+ return no_limit
+
+ def add_limit_to_query(self, query):
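+        # By default append limit_query at the end (before a trailing semicolon); with limit_after_select, inject it right after the SELECT keyword.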
+ parsed_query = sqlparse.parse(query)[0]
+ limit_tokens = sqlparse.parse(self.limit_query)[0].tokens
+ length = len(parsed_query.tokens)
+ if not self.limit_after_select:
+ if parsed_query.tokens[length - 1].ttype == sqlparse.tokens.Punctuation:
+ parsed_query.tokens[length - 1 : length - 1] = limit_tokens
+ else:
+ parsed_query.tokens += limit_tokens
+ else:
+ for i in range(length - 1, -1, -1):
+ if parsed_query[i].value.upper() == "SELECT":
+ index = parsed_query.token_index(parsed_query[i + 1])
+ parsed_query = sqlparse.sql.Statement(
+ parsed_query.tokens[:index] + limit_tokens + parsed_query.tokens[index:]
+ )
+ break
+ return str(parsed_query)
+
def apply_auto_limit(self, query_text, should_apply_auto_limit):
+ queries = split_sql_statements(query_text)
if should_apply_auto_limit:
- from redash.query_runner.databricks import split_sql_statements, combine_sql_statements
- queries = split_sql_statements(query_text)
             # we only check the last statement in the list because its result is the one we display
last_query = queries[-1]
-            if query_is_select_no_limit(last_query):
+            if self.query_is_select_no_limit(last_query):
@@ -280,7 +368,6 @@ def get_auth(self):
return None
def get_response(self, url, auth=None, http_method="get", **kwargs):
-
# Get authentication values if not given
if auth is None:
auth = self.get_auth()
@@ -404,9 +491,7 @@ def wrapper(*args, **kwargs):
try:
remote_host, remote_port = query_runner.host, query_runner.port
except NotImplementedError:
- raise NotImplementedError(
- "SSH tunneling is not implemented for this query runner yet."
- )
+ raise NotImplementedError("SSH tunneling is not implemented for this query runner yet.")
stack = ExitStack()
try:
@@ -416,11 +501,7 @@ def wrapper(*args, **kwargs):
"ssh_username": details["ssh_username"],
**settings.dynamic_settings.ssh_tunnel_auth(),
}
- server = stack.enter_context(
- open_tunnel(
- bastion_address, remote_bind_address=remote_address, **auth
- )
- )
+ server = stack.enter_context(open_tunnel(bastion_address, remote_bind_address=remote_address, **auth))
except Exception as error:
raise type(error)("SSH tunnel: {}".format(str(error)))
diff --git a/redash/query_runner/amazon_elasticsearch.py b/redash/query_runner/amazon_elasticsearch.py
index cf81969874..1b36cad75c 100644
--- a/redash/query_runner/amazon_elasticsearch.py
+++ b/redash/query_runner/amazon_elasticsearch.py
@@ -1,16 +1,16 @@
-from .elasticsearch import ElasticSearch
from . import register
+from .elasticsearch2 import ElasticSearch2
try:
+ from botocore import credentials, session
from requests_aws_sign import AWSV4Sign
- from botocore import session, credentials
enabled = True
except ImportError:
enabled = False
-class AmazonElasticsearchService(ElasticSearch):
+class AmazonElasticsearchService(ElasticSearch2):
@classmethod
def name(cls):
return "Amazon Elasticsearch Service"
@@ -63,5 +63,8 @@ def __init__(self, configuration):
self.auth = AWSV4Sign(cred, region, "es")
+ def get_auth(self):
+ return self.auth
+
register(AmazonElasticsearchService)
diff --git a/redash/query_runner/arango.py b/redash/query_runner/arango.py
new file mode 100644
index 0000000000..c47e7cd3b1
--- /dev/null
+++ b/redash/query_runner/arango.py
@@ -0,0 +1,90 @@
+import logging
+
+from redash.query_runner import (
+ TYPE_BOOLEAN,
+ TYPE_FLOAT,
+ TYPE_STRING,
+ BaseQueryRunner,
+ register,
+)
+
+logger = logging.getLogger(__name__)
+
+try:
+ from arango import ArangoClient
+
+ enabled = True
+except ImportError:
+ enabled = False
+
+
+_TYPE_MAPPINGS = {
+ "boolean": TYPE_BOOLEAN,
+ "number": TYPE_FLOAT,
+ "string": TYPE_STRING,
+ "array": TYPE_STRING,
+ "object": TYPE_STRING,
+}
+
+
+class Arango(BaseQueryRunner):
+ noop_query = "RETURN {'id': 1}"
+
+ @classmethod
+ def name(cls):
+ return "ArangoDB"
+
+ @classmethod
+ def configuration_schema(cls):
+ return {
+ "type": "object",
+ "properties": {
+ "user": {"type": "string"},
+ "password": {"type": "string"},
+ "host": {"type": "string", "default": "127.0.0.1"},
+ "port": {"type": "number", "default": 8529},
+ "dbname": {"type": "string", "title": "Database Name"},
+ "timeout": {"type": "number", "default": 0.0, "title": "AQL Timeout in seconds (0 = no timeout)"},
+ },
+ "order": ["host", "port", "user", "password", "dbname"],
+ "required": ["host", "user", "password", "dbname"],
+ "secret": ["password"],
+ }
+
+ @classmethod
+ def enabled(cls):
+ try:
+ import arango # noqa: F401
+ except ImportError:
+ return False
+
+ return True
+
+ @classmethod
+ def type(cls):
+ return "arangodb"
+
+ def run_query(self, query, user):
+ client = ArangoClient(hosts="{}:{}".format(self.configuration["host"], self.configuration.get("port", 8529)))
+ db = client.db(
+ self.configuration["dbname"], username=self.configuration["user"], password=self.configuration["password"]
+ )
+
+ try:
+ cursor = db.aql.execute(query, max_runtime=self.configuration.get("timeout", 0.0))
+ result = [i for i in cursor]
+ column_tuples = [(i, TYPE_STRING) for i in result[0].keys()]
+ columns = self.fetch_columns(column_tuples)
+ data = {
+ "columns": columns,
+ "rows": result,
+ }
+
+ error = None
+ except Exception:
+ raise
+
+ return data, error
+
+
+register(Arango)
diff --git a/redash/query_runner/athena.py b/redash/query_runner/athena.py
index 6145cb4a84..8ff9b3d1af 100644
--- a/redash/query_runner/athena.py
+++ b/redash/query_runner/athena.py
@@ -1,23 +1,27 @@
import logging
import os
-from redash.query_runner import *
+from redash.query_runner import (
+ TYPE_BOOLEAN,
+ TYPE_DATE,
+ TYPE_DATETIME,
+ TYPE_FLOAT,
+ TYPE_INTEGER,
+ TYPE_STRING,
+ BaseQueryRunner,
+ register,
+)
from redash.settings import parse_boolean
-from redash.utils import json_dumps, json_loads
logger = logging.getLogger(__name__)
ANNOTATE_QUERY = parse_boolean(os.environ.get("ATHENA_ANNOTATE_QUERY", "true"))
-SHOW_EXTRA_SETTINGS = parse_boolean(
- os.environ.get("ATHENA_SHOW_EXTRA_SETTINGS", "true")
-)
+SHOW_EXTRA_SETTINGS = parse_boolean(os.environ.get("ATHENA_SHOW_EXTRA_SETTINGS", "true"))
ASSUME_ROLE = parse_boolean(os.environ.get("ATHENA_ASSUME_ROLE", "false"))
-OPTIONAL_CREDENTIALS = parse_boolean(
- os.environ.get("ATHENA_OPTIONAL_CREDENTIALS", "true")
-)
+OPTIONAL_CREDENTIALS = parse_boolean(os.environ.get("ATHENA_OPTIONAL_CREDENTIALS", "true"))
try:
- import pyathena
import boto3
+ import pyathena
enabled = True
except ImportError:
@@ -42,7 +46,7 @@
}
-class SimpleFormatter(object):
+class SimpleFormatter:
def format(self, operation, parameters=None):
return operation
@@ -72,6 +76,10 @@ def configuration_schema(cls):
"default": "default",
},
"glue": {"type": "boolean", "title": "Use Glue Data Catalog"},
+ "catalog_ids": {
+ "type": "string",
+ "title": "Enter Glue Data Catalog IDs, separated by commas (leave blank for default catalog)",
+ },
"work_group": {
"type": "string",
"title": "Athena Work Group",
@@ -84,7 +92,7 @@ def configuration_schema(cls):
},
},
"required": ["region", "s3_staging_dir"],
- "extra_options": ["glue", "cost_per_tb"],
+ "extra_options": ["glue", "catalog_ids", "cost_per_tb"],
"order": [
"region",
"s3_staging_dir",
@@ -168,52 +176,66 @@ def _get_iam_credentials(self, user=None):
"region_name": self.configuration["region"],
}
- def __get_schema_from_glue(self):
+ def __get_schema_from_glue(self, catalog_id=""):
client = boto3.client("glue", **self._get_iam_credentials())
schema = {}
database_paginator = client.get_paginator("get_databases")
table_paginator = client.get_paginator("get_tables")
- for databases in database_paginator.paginate():
+ databases_iterator = database_paginator.paginate(
+ **({"CatalogId": catalog_id} if catalog_id != "" else {}),
+ )
+
+ for databases in databases_iterator:
for database in databases["DatabaseList"]:
- iterator = table_paginator.paginate(DatabaseName=database["Name"])
+ iterator = table_paginator.paginate(
+ DatabaseName=database["Name"],
+ **({"CatalogId": catalog_id} if catalog_id != "" else {}),
+ )
for table in iterator.search("TableList[]"):
table_name = "%s.%s" % (database["Name"], table["Name"])
- if 'StorageDescriptor' not in table:
+ if "StorageDescriptor" not in table:
logger.warning("Glue table doesn't have StorageDescriptor: %s", table_name)
continue
if table_name not in schema:
- column = [
- columns["Name"]
- for columns in table["StorageDescriptor"]["Columns"]
- ]
- schema[table_name] = {"name": table_name, "columns": column}
- for partition in table.get("PartitionKeys", []):
- schema[table_name]["columns"].append(partition["Name"])
+ schema[table_name] = {"name": table_name, "columns": []}
+
+ for column_data in table["StorageDescriptor"]["Columns"]:
+ column = {
+ "name": column_data["Name"],
+ "type": column_data["Type"] if "Type" in column_data else None,
+ }
+ schema[table_name]["columns"].append(column)
+ for partition in table.get("PartitionKeys", []):
+ partition_column = {
+ "name": partition["Name"],
+ "type": partition["Type"] if "Type" in partition else None,
+ }
+ schema[table_name]["columns"].append(partition_column)
return list(schema.values())
def get_schema(self, get_stats=False):
if self.configuration.get("glue", False):
- return self.__get_schema_from_glue()
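+            # An empty setting splits to [""], which the Glue helper treats as the default catalog.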
+ catalog_ids = [id.strip() for id in self.configuration.get("catalog_ids", "").split(",")]
+ return sum([self.__get_schema_from_glue(catalog_id) for catalog_id in catalog_ids], [])
schema = {}
query = """
- SELECT table_schema, table_name, column_name
+ SELECT table_schema, table_name, column_name, data_type
FROM information_schema.columns
WHERE table_schema NOT IN ('information_schema')
"""
results, error = self.run_query(query, None)
if error is not None:
- raise Exception("Failed getting schema.")
+ self._handle_run_query_error(error)
- results = json_loads(results)
for row in results["rows"]:
table_name = "{0}.{1}".format(row["table_schema"], row["table_name"])
if table_name not in schema:
schema[table_name] = {"name": table_name, "columns": []}
- schema[table_name]["columns"].append(row["column_name"])
+ schema[table_name]["columns"].append({"name": row["column_name"], "type": row["data_type"]})
return list(schema.values())
@@ -225,19 +247,14 @@ def run_query(self, query, user):
kms_key=self.configuration.get("kms_key", None),
work_group=self.configuration.get("work_group", "primary"),
formatter=SimpleFormatter(),
- **self._get_iam_credentials(user=user)
+ **self._get_iam_credentials(user=user),
).cursor()
try:
cursor.execute(query)
- column_tuples = [
- (i[0], _TYPE_MAPPINGS.get(i[1], None)) for i in cursor.description
- ]
+ column_tuples = [(i[0], _TYPE_MAPPINGS.get(i[1], None)) for i in cursor.description]
columns = self.fetch_columns(column_tuples)
- rows = [
- dict(zip(([c["name"] for c in columns]), r))
- for i, r in enumerate(cursor.fetchall())
- ]
+ rows = [dict(zip(([c["name"] for c in columns]), r)) for i, r in enumerate(cursor.fetchall())]
qbytes = None
athena_query_id = None
try:
@@ -260,14 +277,13 @@ def run_query(self, query, user):
},
}
- json_data = json_dumps(data, ignore_nan=True)
error = None
except Exception:
if cursor.query_id:
cursor.cancel()
raise
- return json_data, error
+ return data, error
register(Athena)
diff --git a/redash/query_runner/axibase_tsd.py b/redash/query_runner/axibase_tsd.py
index ecb1af5cba..3c535c4568 100644
--- a/redash/query_runner/axibase_tsd.py
+++ b/redash/query_runner/axibase_tsd.py
@@ -1,18 +1,26 @@
-from io import StringIO
+import csv
import logging
-import sys
import uuid
-import csv
-from redash.query_runner import *
-from redash.utils import json_dumps, json_loads
+from redash.query_runner import (
+ TYPE_DATE,
+ TYPE_DATETIME,
+ TYPE_FLOAT,
+ TYPE_INTEGER,
+ TYPE_STRING,
+ BaseQueryRunner,
+ InterruptException,
+ JobTimeoutException,
+ register,
+)
+from redash.utils import json_loads
logger = logging.getLogger(__name__)
try:
import atsd_client
from atsd_client.exceptions import SQLException
- from atsd_client.services import SQLService, MetricsService
+ from atsd_client.services import MetricsService, SQLService
enabled = True
except ImportError:
@@ -149,17 +157,16 @@ def run_query(self, query, user):
columns, rows = generate_rows_and_columns(data)
data = {"columns": columns, "rows": rows}
- json_data = json_dumps(data)
error = None
except SQLException as e:
- json_data = None
+ data = None
error = e.content
except (KeyboardInterrupt, InterruptException, JobTimeoutException):
sql.cancel_query(query_id)
raise
- return json_data, error
+ return data, error
def get_schema(self, get_stats=False):
connection = atsd_client.connect_url(
diff --git a/redash/query_runner/azure_kusto.py b/redash/query_runner/azure_kusto.py
index 24293618a6..c7372fe184 100644
--- a/redash/query_runner/azure_kusto.py
+++ b/redash/query_runner/azure_kusto.py
@@ -1,18 +1,22 @@
-from redash.query_runner import BaseQueryRunner, register
from redash.query_runner import (
- TYPE_STRING,
+ TYPE_BOOLEAN,
TYPE_DATE,
TYPE_DATETIME,
- TYPE_INTEGER,
TYPE_FLOAT,
- TYPE_BOOLEAN,
+ TYPE_INTEGER,
+ TYPE_STRING,
+ BaseQueryRunner,
+ register,
)
-from redash.utils import json_dumps, json_loads
-
+from redash.utils import json_loads
try:
- from azure.kusto.data.request import KustoClient, KustoConnectionStringBuilder, ClientRequestProperties
from azure.kusto.data.exceptions import KustoServiceError
+ from azure.kusto.data.request import (
+ ClientRequestProperties,
+ KustoClient,
+ KustoConnectionStringBuilder,
+ )
enabled = True
except ImportError:
@@ -87,7 +91,6 @@ def name(cls):
return "Azure Data Explorer (Kusto)"
def run_query(self, query, user):
-
kcsb = KustoConnectionStringBuilder.with_aad_application_key_authentication(
connection_string=self.configuration["cluster"],
aad_app_id=self.configuration["azure_ad_client_id"],
@@ -121,16 +124,15 @@ def run_query(self, query, user):
error = None
data = {"columns": columns, "rows": rows}
- json_data = json_dumps(data)
except KustoServiceError as err:
- json_data = None
+ data = None
try:
error = err.args[1][0]["error"]["@message"]
except (IndexError, KeyError):
error = err.args[1]
- return json_data, error
+ return data, error
def get_schema(self, get_stats=False):
query = ".show database schema as json"
@@ -138,14 +140,10 @@ def get_schema(self, get_stats=False):
results, error = self.run_query(query, None)
if error is not None:
- raise Exception("Failed getting schema.")
-
- results = json_loads(results)
+ self._handle_run_query_error(error)
schema_as_json = json_loads(results["rows"][0]["DatabaseSchema"])
- tables_list = schema_as_json["Databases"][self.configuration["database"]][
- "Tables"
- ].values()
+ tables_list = schema_as_json["Databases"][self.configuration["database"]]["Tables"].values()
schema = {}
diff --git a/redash/query_runner/big_query.py b/redash/query_runner/big_query.py
index eddbde199e..48e49d46e8 100644
--- a/redash/query_runner/big_query.py
+++ b/redash/query_runner/big_query.py
@@ -1,23 +1,31 @@
import datetime
import logging
-import sys
+import socket
import time
from base64 import b64decode
-import httplib2
-import requests
-
from redash import settings
-from redash.query_runner import *
-from redash.utils import json_dumps, json_loads
+from redash.query_runner import (
+ TYPE_BOOLEAN,
+ TYPE_DATETIME,
+ TYPE_FLOAT,
+ TYPE_INTEGER,
+ TYPE_STRING,
+ BaseQueryRunner,
+ InterruptException,
+ JobTimeoutException,
+ register,
+)
+from redash.utils import json_loads
logger = logging.getLogger(__name__)
try:
import apiclient.errors
+ import google.auth
from apiclient.discovery import build
- from apiclient.errors import HttpError
- from oauth2client.service_account import ServiceAccountCredentials
+ from apiclient.errors import HttpError # noqa: F401
+ from google.oauth2.service_account import Credentials
enabled = True
except ImportError:
@@ -52,9 +60,7 @@ def transform_row(row, fields):
for column_index, cell in enumerate(row["f"]):
field = fields[column_index]
if field.get("mode") == "REPEATED":
- cell_value = [
- transform_cell(field["type"], item["v"]) for item in cell["v"]
- ]
+ cell_value = [transform_cell(field["type"], item["v"]) for item in cell["v"]]
else:
cell_value = transform_cell(field["type"], cell["v"])
@@ -64,7 +70,7 @@ def transform_row(row, fields):
def _load_key(filename):
- f = file(filename, "rb")
+ f = open(filename, "rb")
try:
return f.read()
finally:
@@ -83,10 +89,19 @@ def _get_query_results(jobs, project_id, location, job_id, start_index):
return query_reply
+def _get_total_bytes_processed_for_resp(bq_response):
+ # BigQuery hides the total bytes processed for queries to tables with row-level access controls.
+ # For these queries the "totalBytesProcessed" field may not be defined in the response.
+ return int(bq_response.get("totalBytesProcessed", "0"))
+
+
class BigQuery(BaseQueryRunner):
- should_annotate_query = False
noop_query = "SELECT 1"
+ def __init__(self, configuration):
+ super().__init__(configuration)
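+ # Make query annotation opt-in per data source via the "useQueryAnnotation" option (off by default).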
+ self.should_annotate_query = configuration.get("useQueryAnnotation", False)
+
@classmethod
def enabled(cls):
return enabled
@@ -97,7 +112,7 @@ def configuration_schema(cls):
"type": "object",
"properties": {
"projectId": {"type": "string", "title": "Project ID"},
- "jsonKeyFile": {"type": "string", "title": "JSON Key File"},
+ "jsonKeyFile": {"type": "string", "title": "JSON Key File (ADC is used if omitted)"},
"totalMBytesProcessedLimit": {
"type": "number",
"title": "Scanned Data Limit (MB)",
@@ -117,8 +132,13 @@ def configuration_schema(cls):
"type": "number",
"title": "Maximum Billing Tier",
},
+ "useQueryAnnotation": {
+ "type": "boolean",
+ "title": "Use Query Annotation",
+ "default": False,
+ },
},
- "required": ["jsonKeyFile", "projectId"],
+ "required": ["projectId"],
"order": [
"projectId",
"jsonKeyFile",
@@ -128,23 +148,26 @@ def configuration_schema(cls):
"totalMBytesProcessedLimit",
"maximumBillingTier",
"userDefinedFunctionResourceUri",
+ "useQueryAnnotation",
],
"secret": ["jsonKeyFile"],
}
def _get_bigquery_service(self):
- scope = [
+ socket.setdefaulttimeout(settings.BIGQUERY_HTTP_TIMEOUT)
+
+ scopes = [
"https://www.googleapis.com/auth/bigquery",
"https://www.googleapis.com/auth/drive",
]
- key = json_loads(b64decode(self.configuration["jsonKeyFile"]))
-
- creds = ServiceAccountCredentials.from_json_keyfile_dict(key, scope)
- http = httplib2.Http(timeout=settings.BIGQUERY_HTTP_TIMEOUT)
- http = creds.authorize(http)
+ try:
+ key = json_loads(b64decode(self.configuration["jsonKeyFile"]))
+ creds = Credentials.from_service_account_info(key, scopes=scopes)
+ except KeyError:
+ creds = google.auth.default(scopes=scopes)[0]
- return build("bigquery", "v2", http=http, cache_discovery=False)
+ return build("bigquery", "v2", credentials=creds, cache_discovery=False)
def _get_project_id(self):
return self.configuration["projectId"]
@@ -162,7 +185,7 @@ def _get_total_bytes_processed(self, jobs, query):
job_data["useLegacySql"] = False
response = jobs.query(projectId=self._get_project_id(), body=job_data).execute()
- return int(response["totalBytesProcessed"])
+ return _get_total_bytes_processed_for_resp(response)
def _get_job_data(self, query):
job_data = {"configuration": {"query": {"query": query}}}
@@ -174,17 +197,13 @@ def _get_job_data(self, query):
job_data["configuration"]["query"]["useLegacySql"] = False
if self.configuration.get("userDefinedFunctionResourceUri"):
- resource_uris = self.configuration["userDefinedFunctionResourceUri"].split(
- ","
- )
+ resource_uris = self.configuration["userDefinedFunctionResourceUri"].split(",")
job_data["configuration"]["query"]["userDefinedFunctionResources"] = [
{"resourceUri": resource_uri} for resource_uri in resource_uris
]
if "maximumBillingTier" in self.configuration:
- job_data["configuration"]["query"][
- "maximumBillingTier"
- ] = self.configuration["maximumBillingTier"]
+ job_data["configuration"]["query"]["maximumBillingTier"] = self.configuration["maximumBillingTier"]
return job_data
@@ -227,9 +246,7 @@ def _get_query_result(self, jobs, query):
{
"name": f["name"],
"friendly_name": f["name"],
- "type": "string"
- if f.get("mode") == "REPEATED"
- else types_map.get(f["type"], "string"),
+ "type": "string" if f.get("mode") == "REPEATED" else types_map.get(f["type"], "string"),
}
for f in query_reply["schema"]["fields"]
]
@@ -237,7 +254,7 @@ def _get_query_result(self, jobs, query):
data = {
"columns": columns,
"rows": rows,
- "metadata": {"data_scanned": int(query_reply["totalBytesProcessed"])},
+ "metadata": {"data_scanned": _get_total_bytes_processed_for_resp(query_reply)},
}
return data
@@ -261,11 +278,25 @@ def _get_columns_schema_column(self, column):
return columns
+ def _get_project_datasets(self, project_id):
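+ """List all datasets in the project, following nextPageToken pagination."""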
+ result = []
+ service = self._get_bigquery_service()
+
+ datasets = service.datasets().list(projectId=project_id).execute()
+ result.extend(datasets.get("datasets", []))
+ nextPageToken = datasets.get("nextPageToken", None)
+
+ while nextPageToken is not None:
+ datasets = service.datasets().list(projectId=project_id, pageToken=nextPageToken).execute()
+ result.extend(datasets.get("datasets", []))
+ nextPageToken = datasets.get("nextPageToken", None)
+
+ return result
+
def get_schema(self, get_stats=False):
if not self.configuration.get("loadSchema", False):
return []
- service = self._get_bigquery_service()
project_id = self._get_project_id()
- datasets = service.datasets().list(projectId=project_id).execute()
+ datasets = self._get_project_datasets(project_id)
@@ -305,23 +336,19 @@ def run_query(self, query, user):
try:
if "totalMBytesProcessedLimit" in self.configuration:
limitMB = self.configuration["totalMBytesProcessedLimit"]
- processedMB = (
- self._get_total_bytes_processed(jobs, query) / 1000.0 / 1000.0
- )
+ processedMB = self._get_total_bytes_processed(jobs, query) / 1000.0 / 1000.0
if limitMB < processedMB:
return (
None,
- "Larger than %d MBytes will be processed (%f MBytes)"
- % (limitMB, processedMB),
+ "Larger than %d MBytes will be processed (%f MBytes)" % (limitMB, processedMB),
)
data = self._get_query_result(jobs, query)
error = None
- json_data = json_dumps(data, ignore_nan=True)
except apiclient.errors.HttpError as e:
- json_data = None
- if e.resp.status == 400:
+ data = None
+ if e.resp.status in [400, 404]:
error = json_loads(e.content)["error"]["message"]
else:
error = e.content
@@ -335,7 +362,7 @@ def run_query(self, query, user):
raise
- return json_data, error
+ return data, error
register(BigQuery)
diff --git a/redash/query_runner/big_query_gce.py b/redash/query_runner/big_query_gce.py
index bc7a38d91d..8ff22191d7 100644
--- a/redash/query_runner/big_query_gce.py
+++ b/redash/query_runner/big_query_gce.py
@@ -1,15 +1,15 @@
import requests
-import httplib2
try:
+ import google.auth
from apiclient.discovery import build
- from oauth2client.contrib import gce
enabled = True
except ImportError:
enabled = False
from redash.query_runner import register
+
from .big_query import BigQuery
@@ -59,19 +59,11 @@ def configuration_schema(cls):
}
def _get_project_id(self):
- return requests.get(
- "http://metadata/computeMetadata/v1/project/project-id",
- headers={"Metadata-Flavor": "Google"},
- ).content
+ return google.auth.default()[1]
def _get_bigquery_service(self):
- credentials = gce.AppAssertionCredentials(
- scope="https://www.googleapis.com/auth/bigquery"
- )
- http = httplib2.Http()
- http = credentials.authorize(http)
-
- return build("bigquery", "v2", http=http)
+ creds = google.auth.default(scopes=["https://www.googleapis.com/auth/bigquery"])[0]
+ return build("bigquery", "v2", credentials=creds, cache_discovery=False)
register(BigQueryGCE)
diff --git a/redash/query_runner/cass.py b/redash/query_runner/cass.py
index 4a49c0d9b1..fb5ce3ee5d 100644
--- a/redash/query_runner/cass.py
+++ b/redash/query_runner/cass.py
@@ -5,13 +5,12 @@
from tempfile import NamedTemporaryFile
from redash.query_runner import BaseQueryRunner, register
-from redash.utils import JSONEncoder, json_dumps, json_loads
logger = logging.getLogger(__name__)
try:
- from cassandra.cluster import Cluster
from cassandra.auth import PlainTextAuthProvider
+ from cassandra.cluster import Cluster
from cassandra.util import sortedset
enabled = True
@@ -20,22 +19,13 @@
def generate_ssl_options_dict(protocol, cert_path=None):
- ssl_options = {
- 'ssl_version': getattr(ssl, protocol)
- }
+ ssl_options = {"ssl_version": getattr(ssl, protocol)}
if cert_path is not None:
- ssl_options['ca_certs'] = cert_path
- ssl_options['cert_reqs'] = ssl.CERT_REQUIRED
+ ssl_options["ca_certs"] = cert_path
+ ssl_options["cert_reqs"] = ssl.CERT_REQUIRED
return ssl_options
-class CassandraJSONEncoder(JSONEncoder):
- def default(self, o):
- if isinstance(o, sortedset):
- return list(o)
- return super(CassandraJSONEncoder, self).default(o)
-
-
class Cassandra(BaseQueryRunner):
noop_query = "SELECT dateof(now()) FROM system.local"
@@ -43,6 +33,12 @@ class Cassandra(BaseQueryRunner):
def enabled(cls):
return enabled
+ @classmethod
+ def custom_json_encoder(cls, dec, o):
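+ # Cassandra's sortedset is not JSON-serializable; convert it to a list and return None for types the default encoder should handle.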
+ if isinstance(o, sortedset):
+ return list(o)
+ return None
+
@classmethod
def configuration_schema(cls):
return {
@@ -60,10 +56,7 @@ def configuration_schema(cls):
},
"timeout": {"type": "number", "title": "Timeout", "default": 10},
"useSsl": {"type": "boolean", "title": "Use SSL", "default": False},
- "sslCertificateFile": {
- "type": "string",
- "title": "SSL Certificate File"
- },
+ "sslCertificateFile": {"type": "string", "title": "SSL Certificate File"},
"sslProtocol": {
"type": "string",
"title": "SSL Protocol",
@@ -91,7 +84,6 @@ def get_schema(self, get_stats=False):
select release_version from system.local;
"""
results, error = self.run_query(query, None)
- results = json_loads(results)
release_version = results["rows"][0]["release_version"]
query = """
@@ -112,7 +104,6 @@ def get_schema(self, get_stats=False):
)
results, error = self.run_query(query, None)
- results = json_loads(results)
schema = {}
for row in results["rows"]:
@@ -127,9 +118,7 @@ def get_schema(self, get_stats=False):
def run_query(self, query, user):
connection = None
cert_path = self._generate_cert_file()
- if self.configuration.get("username", "") and self.configuration.get(
- "password", ""
- ):
+ if self.configuration.get("username", "") and self.configuration.get("password", ""):
auth_provider = PlainTextAuthProvider(
username="{}".format(self.configuration.get("username", "")),
password="{}".format(self.configuration.get("password", "")),
@@ -162,14 +151,13 @@ def run_query(self, query, user):
rows = [dict(zip(column_names, row)) for row in result]
data = {"columns": columns, "rows": rows}
- json_data = json_dumps(data, cls=CassandraJSONEncoder)
- return json_data, None
+ return data, None
def _generate_cert_file(self):
cert_encoded_bytes = self.configuration.get("sslCertificateFile", None)
if cert_encoded_bytes:
- with NamedTemporaryFile(mode='w', delete=False) as cert_file:
+ with NamedTemporaryFile(mode="w", delete=False) as cert_file:
cert_bytes = b64decode(cert_encoded_bytes)
cert_file.write(cert_bytes.decode("utf-8"))
return cert_file.name
@@ -182,10 +170,7 @@ def _cleanup_cert_file(self, cert_path):
def _get_ssl_options(self, cert_path):
ssl_options = None
if self.configuration.get("useSsl", False):
- ssl_options = generate_ssl_options_dict(
- protocol=self.configuration["sslProtocol"],
- cert_path=cert_path
- )
+ ssl_options = generate_ssl_options_dict(protocol=self.configuration["sslProtocol"], cert_path=cert_path)
return ssl_options
diff --git a/redash/query_runner/clickhouse.py b/redash/query_runner/clickhouse.py
index c2a1c6ebb5..a443659237 100644
--- a/redash/query_runner/clickhouse.py
+++ b/redash/query_runner/clickhouse.py
@@ -1,15 +1,28 @@
import logging
import re
from urllib.parse import urlparse
+from uuid import uuid4
import requests
-from redash.query_runner import *
-from redash.utils import json_dumps, json_loads
+from redash.query_runner import (
+ TYPE_DATE,
+ TYPE_DATETIME,
+ TYPE_FLOAT,
+ TYPE_INTEGER,
+ TYPE_STRING,
+ BaseSQLQueryRunner,
+ register,
+ split_sql_statements,
+)
logger = logging.getLogger(__name__)
+def split_multi_query(query):
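+ """Split a multi-statement query string into a list of non-empty statements."""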
+ return [st for st in split_sql_statements(query) if st != ""]
+
+
class ClickHouse(BaseSQLQueryRunner):
noop_query = "SELECT 1"
@@ -39,10 +52,6 @@ def configuration_schema(cls):
"secret": ["password"],
}
- @classmethod
- def type(cls):
- return "clickhouse"
-
@property
def _url(self):
return urlparse(self.configuration["url"])
@@ -73,9 +82,7 @@ def _get_tables(self, schema):
results, error = self.run_query(query, None)
if error is not None:
- raise Exception("Failed getting schema.")
-
- results = json_loads(results)
+ self._handle_run_query_error(error)
for row in results["rows"]:
table_name = "{}.{}".format(row["database"], row["table"])
@@ -87,31 +94,49 @@ def _get_tables(self, schema):
return list(schema.values())
- def _send_query(self, data, stream=False):
+ def _send_query(self, data, session_id=None, session_check=None):
url = self.configuration.get("url", "http://127.0.0.1:8123")
+ timeout = self.configuration.get("timeout", 30)
+
+ params = {
+ "user": self.configuration.get("user", "default"),
+ "password": self.configuration.get("password", ""),
+ "database": self.configuration["dbname"],
+ "default_format": "JSON",
+ }
+
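+ # A session is only needed for multi-statement queries; session_check=1 asks the server to require that the session already exists.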
+ if session_id:
+ params["session_id"] = session_id
+ params["session_check"] = "1" if session_check else "0"
+ params["session_timeout"] = timeout
+
try:
verify = self.configuration.get("verify", True)
r = requests.post(
url,
- data=data.encode("utf-8","ignore"),
- stream=stream,
- timeout=self.configuration.get("timeout", 30),
- params={
- "user": self.configuration.get("user", "default"),
- "password": self.configuration.get("password", ""),
- "database": self.configuration["dbname"],
- },
+ data=data.encode("utf-8", "ignore"),
+ stream=False,
+ timeout=timeout,
+ params=params,
verify=verify,
)
- if r.status_code != 200:
+
+ if not r.ok:
raise Exception(r.text)
- # logging.warning(r.json())
- return r.json()
+
+ # In certain situations the response body can be empty even if the query was successful, for example
+ # when creating temporary tables.
+ if not r.text:
+ return {}
+
+ response = r.json()
+ if "exception" in response:
+ raise Exception(response["exception"])
+
+ return response
except requests.RequestException as e:
if e.response:
- details = "({}, Status Code: {})".format(
- e.__class__.__name__, e.response.status_code
- )
+ details = "({}, Status Code: {})".format(e.__class__.__name__, e.response.status_code)
else:
details = "({})".format(e.__class__.__name__)
raise Exception("Connection error to: {} {}.".format(url, details))
@@ -133,29 +158,30 @@ def _define_column_type(column):
else:
return TYPE_STRING
- def _clickhouse_query(self, query):
+ def _clickhouse_query(self, query, session_id=None, session_check=None):
+ logger.debug(f"{self.name()} is about to execute query: %s", query)
+
query += "\nFORMAT JSON"
- result = self._send_query(query)
+
+ response = self._send_query(query, session_id, session_check)
+
columns = []
columns_int64 = [] # db converts value to string if its type equals UInt64
columns_totals = {}
- for r in result["meta"]:
+ meta = response.get("meta", [])
+ for r in meta:
column_name = r["name"]
column_type = self._define_column_type(r["type"])
if r["type"] in ("Int64", "UInt64", "Nullable(Int64)", "Nullable(UInt64)"):
columns_int64.append(column_name)
else:
- columns_totals[column_name] = (
- "Total" if column_type == TYPE_STRING else None
- )
+ columns_totals[column_name] = "Total" if column_type == TYPE_STRING else None
- columns.append(
- {"name": column_name, "friendly_name": column_name, "type": column_type}
- )
+ columns.append({"name": column_name, "friendly_name": column_name, "type": column_type})
- rows = result["data"]
+ rows = response.get("data", [])
for row in rows:
for column in columns_int64:
try:
@@ -163,8 +189,8 @@ def _clickhouse_query(self, query):
except TypeError:
row[column] = None
- if "totals" in result:
- totals = result["totals"]
+ if "totals" in response:
+ totals = response["totals"]
for column, value in columns_totals.items():
totals[column] = value
rows.append(totals)
@@ -172,14 +198,27 @@ def _clickhouse_query(self, query):
return {"columns": columns, "rows": rows}
def run_query(self, query, user):
- logger.debug("Clickhouse is about to execute query: %s", query)
- if query == "":
- json_data = None
+ queries = split_multi_query(query)
+
+ if not queries:
+ data = None
error = "Query is empty"
- return json_data, error
+ return data, error
+
try:
- q = self._clickhouse_query(query)
- data = json_dumps(q)
+ # If just one query was given no session is needed
+ if len(queries) == 1:
+ data = self._clickhouse_query(queries[0])
+ else:
+ # If more than one query was given, a session is needed. Parameter session_check must be false
+ # for the first query
+ session_id = "redash_{}".format(uuid4().hex)
+
+ data = self._clickhouse_query(queries[0], session_id, session_check=False)
+
+ for query in queries[1:]:
+ data = self._clickhouse_query(query, session_id, session_check=True)
+
error = None
except Exception as e:
data = None
diff --git a/redash/query_runner/cloudwatch.py b/redash/query_runner/cloudwatch.py
index c4640a537d..699834c0a9 100644
--- a/redash/query_runner/cloudwatch.py
+++ b/redash/query_runner/cloudwatch.py
@@ -1,15 +1,18 @@
-import yaml
import datetime
+import yaml
+
from redash.query_runner import BaseQueryRunner, register
-from redash.utils import json_dumps, parse_human_time
+from redash.utils import parse_human_time
try:
import boto3
+
enabled = True
except ImportError:
enabled = False
+
def parse_response(results):
columns = [
{"name": "id", "type": "string"},
@@ -118,7 +121,7 @@ def run_query(self, query, user):
rows, columns = parse_response(results)
- return json_dumps({"rows": rows, "columns": columns}), None
+ return {"rows": rows, "columns": columns}, None
register(CloudWatch)
diff --git a/redash/query_runner/cloudwatch_insights.py b/redash/query_runner/cloudwatch_insights.py
index 139d5b678a..f0ebcea117 100644
--- a/redash/query_runner/cloudwatch_insights.py
+++ b/redash/query_runner/cloudwatch_insights.py
@@ -1,13 +1,15 @@
-import yaml
import datetime
import time
+import yaml
+
from redash.query_runner import BaseQueryRunner, register
-from redash.utils import json_dumps, parse_human_time
+from redash.utils import parse_human_time
try:
import boto3
- from botocore.exceptions import ParamValidationError
+ from botocore.exceptions import ParamValidationError # noqa: F401
+
enabled = True
except ImportError:
enabled = False
@@ -118,9 +120,7 @@ def get_schema(self, get_stats=False):
log_groups.append(
{
"name": group_name,
- "columns": [
- field["name"] for field in fields["logGroupFields"]
- ],
+ "columns": [field["name"] for field in fields["logGroupFields"]],
}
)
@@ -139,18 +139,14 @@ def run_query(self, query, user):
data = parse_response(result)
break
if result["status"] in ("Failed", "Timeout", "Unknown", "Cancelled"):
- raise Exception(
- "CloudWatch Insights Query Execution Status: {}".format(
- result["status"]
- )
- )
+ raise Exception("CloudWatch Insights Query Execution Status: {}".format(result["status"]))
elif elapsed > TIMEOUT:
raise Exception("Request exceeded timeout.")
else:
time.sleep(POLL_INTERVAL)
elapsed += POLL_INTERVAL
- return json_dumps(data), None
+ return data, None
register(CloudWatchInsights)
diff --git a/redash/query_runner/corporate_memory.py b/redash/query_runner/corporate_memory.py
index 7f43b22869..0eb33c89f1 100644
--- a/redash/query_runner/corporate_memory.py
+++ b/redash/query_runner/corporate_memory.py
@@ -4,17 +4,21 @@
seeAlso: https://eccenca.com/
"""
-import logging
import json
+import logging
from os import environ
from redash.query_runner import BaseQueryRunner
-from redash.utils import json_dumps, json_loads
+
from . import register
try:
- from cmem.cmempy.queries import SparqlQuery, QueryCatalog, QUERY_STRING
from cmem.cmempy.dp.proxy.graph import get_graphs_list
+ from cmem.cmempy.queries import ( # noqa: F401
+ QUERY_STRING,
+ QueryCatalog,
+ SparqlQuery,
+ )
enabled = True
except ImportError:
@@ -110,7 +114,7 @@ def _transform_sparql_results(results):
logger.info("results are: {}".format(results))
# Not sure why we do not use the json package here but all other
# query runner do it the same way :-)
- sparql_results = json_loads(results)
+ sparql_results = results
# transform all bindings to redash rows
rows = []
for sparql_row in sparql_results["results"]["bindings"]:
@@ -128,7 +132,7 @@ def _transform_sparql_results(results):
columns.append({"name": var, "friendly_name": var, "type": "string"})
# Not sure why we do not use the json package here but all other
# query runner do it the same way :-)
- return json_dumps({"columns": columns, "rows": rows})
+ return {"columns": columns, "rows": rows}
@classmethod
def name(cls):
@@ -151,9 +155,7 @@ def run_query(self, query, user):
# type of None means, there is an error in the query
# so execution is at least tried on endpoint
if query_type not in ["SELECT", None]:
- raise ValueError(
- "Queries of type {} can not be processed by redash.".format(query_type)
- )
+ raise ValueError("Queries of type {} can not be processed by redash.".format(query_type))
self._setup_environment()
try:
diff --git a/redash/query_runner/couchbase.py b/redash/query_runner/couchbase.py
index 6f264c5333..4a40ad7499 100644
--- a/redash/query_runner/couchbase.py
+++ b/redash/query_runner/couchbase.py
@@ -1,16 +1,20 @@
import datetime
import logging
-from dateutil.parser import parse
-
-from redash.query_runner import *
-from redash.utils import JSONEncoder, json_dumps, json_loads, parse_human_time
-import json
+from redash.query_runner import (
+ TYPE_BOOLEAN,
+ TYPE_DATETIME,
+ TYPE_FLOAT,
+ TYPE_INTEGER,
+ TYPE_STRING,
+ BaseQueryRunner,
+ register,
+)
logger = logging.getLogger(__name__)
try:
+ import httplib2 # noqa: F401
import requests
- import httplib2
except ImportError as e:
logger.error("Failed to import: " + str(e))
@@ -48,9 +52,7 @@ def parse_results(results):
{
"name": column_name,
"friendly_name": column_name,
- "type": TYPES_MAP.get(
- type(row[key][inner_key]), TYPE_STRING
- ),
+ "type": TYPES_MAP.get(type(row[key][inner_key]), TYPE_STRING),
}
)
@@ -104,7 +106,7 @@ def enabled(cls):
return True
def test_connection(self):
- result = self.call_service(self.noop_query, "")
+ self.call_service(self.noop_query, "")
def get_buckets(self, query, name_param):
defaultColumns = ["meta().id"]
@@ -117,7 +119,6 @@ def get_buckets(self, query, name_param):
return list(schema.values())
def get_schema(self, get_stats=False):
-
try:
# Try fetch from Analytics
return self.get_buckets(
@@ -153,7 +154,7 @@ def run_query(self, query, user):
rows, columns = parse_results(result.json()["results"])
data = {"columns": columns, "rows": rows}
- return json_dumps(data), None
+ return data, None
@classmethod
def name(cls):
diff --git a/redash/query_runner/databend.py b/redash/query_runner/databend.py
new file mode 100644
index 0000000000..5e8062061b
--- /dev/null
+++ b/redash/query_runner/databend.py
@@ -0,0 +1,145 @@
+try:
+ import re
+
+ from databend_sqlalchemy import connector
+
+ enabled = True
+except ImportError:
+ enabled = False
+
+from redash.query_runner import (
+ TYPE_DATE,
+ TYPE_DATETIME,
+ TYPE_FLOAT,
+ TYPE_INTEGER,
+ TYPE_STRING,
+ BaseQueryRunner,
+ register,
+)
+
+
+class Databend(BaseQueryRunner):
+ noop_query = "SELECT 1"
+
+ @classmethod
+ def configuration_schema(cls):
+ return {
+ "type": "object",
+ "properties": {
+ "host": {"type": "string", "default": "localhost"},
+ "port": {"type": "string", "default": "8000"},
+ "username": {"type": "string"},
+ "password": {"type": "string", "default": ""},
+ "database": {"type": "string"},
+ "secure": {"type": "boolean", "default": False},
+ },
+ "order": ["username", "password", "host", "port", "database"],
+ "required": ["username", "database"],
+ "secret": ["password"],
+ }
+
+ @classmethod
+ def name(cls):
+ return "Databend"
+
+ @classmethod
+ def type(cls):
+ return "databend"
+
+ @classmethod
+ def enabled(cls):
+ return enabled
+
+ @staticmethod
+ def _define_column_type(column_type):
+ c = column_type.lower()
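+ # Unwrap Nullable(<type>) so the inner type drives the mapping.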
+ f = re.search(r"^nullable\((.*)\)$", c)
+ if f is not None:
+ c = f.group(1)
+ if c.startswith("int") or c.startswith("uint"):
+ return TYPE_INTEGER
+ elif c.startswith("float"):
+ return TYPE_FLOAT
+ elif c == "datetime":
+ return TYPE_DATETIME
+ elif c == "date":
+ return TYPE_DATE
+ else:
+ return TYPE_STRING
+
+ def run_query(self, query, user):
+ host = self.configuration.get("host") or "localhost"
+ port = self.configuration.get("port") or "8000"
+ username = self.configuration.get("username") or "root"
+ password = self.configuration.get("password") or ""
+ database = self.configuration.get("database") or "default"
+ secure = self.configuration.get("secure") or False
+ connection = connector.connect(f"databend://{username}:{password}@{host}:{port}/{database}?secure={secure}")
+ cursor = connection.cursor()
+
+ try:
+ cursor.execute(query)
+ columns = self.fetch_columns([(i[0], self._define_column_type(i[1])) for i in cursor.description])
+ rows = [dict(zip((column["name"] for column in columns), row)) for row in cursor]
+
+ data = {"columns": columns, "rows": rows}
+ error = None
+ finally:
+ connection.close()
+
+ return data, error
+
+ def get_schema(self, get_stats=False):
+ query = """
+ SELECT TABLE_SCHEMA,
+ TABLE_NAME,
+ COLUMN_NAME
+ FROM INFORMATION_SCHEMA.COLUMNS
+ WHERE TABLE_SCHEMA NOT IN ('information_schema', 'system')
+ """
+
+ results, error = self.run_query(query, None)
+
+ if error is not None:
+ self._handle_run_query_error(error)
+
+ schema = {}
+
+ for row in results["rows"]:
+ table_name = "{}.{}".format(row["table_schema"], row["table_name"])
+
+ if table_name not in schema:
+ schema[table_name] = {"name": table_name, "columns": []}
+
+ schema[table_name]["columns"].append(row["column_name"])
+
+ return list(schema.values())
+
+ def _get_tables(self):
+ query = """
+ SELECT TABLE_SCHEMA,
+ TABLE_NAME,
+ COLUMN_NAME
+ FROM INFORMATION_SCHEMA.COLUMNS
+ WHERE TABLE_SCHEMA NOT IN ('information_schema', 'system')
+ """
+
+ results, error = self.run_query(query, None)
+
+ if error is not None:
+ self._handle_run_query_error(error)
+
+ schema = {}
+
+ for row in results["rows"]:
+ table_name = "{}.{}".format(row["table_schema"], row["table_name"])
+
+ if table_name not in schema:
+ schema[table_name] = {"name": table_name, "columns": []}
+
+ schema[table_name]["columns"].append(row["column_name"])
+
+ return list(schema.values())
+
+
+register(Databend)
diff --git a/redash/query_runner/databricks.py b/redash/query_runner/databricks.py
index 5cc98ce17f..886ba9b8b4 100644
--- a/redash/query_runner/databricks.py
+++ b/redash/query_runner/databricks.py
@@ -1,21 +1,21 @@
import datetime
import logging
import os
-import sqlparse
+
+from redash import __version__, statsd_client
from redash.query_runner import (
- NotSupported,
- register,
- BaseSQLQueryRunner,
- TYPE_STRING,
TYPE_BOOLEAN,
TYPE_DATE,
TYPE_DATETIME,
- TYPE_INTEGER,
TYPE_FLOAT,
+ TYPE_INTEGER,
+ TYPE_STRING,
+ BaseSQLQueryRunner,
+ NotSupported,
+ register,
+ split_sql_statements,
)
from redash.settings import cast_int_or_default
-from redash.utils import json_dumps, json_loads
-from redash import __version__, settings, statsd_client
try:
import pyodbc
@@ -37,72 +37,11 @@
logger = logging.getLogger(__name__)
+
def _build_odbc_connection_string(**kwargs):
return ";".join([f"{k}={v}" for k, v in kwargs.items()])
-def split_sql_statements(query):
- def strip_trailing_comments(stmt):
- idx = len(stmt.tokens) - 1
- while idx >= 0:
- tok = stmt.tokens[idx]
- if tok.is_whitespace or sqlparse.utils.imt(
- tok, i=sqlparse.sql.Comment, t=sqlparse.tokens.Comment
- ):
- stmt.tokens[idx] = sqlparse.sql.Token(sqlparse.tokens.Whitespace, " ")
- else:
- break
- idx -= 1
- return stmt
-
- def strip_trailing_semicolon(stmt):
- idx = len(stmt.tokens) - 1
- while idx >= 0:
- tok = stmt.tokens[idx]
- # we expect that trailing comments already are removed
- if not tok.is_whitespace:
- if (
- sqlparse.utils.imt(tok, t=sqlparse.tokens.Punctuation)
- and tok.value == ";"
- ):
- stmt.tokens[idx] = sqlparse.sql.Token(
- sqlparse.tokens.Whitespace, " "
- )
- break
- idx -= 1
- return stmt
-
- def is_empty_statement(stmt):
- strip_comments = sqlparse.filters.StripCommentsFilter()
-
- # copy statement object. `copy.deepcopy` fails to do this, so just re-parse it
- st = sqlparse.engine.FilterStack()
- stmt = next(st.run(sqlparse.text_type(stmt)))
-
- sql = sqlparse.text_type(strip_comments.process(stmt))
- return sql.strip() == ""
-
- stack = sqlparse.engine.FilterStack()
-
- result = [stmt for stmt in stack.run(query)]
- result = [strip_trailing_comments(stmt) for stmt in result]
- result = [strip_trailing_semicolon(stmt) for stmt in result]
- result = [
- sqlparse.text_type(stmt).strip()
- for stmt in result
- if not is_empty_statement(stmt)
- ]
-
- if len(result) > 0:
- return result
-
- return [""] # if all statements were empty - return a single empty statement
-
-
-def combine_sql_statements(queries):
- return ";\n".join(queries)
-
-
class Databricks(BaseSQLQueryRunner):
noop_query = "SELECT 1"
should_annotate_query = False
@@ -165,37 +104,23 @@ def run_query(self, query, user):
if cursor.description is not None:
result_set = cursor.fetchmany(ROW_LIMIT)
- columns = self.fetch_columns(
- [
- (i[0], TYPES_MAP.get(i[1], TYPE_STRING))
- for i in cursor.description
- ]
- )
-
- rows = [
- dict(zip((column["name"] for column in columns), row))
- for row in result_set
- ]
+ columns = self.fetch_columns([(i[0], TYPES_MAP.get(i[1], TYPE_STRING)) for i in cursor.description])
+
+ rows = [dict(zip((column["name"] for column in columns), row)) for row in result_set]
data = {"columns": columns, "rows": rows}
- if (
- len(result_set) >= ROW_LIMIT
- and cursor.fetchone() is not None
- ):
+ if len(result_set) >= ROW_LIMIT and cursor.fetchone() is not None:
logger.warning("Truncated result set.")
statsd_client.incr("redash.query_runner.databricks.truncated")
data["truncated"] = True
- json_data = json_dumps(data)
error = None
else:
error = None
- json_data = json_dumps(
- {
- "columns": [{"name": "result", "type": TYPE_STRING}],
- "rows": [{"result": "No data was returned."}],
- }
- )
+ data = {
+ "columns": [{"name": "result", "type": TYPE_STRING}],
+ "rows": [{"result": "No data was returned."}],
+ }
cursor.close()
except pyodbc.Error as e:
@@ -203,9 +128,9 @@ def run_query(self, query, user):
error = str(e.args[1])
else:
error = str(e)
- json_data = None
+ data = None
- return json_data, error
+ return data, error
def get_schema(self):
raise NotSupported()
@@ -215,9 +140,7 @@ def get_databases(self):
results, error = self.run_query(query, None)
if error is not None:
- raise Exception("Failed getting schema.")
-
- results = json_loads(results)
+ self._handle_run_query_error(error)
first_column_name = results["columns"][0]["name"]
return [row[first_column_name] for row in results["rows"]]
diff --git a/redash/query_runner/db2.py b/redash/query_runner/db2.py
index ea09ad6da8..88a843af98 100644
--- a/redash/query_runner/db2.py
+++ b/redash/query_runner/db2.py
@@ -1,12 +1,22 @@
import logging
-from redash.query_runner import *
-from redash.utils import json_dumps, json_loads
+from redash.query_runner import (
+ TYPE_DATE,
+ TYPE_DATETIME,
+ TYPE_FLOAT,
+ TYPE_INTEGER,
+ TYPE_STRING,
+ BaseSQLQueryRunner,
+ InterruptException,
+ JobTimeoutException,
+ register,
+)
logger = logging.getLogger(__name__)
try:
import select
+
import ibm_db_dbi
types_map = {
@@ -55,7 +65,7 @@ def type(cls):
@classmethod
def enabled(cls):
try:
- import ibm_db
+ import ibm_db # noqa: F401
except ImportError:
return False
@@ -65,9 +75,7 @@ def _get_definitions(self, schema, query):
results, error = self.run_query(query, None)
if error is not None:
- raise Exception("Failed getting schema.")
-
- results = json_loads(results)
+ self._handle_run_query_error(error)
for row in results["rows"]:
if row["TABLE_SCHEMA"] != "public":
@@ -114,33 +122,27 @@ def run_query(self, query, user):
cursor.execute(query)
if cursor.description is not None:
- columns = self.fetch_columns(
- [(i[0], types_map.get(i[1], None)) for i in cursor.description]
- )
- rows = [
- dict(zip((column["name"] for column in columns), row))
- for row in cursor
- ]
+ columns = self.fetch_columns([(i[0], types_map.get(i[1], None)) for i in cursor.description])
+ rows = [dict(zip((column["name"] for column in columns), row)) for row in cursor]
data = {"columns": columns, "rows": rows}
error = None
- json_data = json_dumps(data)
else:
error = "Query completed but it returned no data."
- json_data = None
- except (select.error, OSError) as e:
+ data = None
+ except (select.error, OSError):
error = "Query interrupted. Please retry."
- json_data = None
+ data = None
except ibm_db_dbi.DatabaseError as e:
error = str(e)
- json_data = None
+ data = None
except (KeyboardInterrupt, InterruptException, JobTimeoutException):
connection.cancel()
raise
finally:
connection.close()
- return json_data, error
+ return data, error
register(DB2)
diff --git a/redash/query_runner/dgraph.py b/redash/query_runner/dgraph.py
index f48f8d91f3..302a474b91 100644
--- a/redash/query_runner/dgraph.py
+++ b/redash/query_runner/dgraph.py
@@ -8,18 +8,17 @@
enabled = False
from redash.query_runner import BaseQueryRunner, register
-from redash.utils import json_dumps
def reduce_item(reduced_item, key, value):
"""From https://github.com/vinay20045/json-to-csv"""
# Reduction Condition 1
- if type(value) is list:
+ if isinstance(value, list):
for i, sub_item in enumerate(value):
reduce_item(reduced_item, "{}.{}".format(key, i), sub_item)
# Reduction Condition 2
- elif type(value) is dict:
+ elif isinstance(value, dict):
sub_keys = value.keys()
for sub_key in sub_keys:
reduce_item(reduced_item, "{}.{}".format(key, sub_key), value[sub_key])
@@ -81,8 +80,7 @@ def run_dgraph_query_raw(self, query):
client_stub.close()
def run_query(self, query, user):
-
- json_data = None
+ data = None
error = None
try:
@@ -106,18 +104,14 @@ def run_query(self, query, user):
header = list(set(header))
- columns = [
- {"name": c, "friendly_name": c, "type": "string"} for c in header
- ]
+ columns = [{"name": c, "friendly_name": c, "type": "string"} for c in header]
# finally, assemble both the columns and data
data = {"columns": columns, "rows": processed_data}
-
- json_data = json_dumps(data)
except Exception as e:
error = e
- return json_data, error
+ return data, error
def get_schema(self, get_stats=False):
"""Queries Dgraph for all the predicates, their types, their tokenizers, etc.
diff --git a/redash/query_runner/drill.py b/redash/query_runner/drill.py
index e843e68384..a011e8590c 100644
--- a/redash/query_runner/drill.py
+++ b/redash/query_runner/drill.py
@@ -1,19 +1,18 @@
-import os
import logging
+import os
import re
from dateutil import parser
from redash.query_runner import (
- BaseHTTPQueryRunner,
- register,
+ TYPE_BOOLEAN,
TYPE_DATETIME,
- TYPE_INTEGER,
TYPE_FLOAT,
- TYPE_BOOLEAN,
+ TYPE_INTEGER,
+ BaseHTTPQueryRunner,
guess_type,
+ register,
)
-from redash.utils import json_dumps, json_loads
logger = logging.getLogger(__name__)
@@ -51,9 +50,7 @@ def parse_response(data):
types = {}
for c in cols:
- columns.append(
- {"name": c, "type": guess_type(first_row[c]), "friendly_name": c}
- )
+ columns.append({"name": c, "type": guess_type(first_row[c]), "friendly_name": c})
for col in columns:
types[col["name"]] = col["type"]
@@ -96,18 +93,13 @@ def run_query(self, query, user):
payload = {"queryType": "SQL", "query": query}
- response, error = self.get_response(
- drill_url, http_method="post", json=payload
- )
+ response, error = self.get_response(drill_url, http_method="post", json=payload)
if error is not None:
return None, error
- results = parse_response(response.json())
-
- return json_dumps(results), None
+ return parse_response(response.json()), None
def get_schema(self, get_stats=False):
-
query = """
SELECT DISTINCT
TABLE_SCHEMA,
@@ -135,9 +127,7 @@ def get_schema(self, get_stats=False):
results, error = self.run_query(query, None)
if error is not None:
- raise Exception("Failed getting schema.")
-
- results = json_loads(results)
+ self._handle_run_query_error(error)
schema = {}
diff --git a/redash/query_runner/druid.py b/redash/query_runner/druid.py
index 0790d5e1e6..b20a01953d 100644
--- a/redash/query_runner/druid.py
+++ b/redash/query_runner/druid.py
@@ -5,9 +5,13 @@
except ImportError:
enabled = False
-from redash.query_runner import BaseQueryRunner, register
-from redash.query_runner import TYPE_STRING, TYPE_INTEGER, TYPE_BOOLEAN
-from redash.utils import json_dumps, json_loads
+from redash.query_runner import (
+ TYPE_BOOLEAN,
+ TYPE_INTEGER,
+ TYPE_STRING,
+ BaseQueryRunner,
+ register,
+)
TYPES_MAP = {1: TYPE_STRING, 2: TYPE_INTEGER, 3: TYPE_BOOLEAN}
@@ -49,21 +53,15 @@ def run_query(self, query, user):
try:
cursor.execute(query)
- columns = self.fetch_columns(
- [(i[0], TYPES_MAP.get(i[1], None)) for i in cursor.description]
- )
- rows = [
- dict(zip((column["name"] for column in columns), row)) for row in cursor
- ]
+ columns = self.fetch_columns([(i[0], TYPES_MAP.get(i[1], None)) for i in cursor.description])
+ rows = [dict(zip((column["name"] for column in columns), row)) for row in cursor]
data = {"columns": columns, "rows": rows}
error = None
- json_data = json_dumps(data)
- print(json_data)
finally:
connection.close()
- return json_data, error
+ return data, error
def get_schema(self, get_stats=False):
query = """
@@ -77,10 +75,9 @@ def get_schema(self, get_stats=False):
results, error = self.run_query(query, None)
if error is not None:
- raise Exception("Failed getting schema.")
+ self._handle_run_query_error(error)
schema = {}
- results = json_loads(results)
for row in results["rows"]:
table_name = "{}.{}".format(row["TABLE_SCHEMA"], row["TABLE_NAME"])
diff --git a/redash/query_runner/dynamodb_sql.py b/redash/query_runner/dynamodb_sql.py
deleted file mode 100644
index 965cc8fc2f..0000000000
--- a/redash/query_runner/dynamodb_sql.py
+++ /dev/null
@@ -1,144 +0,0 @@
-import logging
-import sys
-
-from redash.query_runner import *
-from redash.utils import json_dumps
-
-logger = logging.getLogger(__name__)
-
-try:
- from dql import Engine, FragmentEngine
- from dynamo3 import DynamoDBError
- from pyparsing import ParseException
-
- enabled = True
-except ImportError as e:
- enabled = False
-
-types_map = {
- "UNICODE": TYPE_INTEGER,
- "TINYINT": TYPE_INTEGER,
- "SMALLINT": TYPE_INTEGER,
- "INT": TYPE_INTEGER,
- "DOUBLE": TYPE_FLOAT,
- "DECIMAL": TYPE_FLOAT,
- "FLOAT": TYPE_FLOAT,
- "REAL": TYPE_FLOAT,
- "BOOLEAN": TYPE_BOOLEAN,
- "TIMESTAMP": TYPE_DATETIME,
- "DATE": TYPE_DATETIME,
- "CHAR": TYPE_STRING,
- "STRING": TYPE_STRING,
- "VARCHAR": TYPE_STRING,
-}
-
-
-class DynamoDBSQL(BaseSQLQueryRunner):
- should_annotate_query = False
-
- @classmethod
- def configuration_schema(cls):
- return {
- "type": "object",
- "properties": {
- "region": {"type": "string", "default": "us-east-1"},
- "access_key": {"type": "string"},
- "secret_key": {"type": "string"},
- },
- "required": ["access_key", "secret_key"],
- "secret": ["secret_key"],
- }
-
- def test_connection(self):
- engine = self._connect()
- list(engine.connection.list_tables())
-
- @classmethod
- def type(cls):
- return "dynamodb_sql"
-
- @classmethod
- def name(cls):
- return "DynamoDB (with DQL)"
-
- def _connect(self):
- engine = FragmentEngine()
- config = self.configuration.to_dict()
-
- if not config.get("region"):
- config["region"] = "us-east-1"
-
- if config.get("host") == "":
- config["host"] = None
-
- engine.connect(**config)
-
- return engine
-
- def _get_tables(self, schema):
- engine = self._connect()
-
- # We can't use describe_all because sometimes a user might give List permission
- # for * (all tables), but describe permission only for some of them.
- tables = engine.connection.list_tables()
- for table_name in tables:
- try:
- table = engine.describe(table_name, True)
- schema[table.name] = {
- "name": table.name,
- "columns": list(table.attrs.keys()),
- }
- except DynamoDBError:
- pass
-
- def run_query(self, query, user):
- engine = None
- try:
- engine = self._connect()
-
- if not query.endswith(";"):
- query = query + ";"
-
- result = engine.execute(query)
-
- columns = []
- rows = []
-
- # When running a count query it returns the value as a string, in which case
- # we transform it into a dictionary to be the same as regular queries.
- if isinstance(result, str):
- # when count < scanned_count, dql returns a string with number of rows scanned
- value = result.split(" (")[0]
- if value:
- value = int(value)
- result = [{"value": value}]
-
- for item in result:
- if not columns:
- for k, v in item.items():
- columns.append(
- {
- "name": k,
- "friendly_name": k,
- "type": types_map.get(str(type(v)).upper(), None),
- }
- )
- rows.append(item)
-
- data = {"columns": columns, "rows": rows}
- json_data = json_dumps(data)
- error = None
- except ParseException as e:
- error = "Error parsing query at line {} (column {}):\n{}".format(
- e.lineno, e.column, e.line
- )
- json_data = None
- except (KeyboardInterrupt, JobTimeoutException):
- if engine and engine.connection:
- engine.connection.cancel()
- raise
-
- return json_data, error
-
-
-register(DynamoDBSQL)
diff --git a/redash/query_runner/e6data.py b/redash/query_runner/e6data.py
new file mode 100644
index 0000000000..0087c22e1a
--- /dev/null
+++ b/redash/query_runner/e6data.py
@@ -0,0 +1,152 @@
+import logging
+
+from redash.query_runner import (
+ TYPE_BOOLEAN,
+ TYPE_DATE,
+ TYPE_DATETIME,
+ TYPE_FLOAT,
+ TYPE_INTEGER,
+ TYPE_STRING,
+ BaseQueryRunner,
+ register,
+)
+
+try:
+ from e6data_python_connector import Connection
+
+ enabled = True
+
+except ImportError:
+ enabled = False
+
+
+logger = logging.getLogger(__name__)
+
+E6DATA_TYPES_MAPPING = {
+ "INT": TYPE_INTEGER,
+ "BYTE": TYPE_INTEGER,
+ "INTEGER": TYPE_INTEGER,
+ "LONG": TYPE_INTEGER,
+ "SHORT": TYPE_INTEGER,
+ "FLOAT": TYPE_FLOAT,
+ "DOUBLE": TYPE_FLOAT,
+ "STRING": TYPE_STRING,
+ "DATETIME": TYPE_DATETIME,
+ "BINARY": TYPE_INTEGER,
+ "ARRAY": TYPE_STRING,
+ "MAP": TYPE_STRING,
+ "STRUCT": TYPE_STRING,
+ "UNION_TYPE": TYPE_STRING,
+ "DECIMAL_TYPE": TYPE_FLOAT,
+ "DATE": TYPE_DATE,
+ "INT96": TYPE_INTEGER,
+ "BOOLEAN": TYPE_BOOLEAN,
+ "CHAR": TYPE_STRING,
+}
+
+
+class e6data(BaseQueryRunner):
+ limit_query = " LIMIT 1000"
+
+ should_annotate_query = False
+
+ def __init__(self, configuration):
+ super().__init__(configuration)
+ self.connection = Connection(
+ host=self.configuration.get("host"),
+ port=self.configuration.get("port"),
+ username=self.configuration.get("username"),
+ database=self.configuration.get("database"),
+ password=self.configuration.get("password"),
+ )
+
+ @classmethod
+ def configuration_schema(cls):
+ return {
+ "type": "object",
+ "properties": {
+ "host": {"type": "string"},
+ "port": {"type": "number"},
+ "username": {"type": "string"},
+ "password": {"type": "string"},
+ "catalog": {"type": "string"},
+ "database": {"type": "string"},
+ },
+ "order": [
+ "host",
+ "port",
+ "username",
+ "password",
+ "catalog",
+ "database",
+ ],
+ "required": ["host", "port", "username", "password", "catalog", "database"],
+ "secret": ["password"],
+ }
+
+ @classmethod
+ def enabled(cls):
+ return enabled
+
+ @classmethod
+ def type(cls):
+ return "e6data"
+
+ def run_query(self, query, user):
+ cursor = None
+ try:
+ cursor = self.connection.cursor(catalog_name=self.configuration.get("catalog"))
+ cursor.execute(query)
+ results = cursor.fetchall()
+ description = cursor.description
+ columns = []
+ for c in description:
+ column_name, column_type = c[0], E6DATA_TYPES_MAPPING.get(c[1], None)
+ columns.append({"name": column_name, "type": column_type})
+ rows = [dict(zip([c["name"] for c in columns], r)) for r in results]
+ data = {"columns": columns, "rows": rows}
+ error = None
+
+ except Exception as e:
+ logger.debug(e)
+ data = None
+ error = e
+ finally:
+ if cursor is not None:
+ cursor.clear()
+ cursor.close()
+
+ return data, error
+
+ def test_connection(self):
+ self.noop_query = "SELECT 1"
+
+ data, error = self.run_query(self.noop_query, None)
+
+ if error is not None:
+ raise Exception(error)
+
+ def get_schema(self, get_stats=False):
+ tables = self.connection.get_tables(self.configuration.get("catalog"), self.configuration.get("database"))
+
+ schema = list()
+
+ for table_name in tables:
+ columns = self.connection.get_columns(
+ self.configuration.get("catalog"),
+ self.configuration.get("database"),
+ table_name,
+ )
+ columns_with_type = []
+
+ for column in columns:
+ redash_type = E6DATA_TYPES_MAPPING.get(column["fieldType"], None)
+ columns_with_type.append({"name": column["fieldName"], "type": redash_type})
+
+ table_schema = {"name": table_name, "columns": columns_with_type}
+
+ schema.append(table_schema)
+
+ return schema
+
+
+register(e6data)
diff --git a/redash/query_runner/elasticsearch.py b/redash/query_runner/elasticsearch.py
index d96dd1e433..aa34d71eff 100644
--- a/redash/query_runner/elasticsearch.py
+++ b/redash/query_runner/elasticsearch.py
@@ -1,14 +1,22 @@
import logging
-import sys
-import urllib.request
-import urllib.parse
import urllib.error
+import urllib.parse
+import urllib.request
import requests
from requests.auth import HTTPBasicAuth
-from redash.query_runner import *
-from redash.utils import json_dumps, json_loads
+from redash.query_runner import (
+ TYPE_BOOLEAN,
+ TYPE_DATE,
+ TYPE_FLOAT,
+ TYPE_INTEGER,
+ TYPE_STRING,
+ BaseQueryRunner,
+ JobTimeoutException,
+ register,
+)
+from redash.utils import json_loads
try:
import http.client as http_client
@@ -44,6 +52,7 @@
class BaseElasticSearch(BaseQueryRunner):
should_annotate_query = False
DEBUG_ENABLED = False
+ deprecated = True
@classmethod
def configuration_schema(cls):
@@ -102,9 +111,7 @@ def _get_mappings(self, url):
mappings = r.json()
except requests.HTTPError as e:
logger.exception(e)
- error = "Failed to execute query. Return Code: {0} Reason: {1}".format(
- r.status_code, r.text
- )
+ error = "Failed to execute query. Return Code: {0} Reason: {1}".format(r.status_code, r.text)
mappings = None
except requests.exceptions.RequestException as e:
logger.exception(e)
@@ -122,19 +129,17 @@ def _get_query_mappings(self, url):
for index_name in mappings_data:
index_mappings = mappings_data[index_name]
for m in index_mappings.get("mappings", {}):
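+ # Skip mapping entries that are not dicts and therefore cannot carry "properties".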
+ if not isinstance(index_mappings["mappings"][m], dict):
+ continue
if "properties" not in index_mappings["mappings"][m]:
continue
for property_name in index_mappings["mappings"][m]["properties"]:
- property_data = index_mappings["mappings"][m]["properties"][
- property_name
- ]
+ property_data = index_mappings["mappings"][m]["properties"][property_name]
if property_name not in mappings:
property_type = property_data.get("type", None)
if property_type:
if property_type in ELASTICSEARCH_TYPES_MAPPING:
- mappings[property_name] = ELASTICSEARCH_TYPES_MAPPING[
- property_type
- ]
+ mappings[property_name] = ELASTICSEARCH_TYPES_MAPPING[property_type]
else:
mappings[property_name] = TYPE_STRING
# raise Exception("Unknown property type: {0}".format(property_type))
@@ -143,8 +148,7 @@ def _get_query_mappings(self, url):
def get_schema(self, *args, **kwargs):
def parse_doc(doc, path=None):
- """Recursively parse a doc type dictionary
- """
+ """Recursively parse a doc type dictionary"""
path = path or []
result = []
for field, description in doc["properties"].items():
@@ -173,12 +177,8 @@ def parse_doc(doc, path=None):
schema[name]["columns"] = sorted(set(columns))
return list(schema.values())
- def _parse_results(
- self, mappings, result_fields, raw_result, result_columns, result_rows
- ):
- def add_column_if_needed(
- mappings, column_name, friendly_name, result_columns, result_columns_index
- ):
+ def _parse_results(self, mappings, result_fields, raw_result, result_columns, result_rows): # noqa: C901
+ def add_column_if_needed(mappings, column_name, friendly_name, result_columns, result_columns_index):
if friendly_name not in result_columns_index:
result_columns.append(
{
@@ -200,14 +200,10 @@ def collect_value(mappings, row, key, value, type):
return
mappings[key] = type
- add_column_if_needed(
- mappings, key, key, result_columns, result_columns_index
- )
+ add_column_if_needed(mappings, key, key, result_columns, result_columns_index)
row[key] = value
- def collect_aggregations(
- mappings, rows, parent_key, data, row, result_columns, result_columns_index
- ):
+ def collect_aggregations(mappings, rows, parent_key, data, row, result_columns, result_columns_index):
if isinstance(data, dict):
for key, value in data.items():
val = collect_aggregations(
@@ -268,9 +264,7 @@ def collect_aggregations(
"string",
)
else:
- collect_value(
- mappings, result_row, parent_key, value["key"], "string"
- )
+ collect_value(mappings, result_row, parent_key, value["key"], "string")
return None
@@ -290,9 +284,7 @@ def collect_aggregations(
elif "aggregations" in raw_result:
if result_fields:
for field in result_fields:
- add_column_if_needed(
- mappings, field, field, result_columns, result_columns_index
- )
+ add_column_if_needed(mappings, field, field, result_columns, result_columns_index)
for key, data in raw_result["aggregations"].items():
collect_aggregations(
@@ -310,9 +302,7 @@ def collect_aggregations(
elif "hits" in raw_result and "hits" in raw_result["hits"]:
if result_fields:
for field in result_fields:
- add_column_if_needed(
- mappings, field, field, result_columns, result_columns_index
- )
+ add_column_if_needed(mappings, field, field, result_columns, result_columns_index)
for h in raw_result["hits"]["hits"]:
row = {}
@@ -322,36 +312,22 @@ def collect_aggregations(
if result_fields and column not in result_fields_index:
continue
- add_column_if_needed(
- mappings, column, column, result_columns, result_columns_index
- )
+ add_column_if_needed(mappings, column, column, result_columns, result_columns_index)
value = h[column_name][column]
- row[column] = (
- value[0]
- if isinstance(value, list) and len(value) == 1
- else value
- )
+ row[column] = value[0] if isinstance(value, list) and len(value) == 1 else value
result_rows.append(row)
else:
- raise Exception(
- "Redash failed to parse the results it got from Elasticsearch."
- )
+ raise Exception("Redash failed to parse the results it got from Elasticsearch.")
def test_connection(self):
try:
- r = requests.get(
- "{0}/_cluster/health".format(self.server_url), auth=self.auth
- )
+ r = requests.get("{0}/_cluster/health".format(self.server_url), auth=self.auth)
r.raise_for_status()
except requests.HTTPError as e:
logger.exception(e)
- raise Exception(
- "Failed to execute query. Return Code: {0} Reason: {1}".format(
- r.status_code, r.text
- )
- )
+ raise Exception("Failed to execute query. Return Code: {0} Reason: {1}".format(r.status_code, r.text))
except requests.exceptions.RequestException as e:
logger.exception(e)
raise Exception("Connection refused")
@@ -362,18 +338,14 @@ class Kibana(BaseElasticSearch):
def enabled(cls):
return True
- def _execute_simple_query(
- self, url, auth, _from, mappings, result_fields, result_columns, result_rows
- ):
+ def _execute_simple_query(self, url, auth, _from, mappings, result_fields, result_columns, result_rows):
url += "&from={0}".format(_from)
r = requests.get(url, auth=self.auth)
r.raise_for_status()
raw_result = r.json()
- self._parse_results(
- mappings, result_fields, raw_result, result_columns, result_rows
- )
+ self._parse_results(mappings, result_fields, raw_result, result_columns, result_rows)
total = raw_result["hits"]["total"]
result_size = len(raw_result["hits"]["hits"])
@@ -420,7 +392,7 @@ def run_query(self, query, user):
_from = 0
while True:
query_size = size if limit >= (_from + size) else (limit - _from)
- total = self._execute_simple_query(
+ self._execute_simple_query(
url + "&size={0}".format(query_size),
self.auth,
_from,
@@ -436,19 +408,18 @@ def run_query(self, query, user):
# TODO: Handle complete ElasticSearch queries (JSON based sent over HTTP POST)
raise Exception("Advanced queries are not supported")
- json_data = json_dumps({"columns": result_columns, "rows": result_rows})
+ data = {"columns": result_columns, "rows": result_rows}
except requests.HTTPError as e:
logger.exception(e)
- error = "Failed to execute query. Return Code: {0} Reason: {1}".format(
- r.status_code, r.text
- )
- json_data = None
+ r = e.response
+ error = "Failed to execute query. Return Code: {0} Reason: {1}".format(r.status_code, r.text)
+ data = None
except requests.exceptions.RequestException as e:
logger.exception(e)
error = "Connection refused"
- json_data = None
+ data = None
- return json_data, error
+ return data, error
class ElasticSearch(BaseElasticSearch):
@@ -489,26 +460,22 @@ def run_query(self, query, user):
result_columns = []
result_rows = []
- self._parse_results(
- mappings, result_fields, r.json(), result_columns, result_rows
- )
+ self._parse_results(mappings, result_fields, r.json(), result_columns, result_rows)
- json_data = json_dumps({"columns": result_columns, "rows": result_rows})
- except (KeyboardInterrupt, JobTimeoutException):
+ data = {"columns": result_columns, "rows": result_rows}
+ except (KeyboardInterrupt, JobTimeoutException) as e:
logger.exception(e)
raise
except requests.HTTPError as e:
logger.exception(e)
- error = "Failed to execute query. Return Code: {0} Reason: {1}".format(
- r.status_code, r.text
- )
- json_data = None
+ error = "Failed to execute query. Return Code: {0} Reason: {1}".format(r.status_code, r.text)
+ data = None
except requests.exceptions.RequestException as e:
logger.exception(e)
error = "Connection refused"
- json_data = None
+ data = None
- return json_data, error
+ return data, error
register(Kibana)
diff --git a/redash/query_runner/elasticsearch2.py b/redash/query_runner/elasticsearch2.py
new file mode 100644
index 0000000000..3570d10b65
--- /dev/null
+++ b/redash/query_runner/elasticsearch2.py
@@ -0,0 +1,308 @@
+import json
+import logging
+from typing import Optional, Tuple
+
+from redash.query_runner import (
+ TYPE_BOOLEAN,
+ TYPE_DATE,
+ TYPE_FLOAT,
+ TYPE_INTEGER,
+ TYPE_STRING,
+ BaseHTTPQueryRunner,
+ register,
+)
+
+logger = logging.getLogger(__name__)
+
+ELASTICSEARCH_TYPES_MAPPING = {
+ "integer": TYPE_INTEGER,
+ "long": TYPE_INTEGER,
+ "float": TYPE_FLOAT,
+ "double": TYPE_FLOAT,
+ "boolean": TYPE_BOOLEAN,
+ "string": TYPE_STRING,
+ "date": TYPE_DATE,
+ "object": TYPE_STRING,
+}
+
+
+TYPES_MAP = {
+ str: TYPE_STRING,
+ int: TYPE_INTEGER,
+ float: TYPE_FLOAT,
+ bool: TYPE_BOOLEAN,
+}
+
+
+class ElasticSearch2(BaseHTTPQueryRunner):
+ should_annotate_query = False
+
+ @classmethod
+ def name(cls):
+ return "Elasticsearch"
+
+ def __init__(self, *args, **kwargs):
+ super().__init__(*args, **kwargs)
+ self.syntax = "json"
+
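+ # Resolve paths against the configured "server" URL and always request JSON from Elasticsearch.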
+ def get_response(self, url, auth=None, http_method="get", **kwargs):
+ url = "{}{}".format(self.configuration["server"], url)
+ headers = kwargs.pop("headers", {})
+ headers["Accept"] = "application/json"
+ return super().get_response(url, auth, http_method, headers=headers, **kwargs)
+
+ def test_connection(self):
+ _, error = self.get_response("/_cluster/health")
+ if error is not None:
+ raise Exception(error)
+
+ def run_query(self, query, user):
+ query, url, result_fields = self._build_query(query)
+ response, error = self.get_response(url, http_method="post", json=query)
+ # Bail out on transport-level failures; response may be None in that case.
+ if error is not None:
+ return None, error
+ query_results = response.json()
+ data = self._parse_results(result_fields, query_results)
+ return data, None
+
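+ # The query body is the user's JSON; "index" and the optional "result_fields" list are Redash-specific keys stripped out before the request is built.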
+ def _build_query(self, query: str) -> Tuple[dict, str, Optional[list]]:
+ query = json.loads(query)
+ index_name = query.pop("index", "")
+ result_fields = query.pop("result_fields", None)
+ url = "/{}/_search".format(index_name)
+ return query, url, result_fields
+
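+ # Flatten the /_mappings payload into {index_name: {dotted.field.path: redash_type}}, accepting both typed and typeless mapping layouts.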
+ @classmethod
+ def _parse_mappings(cls, mappings_data: dict):
+ mappings = {}
+
+ def _parse_properties(prefix: str, properties: dict):
+ for property_name, property_data in properties.items():
+ if property_name not in mappings:
+ property_type = property_data.get("type", None)
+ nested_properties = property_data.get("properties", None)
+ if property_type:
+ mappings[index_name][prefix + property_name] = ELASTICSEARCH_TYPES_MAPPING.get(
+ property_type, TYPE_STRING
+ )
+ elif nested_properties:
+ new_prefix = prefix + property_name + "."
+ _parse_properties(new_prefix, nested_properties)
+
+ for index_name in mappings_data:
+ mappings[index_name] = {}
+ index_mappings = mappings_data[index_name]
+ try:
+ for m in index_mappings.get("mappings", {}):
+ _parse_properties("", index_mappings["mappings"][m]["properties"])
+ except KeyError:
+ _parse_properties("", index_mappings["mappings"]["properties"])
+
+ return mappings
+
+ def get_mappings(self):
+ response, error = self.get_response("/_mappings")
+ # Same guard as run_query: don't call .json() on a missing response.
+ if error is not None:
+ raise Exception(error)
+ return self._parse_mappings(response.json())
+
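+ # The schema browser entries are just the flattened mappings, one entry per index.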
+ def get_schema(self, *args, **kwargs):
+ schema = {}
+ for name, columns in self.get_mappings().items():
+ schema[name] = {"name": name, "columns": list(columns.keys())}
+ return list(schema.values())
+
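+ # Turn a raw search response (error, aggregations or hits) into the {"columns": [...], "rows": [...]} structure Redash expects.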
+ @classmethod
+ def _parse_results(cls, result_fields, raw_result): # noqa: C901
+ result_columns = []
+ result_rows = []
+ result_columns_index = {c["name"]: c for c in result_columns}
+ result_fields_index = {}
+
+ def add_column_if_needed(column_name, value=None):
+ if column_name not in result_columns_index:
+ result_columns.append(
+ {
+ "name": column_name,
+ "friendly_name": column_name,
+ "type": TYPES_MAP.get(type(value), TYPE_STRING),
+ }
+ )
+ result_columns_index[column_name] = result_columns[-1]
+
+ def get_row(rows, row):
+ if row is None:
+ row = {}
+ rows.append(row)
+ return row
+
+ def collect_value(row, key, value):
+ if result_fields and key not in result_fields_index:
+ return
+
+ add_column_if_needed(key, value)
+ row[key] = value
+
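+ # Copy one bucket's scalar values into the row; returns the key of a nested buckets aggregation, or "" for a leaf bucket.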
+ def parse_bucket_to_row(data, row, agg_key):
+ sub_agg_key = ""
+ for key, item in data.items():
+ if key == "key_as_string":
+ continue
+ if key == "key":
+ if "key_as_string" in data:
+ collect_value(row, agg_key, data["key_as_string"])
+ else:
+ collect_value(row, agg_key, data["key"])
+ continue
+
+ if isinstance(item, (str, int, float)):
+ collect_value(row, agg_key + "." + key, item)
+ elif isinstance(item, dict):
+ if "buckets" not in item:
+ for sub_key, sub_item in item.items():
+ collect_value(
+ row,
+ agg_key + "." + key + "." + sub_key,
+ sub_item,
+ )
+ else:
+ sub_agg_key = key
+
+ return sub_agg_key
+
+ def parse_buckets_list(rows, parent_key, data, row, depth):
+ if len(rows) > 0 and depth == 0:
+ row = rows.pop()
+
+ for value in data:
+ row = row.copy()
+ sub_agg_key = parse_bucket_to_row(value, row, parent_key)
+
+ if sub_agg_key == "":
+ rows.append(row)
+ else:
+ depth += 1
+ parse_buckets_list(rows, sub_agg_key, value[sub_agg_key]["buckets"], row, depth)
+
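+ # Walk the aggregation tree: every bucket contributes to a row, recursing into nested bucket lists.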
+ def collect_aggregations(rows, parent_key, data, row, depth):
+ row = get_row(rows, row)
+ parse_bucket_to_row(data, row, parent_key)
+
+ if "buckets" in data:
+ parse_buckets_list(rows, parent_key, data["buckets"], row, depth)
+
+ return None
+
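+ # Flatten nested dicts into dot-separated keys; single-element lists are unwrapped to their only value.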
+ def get_flatten_results(dd, separator=".", prefix=""):
+ if isinstance(dd, dict):
+ return {
+ prefix + separator + k if prefix else k: v
+ for kk, vv in dd.items()
+ for k, v in get_flatten_results(vv, separator, kk).items()
+ }
+ elif isinstance(dd, list) and len(dd) == 1:
+ return {prefix: dd[0]}
+ else:
+ return {prefix: dd}
+
+ if result_fields:
+ for r in result_fields:
+ result_fields_index[r] = None
+
+ if "error" in raw_result:
+ error = raw_result["error"]
+ if len(error) > 10240:
+ error = error[:10240] + "... continues"
+
+ raise Exception(error)
+ elif "aggregations" in raw_result:
+ for key, data in raw_result["aggregations"].items():
+ collect_aggregations(result_rows, key, data, None, 0)
+
+ elif "hits" in raw_result and "hits" in raw_result["hits"]:
+ for h in raw_result["hits"]["hits"]:
+ row = {}
+
+ fields_parameter_name = "_source" if "_source" in h else "fields"
+ for column in h[fields_parameter_name]:
+ if result_fields and column not in result_fields_index:
+ continue
+
+ unested_results = get_flatten_results({column: h[fields_parameter_name][column]})
+
+ for column_name, value in unested_results.items():
+ add_column_if_needed(column_name, value=value)
+ row[column_name] = value
+
+ result_rows.append(row)
+ else:
+ raise Exception("Redash failed to parse the results it got from Elasticsearch.")
+
+ return {"columns": result_columns, "rows": result_rows}
+
+
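+# SQL flavour that posts the raw query string to the Open Distro SQL endpoint.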
+class OpenDistroSQLElasticSearch(ElasticSearch2):
+ def __init__(self, *args, **kwargs):
+ super().__init__(*args, **kwargs)
+ self.syntax = "sql"
+
+ def _build_query(self, query: str) -> Tuple[dict, str, Optional[list]]:
+ sql_query = {"query": query}
+ sql_query_url = "/_opendistro/_sql"
+ return sql_query, sql_query_url, None
+
+ @classmethod
+ def name(cls):
+ return "Open Distro SQL Elasticsearch"
+
+ @classmethod
+ def type(cls):
+ return "elasticsearch2_OpenDistroSQLElasticSearch"
+
+
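+# SQL flavour for the X-Pack SQL endpoint, whose responses already arrive as columns/rows.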
+class XPackSQLElasticSearch(ElasticSearch2):
+ def __init__(self, *args, **kwargs):
+ super().__init__(*args, **kwargs)
+ self.syntax = "sql"
+
+ def _build_query(self, query: str) -> Tuple[dict, str, Optional[list]]:
+ sql_query = {"query": query}
+ sql_query_url = "/_xpack/sql"
+ return sql_query, sql_query_url, None
+
+ @classmethod
+ def _parse_results(cls, result_fields, raw_result):
+ error = raw_result.get("error")
+ if error:
+ raise Exception(error)
+
+ rv = {
+ "columns": [
+ {
+ "name": c["name"],
+ "friendly_name": c["name"],
+ "type": ELASTICSEARCH_TYPES_MAPPING.get(c["type"], "string"),
+ }
+ for c in raw_result["columns"]
+ ],
+ "rows": [],
+ }
+ query_results_rows = raw_result["rows"]
+
+ for query_results_row in query_results_rows:
+ result_row = dict()
+ for column, column_value in zip(rv["columns"], query_results_row):
+ result_row[column["name"]] = column_value
+ rv["rows"].append(result_row)
+
+ return rv
+
+ @classmethod
+ def name(cls):
+ return "X-Pack SQL Elasticsearch"
+
+ @classmethod
+ def type(cls):
+ return "elasticsearch2_XPackSQLElasticSearch"
+
+
+register(ElasticSearch2)
+register(OpenDistroSQLElasticSearch)
+register(XPackSQLElasticSearch)
diff --git a/redash/query_runner/exasol.py b/redash/query_runner/exasol.py
index 5bdbae5fc5..a5fdd7df13 100644
--- a/redash/query_runner/exasol.py
+++ b/redash/query_runner/exasol.py
@@ -1,7 +1,14 @@
import datetime
-from redash.query_runner import *
-from redash.utils import json_dumps
+from redash.query_runner import (
+ TYPE_DATE,
+ TYPE_DATETIME,
+ TYPE_FLOAT,
+ TYPE_INTEGER,
+ TYPE_STRING,
+ BaseQueryRunner,
+ register,
+)
def _exasol_type_mapper(val, data_type):
@@ -95,21 +102,19 @@ def run_query(self, query, user):
try:
statement = connection.execute(query)
columns = [
- {"name": n, "friendly_name": n, "type": _type_mapper(t)}
- for (n, t) in statement.columns().items()
+ {"name": n, "friendly_name": n, "type": _type_mapper(t)} for (n, t) in statement.columns().items()
]
cnames = statement.column_names()
rows = [dict(zip(cnames, row)) for row in statement]
data = {"columns": columns, "rows": rows}
- json_data = json_dumps(data)
finally:
if statement is not None:
statement.close()
connection.close()
- return json_data, error
+ return data, error
def get_schema(self, get_stats=False):
query = """
@@ -126,7 +131,7 @@ def get_schema(self, get_stats=False):
statement = connection.execute(query)
result = {}
- for (schema, table_name, column) in statement:
+ for schema, table_name, column in statement:
table_name_with_schema = "%s.%s" % (schema, table_name)
if table_name_with_schema not in result:
diff --git a/redash/query_runner/files/rds-combined-ca-bundle.pem b/redash/query_runner/files/rds-combined-ca-bundle.pem
index fe486180f2..de68d41a0f 100644
--- a/redash/query_runner/files/rds-combined-ca-bundle.pem
+++ b/redash/query_runner/files/rds-combined-ca-bundle.pem
@@ -1,530 +1,50 @@
-----BEGIN CERTIFICATE-----
-MIID9DCCAtygAwIBAgIBQjANBgkqhkiG9w0BAQUFADCBijELMAkGA1UEBhMCVVMx
-EzARBgNVBAgMCldhc2hpbmd0b24xEDAOBgNVBAcMB1NlYXR0bGUxIjAgBgNVBAoM
-GUFtYXpvbiBXZWIgU2VydmljZXMsIEluYy4xEzARBgNVBAsMCkFtYXpvbiBSRFMx
-GzAZBgNVBAMMEkFtYXpvbiBSRFMgUm9vdCBDQTAeFw0xNTAyMDUwOTExMzFaFw0y
-MDAzMDUwOTExMzFaMIGKMQswCQYDVQQGEwJVUzETMBEGA1UECAwKV2FzaGluZ3Rv
-bjEQMA4GA1UEBwwHU2VhdHRsZTEiMCAGA1UECgwZQW1hem9uIFdlYiBTZXJ2aWNl
-cywgSW5jLjETMBEGA1UECwwKQW1hem9uIFJEUzEbMBkGA1UEAwwSQW1hem9uIFJE
-UyBSb290IENBMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAuD8nrZ8V
-u+VA8yVlUipCZIKPTDcOILYpUe8Tct0YeQQr0uyl018StdBsa3CjBgvwpDRq1HgF
-Ji2N3+39+shCNspQeE6aYU+BHXhKhIIStt3r7gl/4NqYiDDMWKHxHq0nsGDFfArf
-AOcjZdJagOMqb3fF46flc8k2E7THTm9Sz4L7RY1WdABMuurpICLFE3oHcGdapOb9
-T53pQR+xpHW9atkcf3pf7gbO0rlKVSIoUenBlZipUlp1VZl/OD/E+TtRhDDNdI2J
-P/DSMM3aEsq6ZQkfbz/Ilml+Lx3tJYXUDmp+ZjzMPLk/+3beT8EhrwtcG3VPpvwp
-BIOqsqVVTvw/CwIDAQABo2MwYTAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUw
-AwEB/zAdBgNVHQ4EFgQUTgLurD72FchM7Sz1BcGPnIQISYMwHwYDVR0jBBgwFoAU
-TgLurD72FchM7Sz1BcGPnIQISYMwDQYJKoZIhvcNAQEFBQADggEBAHZcgIio8pAm
-MjHD5cl6wKjXxScXKtXygWH2BoDMYBJF9yfyKO2jEFxYKbHePpnXB1R04zJSWAw5
-2EUuDI1pSBh9BA82/5PkuNlNeSTB3dXDD2PEPdzVWbSKvUB8ZdooV+2vngL0Zm4r
-47QPyd18yPHrRIbtBtHR/6CwKevLZ394zgExqhnekYKIqqEX41xsUV0Gm6x4vpjf
-2u6O/+YE2U+qyyxHE5Wd5oqde0oo9UUpFETJPVb6Q2cEeQib8PBAyi0i6KnF+kIV
-A9dY7IHSubtCK/i8wxMVqfd5GtbA8mmpeJFwnDvm9rBEsHybl08qlax9syEwsUYr
-/40NawZfTUU=
------END CERTIFICATE-----
------BEGIN CERTIFICATE-----
-MIIEATCCAumgAwIBAgIBRDANBgkqhkiG9w0BAQUFADCBijELMAkGA1UEBhMCVVMx
-EzARBgNVBAgMCldhc2hpbmd0b24xEDAOBgNVBAcMB1NlYXR0bGUxIjAgBgNVBAoM
-GUFtYXpvbiBXZWIgU2VydmljZXMsIEluYy4xEzARBgNVBAsMCkFtYXpvbiBSRFMx
-GzAZBgNVBAMMEkFtYXpvbiBSRFMgUm9vdCBDQTAeFw0xNTAyMDUyMjAzMDZaFw0y
-MDAzMDUyMjAzMDZaMIGUMQswCQYDVQQGEwJVUzETMBEGA1UECAwKV2FzaGluZ3Rv
-bjEQMA4GA1UEBwwHU2VhdHRsZTEiMCAGA1UECgwZQW1hem9uIFdlYiBTZXJ2aWNl
-cywgSW5jLjETMBEGA1UECwwKQW1hem9uIFJEUzElMCMGA1UEAwwcQW1hem9uIFJE
-UyBhcC1ub3J0aGVhc3QtMSBDQTCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoC
-ggEBAMmM2B4PfTXCZjbZMWiDPyxvk/eeNwIRJAhfzesiGUiLozX6CRy3rwC1ZOPV
-AcQf0LB+O8wY88C/cV+d4Q2nBDmnk+Vx7o2MyMh343r5rR3Na+4izd89tkQVt0WW
-vO21KRH5i8EuBjinboOwAwu6IJ+HyiQiM0VjgjrmEr/YzFPL8MgHD/YUHehqjACn
-C0+B7/gu7W4qJzBL2DOf7ub2qszGtwPE+qQzkCRDwE1A4AJmVE++/FLH2Zx78Egg
-fV1sUxPtYgjGH76VyyO6GNKM6rAUMD/q5mnPASQVIXgKbupr618bnH+SWHFjBqZq
-HvDGPMtiiWII41EmGUypyt5AbysCAwEAAaNmMGQwDgYDVR0PAQH/BAQDAgEGMBIG
-A1UdEwEB/wQIMAYBAf8CAQAwHQYDVR0OBBYEFIiKM0Q6n1K4EmLxs3ZXxINbwEwR
-MB8GA1UdIwQYMBaAFE4C7qw+9hXITO0s9QXBj5yECEmDMA0GCSqGSIb3DQEBBQUA
-A4IBAQBezGbE9Rw/k2e25iGjj5n8r+M3dlye8ORfCE/dijHtxqAKasXHgKX8I9Tw
-JkBiGWiuzqn7gO5MJ0nMMro1+gq29qjZnYX1pDHPgsRjUX8R+juRhgJ3JSHijRbf
-4qNJrnwga7pj94MhcLq9u0f6dxH6dXbyMv21T4TZMTmcFduf1KgaiVx1PEyJjC6r
-M+Ru+A0eM+jJ7uCjUoZKcpX8xkj4nmSnz9NMPog3wdOSB9cAW7XIc5mHa656wr7I
-WJxVcYNHTXIjCcng2zMKd1aCcl2KSFfy56sRfT7J5Wp69QSr+jq8KM55gw8uqAwi
-VPrXn2899T1rcTtFYFP16WXjGuc0
------END CERTIFICATE-----
------BEGIN CERTIFICATE-----
-MIIEATCCAumgAwIBAgIBRTANBgkqhkiG9w0BAQUFADCBijELMAkGA1UEBhMCVVMx
-EzARBgNVBAgMCldhc2hpbmd0b24xEDAOBgNVBAcMB1NlYXR0bGUxIjAgBgNVBAoM
-GUFtYXpvbiBXZWIgU2VydmljZXMsIEluYy4xEzARBgNVBAsMCkFtYXpvbiBSRFMx
-GzAZBgNVBAMMEkFtYXpvbiBSRFMgUm9vdCBDQTAeFw0xNTAyMDUyMjAzMTlaFw0y
-MDAzMDUyMjAzMTlaMIGUMQswCQYDVQQGEwJVUzETMBEGA1UECAwKV2FzaGluZ3Rv
-bjEQMA4GA1UEBwwHU2VhdHRsZTEiMCAGA1UECgwZQW1hem9uIFdlYiBTZXJ2aWNl
-cywgSW5jLjETMBEGA1UECwwKQW1hem9uIFJEUzElMCMGA1UEAwwcQW1hem9uIFJE
-UyBhcC1zb3V0aGVhc3QtMSBDQTCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoC
-ggEBANaXElmSEYt/UtxHFsARFhSUahTf1KNJzR0Dmay6hqOXQuRVbKRwPd19u5vx
-DdF1sLT7D69IK3VDnUiQScaCv2Dpu9foZt+rLx+cpx1qiQd1UHrvqq8xPzQOqCdC
-RFStq6yVYZ69yfpfoI67AjclMOjl2Vph3ftVnqP0IgVKZdzeC7fd+umGgR9xY0Qr
-Ubhd/lWdsbNvzK3f1TPWcfIKQnpvSt85PIEDJir6/nuJUKMtmJRwTymJf0i+JZ4x
-7dJa341p2kHKcHMgOPW7nJQklGBA70ytjUV6/qebS3yIugr/28mwReflg3TJzVDl
-EOvi6pqbqNbkMuEwGDCmEQIVqgkCAwEAAaNmMGQwDgYDVR0PAQH/BAQDAgEGMBIG
-A1UdEwEB/wQIMAYBAf8CAQAwHQYDVR0OBBYEFAu93/4k5xbWOsgdCdn+/KdiRuit
-MB8GA1UdIwQYMBaAFE4C7qw+9hXITO0s9QXBj5yECEmDMA0GCSqGSIb3DQEBBQUA
-A4IBAQBlcjSyscpPjf5+MgzMuAsCxByqUt+WFspwcMCpwdaBeHOPSQrXNqX2Sk6P
-kth6oCivA64trWo8tFMvPYlUA1FYVD5WpN0kCK+P5pD4KHlaDsXhuhClJzp/OP8t
-pOyUr5109RHLxqoKB5J5m1XA7rgcFjnMxwBSWFe3/4uMk/+4T53YfCVXuc6QV3i7
-I/2LAJwFf//pTtt6fZenYfCsahnr2nvrNRNyAxcfvGZ/4Opn/mJtR6R/AjvQZHiR
-bkRNKF2GW0ueK5W4FkZVZVhhX9xh1Aj2Ollb+lbOqADaVj+AT3PoJPZ3MPQHKCXm
-xwG0LOLlRr/TfD6li1AfOVTAJXv9
------END CERTIFICATE-----
------BEGIN CERTIFICATE-----
-MIIEATCCAumgAwIBAgIBRjANBgkqhkiG9w0BAQUFADCBijELMAkGA1UEBhMCVVMx
-EzARBgNVBAgMCldhc2hpbmd0b24xEDAOBgNVBAcMB1NlYXR0bGUxIjAgBgNVBAoM
-GUFtYXpvbiBXZWIgU2VydmljZXMsIEluYy4xEzARBgNVBAsMCkFtYXpvbiBSRFMx
-GzAZBgNVBAMMEkFtYXpvbiBSRFMgUm9vdCBDQTAeFw0xNTAyMDUyMjAzMjRaFw0y
-MDAzMDUyMjAzMjRaMIGUMQswCQYDVQQGEwJVUzETMBEGA1UECAwKV2FzaGluZ3Rv
-bjEQMA4GA1UEBwwHU2VhdHRsZTEiMCAGA1UECgwZQW1hem9uIFdlYiBTZXJ2aWNl
-cywgSW5jLjETMBEGA1UECwwKQW1hem9uIFJEUzElMCMGA1UEAwwcQW1hem9uIFJE
-UyBhcC1zb3V0aGVhc3QtMiBDQTCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoC
-ggEBAJqBAJutz69hFOh3BtLHZTbwE8eejGGKayn9hu98YMDPzWzGXWCmW+ZYWELA
-cY3cNWNF8K4FqKXFr2ssorBYim1UtYFX8yhydT2hMD5zgQ2sCGUpuidijuPA6zaq
-Z3tdhVR94f0q8mpwpv2zqR9PcqaGDx2VR1x773FupRPRo7mEW1vC3IptHCQlP/zE
-7jQiLl28bDIH2567xg7e7E9WnZToRnhlYdTaDaJsHTzi5mwILi4cihSok7Shv/ME
-hnukvxeSPUpaVtFaBhfBqq055ePq9I+Ns4KGreTKMhU0O9fkkaBaBmPaFgmeX/XO
-n2AX7gMouo3mtv34iDTZ0h6YCGkCAwEAAaNmMGQwDgYDVR0PAQH/BAQDAgEGMBIG
-A1UdEwEB/wQIMAYBAf8CAQAwHQYDVR0OBBYEFIlQnY0KHYWn1jYumSdJYfwj/Nfw
-MB8GA1UdIwQYMBaAFE4C7qw+9hXITO0s9QXBj5yECEmDMA0GCSqGSIb3DQEBBQUA
-A4IBAQA0wVU6/l41cTzHc4azc4CDYY2Wd90DFWiH9C/mw0SgToYfCJ/5Cfi0NT/Y
-PRnk3GchychCJgoPA/k9d0//IhYEAIiIDjyFVgjbTkKV3sh4RbdldKVOUB9kumz/
-ZpShplsGt3z4QQiVnKfrAgqxWDjR0I0pQKkxXa6Sjkicos9LQxVtJ0XA4ieG1E7z
-zJr+6t80wmzxvkInSaWP3xNJK9azVRTrgQZQlvkbpDbExl4mNTG66VD3bAp6t3Wa
-B49//uDdfZmPkqqbX+hsxp160OH0rxJppwO3Bh869PkDnaPEd/Pxw7PawC+li0gi
-NRV8iCEx85aFxcyOhqn0WZOasxee
------END CERTIFICATE-----
------BEGIN CERTIFICATE-----
-MIID/zCCAuegAwIBAgIBRzANBgkqhkiG9w0BAQUFADCBijELMAkGA1UEBhMCVVMx
-EzARBgNVBAgMCldhc2hpbmd0b24xEDAOBgNVBAcMB1NlYXR0bGUxIjAgBgNVBAoM
-GUFtYXpvbiBXZWIgU2VydmljZXMsIEluYy4xEzARBgNVBAsMCkFtYXpvbiBSRFMx
-GzAZBgNVBAMMEkFtYXpvbiBSRFMgUm9vdCBDQTAeFw0xNTAyMDUyMjAzMzFaFw0y
-MDAzMDUyMjAzMzFaMIGSMQswCQYDVQQGEwJVUzETMBEGA1UECAwKV2FzaGluZ3Rv
-bjEQMA4GA1UEBwwHU2VhdHRsZTEiMCAGA1UECgwZQW1hem9uIFdlYiBTZXJ2aWNl
-cywgSW5jLjETMBEGA1UECwwKQW1hem9uIFJEUzEjMCEGA1UEAwwaQW1hem9uIFJE
-UyBldS1jZW50cmFsLTEgQ0EwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIB
-AQDFtP2dhSLuaPOI4ZrrPWsK4OY9ocQBp3yApH1KJYmI9wpQKZG/KCH2E6Oo7JAw
-QORU519r033T+FO2Z7pFPlmz1yrxGXyHpJs8ySx3Yo5S8ncDCdZJCLmtPiq/hahg
-5/0ffexMFUCQaYicFZsrJ/cStdxUV+tSw2JQLD7UxS9J97LQWUPyyG+ZrjYVTVq+
-zudnFmNSe4QoecXMhAFTGJFQXxP7nhSL9Ao5FGgdXy7/JWeWdQIAj8ku6cBDKPa6
-Y6kP+ak+In+Lye8z9qsCD/afUozfWjPR2aA4JoIZVF8dNRShIMo8l0XfgfM2q0+n
-ApZWZ+BjhIO5XuoUgHS3D2YFAgMBAAGjZjBkMA4GA1UdDwEB/wQEAwIBBjASBgNV
-HRMBAf8ECDAGAQH/AgEAMB0GA1UdDgQWBBRm4GsWIA/M6q+tK8WGHWDGh2gcyTAf
-BgNVHSMEGDAWgBROAu6sPvYVyEztLPUFwY+chAhJgzANBgkqhkiG9w0BAQUFAAOC
-AQEAHpMmeVQNqcxgfQdbDIi5UIy+E7zZykmtAygN1XQrvga9nXTis4kOTN6g5/+g
-HCx7jIXeNJzAbvg8XFqBN84Quqgpl/tQkbpco9Jh1HDs558D5NnZQxNqH5qXQ3Mm
-uPgCw0pYcPOa7bhs07i+MdVwPBsX27CFDtsgAIru8HvKxY1oTZrWnyIRo93tt/pk
-WuItVMVHjaQZVfTCow0aDUbte6Vlw82KjUFq+n2NMSCJDiDKsDDHT6BJc4AJHIq3
-/4Z52MSC9KMr0yAaaoWfW/yMEj9LliQauAgwVjArF4q78rxpfKTG9Rfd8U1BZANP
-7FrFMN0ThjfA1IvmOYcgskY5bQ==
------END CERTIFICATE-----
------BEGIN CERTIFICATE-----
-MIID/DCCAuSgAwIBAgIBSDANBgkqhkiG9w0BAQUFADCBijELMAkGA1UEBhMCVVMx
-EzARBgNVBAgMCldhc2hpbmd0b24xEDAOBgNVBAcMB1NlYXR0bGUxIjAgBgNVBAoM
-GUFtYXpvbiBXZWIgU2VydmljZXMsIEluYy4xEzARBgNVBAsMCkFtYXpvbiBSRFMx
-GzAZBgNVBAMMEkFtYXpvbiBSRFMgUm9vdCBDQTAeFw0xNTAyMDUyMjAzMzVaFw0y
-MDAzMDUyMjAzMzVaMIGPMQswCQYDVQQGEwJVUzETMBEGA1UECAwKV2FzaGluZ3Rv
-bjEQMA4GA1UEBwwHU2VhdHRsZTEiMCAGA1UECgwZQW1hem9uIFdlYiBTZXJ2aWNl
-cywgSW5jLjETMBEGA1UECwwKQW1hem9uIFJEUzEgMB4GA1UEAwwXQW1hem9uIFJE
-UyBldS13ZXN0LTEgQ0EwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCx
-PdbqQ0HKRj79Pmocxvjc+P6i4Ux24kgFIl+ckiir1vzkmesc3a58gjrMlCksEObt
-Yihs5IhzEq1ePT0gbfS9GYFp34Uj/MtPwlrfCBWG4d2TcrsKRHr1/EXUYhWqmdrb
-RhX8XqoRhVkbF/auzFSBhTzcGGvZpQ2KIaxRcQfcXlMVhj/pxxAjh8U4F350Fb0h
-nX1jw4/KvEreBL0Xb2lnlGTkwVxaKGSgXEnOgIyOFdOQc61vdome0+eeZsP4jqeR
-TGYJA9izJsRbe2YJxHuazD+548hsPlM3vFzKKEVURCha466rAaYAHy3rKur3HYQx
-Yt+SoKcEz9PXuSGj96ejAgMBAAGjZjBkMA4GA1UdDwEB/wQEAwIBBjASBgNVHRMB
-Af8ECDAGAQH/AgEAMB0GA1UdDgQWBBTebg//h2oeXbZjQ4uuoiuLYzuiPDAfBgNV
-HSMEGDAWgBROAu6sPvYVyEztLPUFwY+chAhJgzANBgkqhkiG9w0BAQUFAAOCAQEA
-TikPaGeZasTPw+4RBemlsyPAjtFFQLo7ddaFdORLgdEysVf8aBqndvbA6MT/v4lj
-GtEtUdF59ZcbWOrVm+fBZ2h/jYJ59dYF/xzb09nyRbdMSzB9+mkSsnOMqluq5y8o
-DY/PfP2vGhEg/2ZncRC7nlQU1Dm8F4lFWEiQ2fi7O1cW852Vmbq61RIfcYsH/9Ma
-kpgk10VZ75b8m3UhmpZ/2uRY+JEHImH5WpcTJ7wNiPNJsciZMznGtrgOnPzYco8L
-cDleOASIZifNMQi9PKOJKvi0ITz0B/imr8KBsW0YjZVJ54HMa7W1lwugSM7aMAs+
-E3Sd5lS+SHwWaOCHwhOEVA==
------END CERTIFICATE-----
------BEGIN CERTIFICATE-----
-MIID/DCCAuSgAwIBAgIBSTANBgkqhkiG9w0BAQUFADCBijELMAkGA1UEBhMCVVMx
-EzARBgNVBAgMCldhc2hpbmd0b24xEDAOBgNVBAcMB1NlYXR0bGUxIjAgBgNVBAoM
-GUFtYXpvbiBXZWIgU2VydmljZXMsIEluYy4xEzARBgNVBAsMCkFtYXpvbiBSRFMx
-GzAZBgNVBAMMEkFtYXpvbiBSRFMgUm9vdCBDQTAeFw0xNTAyMDUyMjAzNDBaFw0y
-MDAzMDUyMjAzNDBaMIGPMQswCQYDVQQGEwJVUzETMBEGA1UECAwKV2FzaGluZ3Rv
-bjEQMA4GA1UEBwwHU2VhdHRsZTEiMCAGA1UECgwZQW1hem9uIFdlYiBTZXJ2aWNl
-cywgSW5jLjETMBEGA1UECwwKQW1hem9uIFJEUzEgMB4GA1UEAwwXQW1hem9uIFJE
-UyBzYS1lYXN0LTEgQ0EwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCU
-X4OBnQ5xA6TLJAiFEI6l7bUWjoVJBa/VbMdCCSs2i2dOKmqUaXu2ix2zcPILj3lZ
-GMk3d/2zvTK/cKhcFrewHUBamTeVHdEmynhMQamqNmkM4ptYzFcvEUw1TGxHT4pV
-Q6gSN7+/AJewQvyHexHo8D0+LDN0/Wa9mRm4ixCYH2CyYYJNKaZt9+EZfNu+PPS4
-8iB0TWH0DgQkbWMBfCRgolLLitAZklZ4dvdlEBS7evN1/7ttBxUK6SvkeeSx3zBl
-ww3BlXqc3bvTQL0A+RRysaVyFbvtp9domFaDKZCpMmDFAN/ntx215xmQdrSt+K3F
-cXdGQYHx5q410CAclGnbAgMBAAGjZjBkMA4GA1UdDwEB/wQEAwIBBjASBgNVHRMB
-Af8ECDAGAQH/AgEAMB0GA1UdDgQWBBT6iVWnm/uakS+tEX2mzIfw+8JL0zAfBgNV
-HSMEGDAWgBROAu6sPvYVyEztLPUFwY+chAhJgzANBgkqhkiG9w0BAQUFAAOCAQEA
-FmDD+QuDklXn2EgShwQxV13+txPRuVdOSrutHhoCgMwFWCMtPPtBAKs6KPY7Guvw
-DpJoZSehDiOfsgMirjOWjvfkeWSNvKfjWTVneX7pZD9W5WPnsDBvTbCGezm+v87z
-b+ZM2ZMo98m/wkMcIEAgdSKilR2fuw8rLkAjhYFfs0A7tDgZ9noKwgHvoE4dsrI0
-KZYco6DlP/brASfHTPa2puBLN9McK3v+h0JaSqqm5Ro2Bh56tZkQh8AWy/miuDuK
-3+hNEVdxosxlkM1TPa1DGj0EzzK0yoeerXuH2HX7LlCrrxf6/wdKnjR12PMrLQ4A
-pCqkcWw894z6bV9MAvKe6A==
------END CERTIFICATE-----
------BEGIN CERTIFICATE-----
-MIID/DCCAuSgAwIBAgIBQzANBgkqhkiG9w0BAQUFADCBijELMAkGA1UEBhMCVVMx
-EzARBgNVBAgMCldhc2hpbmd0b24xEDAOBgNVBAcMB1NlYXR0bGUxIjAgBgNVBAoM
-GUFtYXpvbiBXZWIgU2VydmljZXMsIEluYy4xEzARBgNVBAsMCkFtYXpvbiBSRFMx
-GzAZBgNVBAMMEkFtYXpvbiBSRFMgUm9vdCBDQTAeFw0xNTAyMDUyMTU0MDRaFw0y
-MDAzMDUyMTU0MDRaMIGPMQswCQYDVQQGEwJVUzETMBEGA1UECAwKV2FzaGluZ3Rv
-bjEQMA4GA1UEBwwHU2VhdHRsZTEiMCAGA1UECgwZQW1hem9uIFdlYiBTZXJ2aWNl
-cywgSW5jLjETMBEGA1UECwwKQW1hem9uIFJEUzEgMB4GA1UEAwwXQW1hem9uIFJE
-UyB1cy1lYXN0LTEgQ0EwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQDI
-UIuwh8NusKHk1SqPXcP7OqxY3S/M2ZyQWD3w7Bfihpyyy/fc1w0/suIpX3kbMhAV
-2ESwged2/2zSx4pVnjp/493r4luhSqQYzru78TuPt9bhJIJ51WXunZW2SWkisSaf
-USYUzVN9ezR/bjXTumSUQaLIouJt3OHLX49s+3NAbUyOI8EdvgBQWD68H1epsC0n
-CI5s+pIktyOZ59c4DCDLQcXErQ+tNbDC++oct1ANd/q8p9URonYwGCGOBy7sbCYq
-9eVHh1Iy2M+SNXddVOGw5EuruvHoCIQyOz5Lz4zSuZA9dRbrfztNOpezCNYu6NKM
-n+hzcvdiyxv77uNm8EaxAgMBAAGjZjBkMA4GA1UdDwEB/wQEAwIBBjASBgNVHRMB
-Af8ECDAGAQH/AgEAMB0GA1UdDgQWBBQSQG3TmMe6Sa3KufaPBa72v4QFDzAfBgNV
-HSMEGDAWgBROAu6sPvYVyEztLPUFwY+chAhJgzANBgkqhkiG9w0BAQUFAAOCAQEA
-L/mOZfB3187xTmjOHMqN2G2oSKHBKiQLM9uv8+97qT+XR+TVsBT6b3yoPpMAGhHA
-Pc7nxAF5gPpuzatx0OTLPcmYucFmfqT/1qA5WlgCnMNtczyNMH97lKFTNV7Njtek
-jWEzAEQSyEWrkNpNlC4j6kMYyPzVXQeXUeZTgJ9FNnVZqmvfjip2N22tawMjrCn5
-7KN/zN65EwY2oO9XsaTwwWmBu3NrDdMbzJnbxoWcFWj4RBwanR1XjQOVNhDwmCOl
-/1Et13b8CPyj69PC8BOVU6cfTSx8WUVy0qvYOKHNY9Bqa5BDnIL3IVmUkeTlM1mt
-enRpyBj+Bk9rh/ICdiRKmA==
------END CERTIFICATE-----
------BEGIN CERTIFICATE-----
-MIID/DCCAuSgAwIBAgIBSjANBgkqhkiG9w0BAQUFADCBijELMAkGA1UEBhMCVVMx
-EzARBgNVBAgMCldhc2hpbmd0b24xEDAOBgNVBAcMB1NlYXR0bGUxIjAgBgNVBAoM
-GUFtYXpvbiBXZWIgU2VydmljZXMsIEluYy4xEzARBgNVBAsMCkFtYXpvbiBSRFMx
-GzAZBgNVBAMMEkFtYXpvbiBSRFMgUm9vdCBDQTAeFw0xNTAyMDUyMjAzNDVaFw0y
-MDAzMDUyMjAzNDVaMIGPMQswCQYDVQQGEwJVUzETMBEGA1UECAwKV2FzaGluZ3Rv
-bjEQMA4GA1UEBwwHU2VhdHRsZTEiMCAGA1UECgwZQW1hem9uIFdlYiBTZXJ2aWNl
-cywgSW5jLjETMBEGA1UECwwKQW1hem9uIFJEUzEgMB4GA1UEAwwXQW1hem9uIFJE
-UyB1cy13ZXN0LTEgQ0EwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQDE
-Dhw+uw/ycaiIhhyu2pXFRimq0DlB8cNtIe8hdqndH8TV/TFrljNgR8QdzOgZtZ9C
-zzQ2GRpInN/qJF6slEd6wO+6TaDBQkPY+07TXNt52POFUhdVkhJXHpE2BS7Xn6J7
-7RFAOeG1IZmc2DDt+sR1BgXzUqHslQGfFYNS0/MBO4P+ya6W7IhruB1qfa4HiYQS
-dbe4MvGWnv0UzwAqdR7OF8+8/5c58YXZIXCO9riYF2ql6KNSL5cyDPcYK5VK0+Q9
-VI6vuJHSMYcF7wLePw8jtBktqAFE/wbdZiIHhZvNyiNWPPNTGUmQbaJ+TzQEHDs5
-8en+/W7JKnPyBOkxxENbAgMBAAGjZjBkMA4GA1UdDwEB/wQEAwIBBjASBgNVHRMB
-Af8ECDAGAQH/AgEAMB0GA1UdDgQWBBS0nw/tFR9bCjgqWTPJkyy4oOD8bzAfBgNV
-HSMEGDAWgBROAu6sPvYVyEztLPUFwY+chAhJgzANBgkqhkiG9w0BAQUFAAOCAQEA
-CXGAY3feAak6lHdqj6+YWjy6yyUnLK37bRxZDsyDVXrPRQaXRzPTzx79jvDwEb/H
-Q/bdQ7zQRWqJcbivQlwhuPJ4kWPUZgSt3JUUuqkMsDzsvj/bwIjlrEFDOdHGh0mi
-eVIngFEjUXjMh+5aHPEF9BlQnB8LfVtKj18e15UDTXFa+xJPFxUR7wDzCfo4WI1m
-sUMG4q1FkGAZgsoyFPZfF8IVvgCuGdR8z30VWKklFxttlK0eGLlPAyIO0CQxPQlo
-saNJrHf4tLOgZIWk+LpDhNd9Et5EzvJ3aURUsKY4pISPPF5WdvM9OE59bERwUErd
-nuOuQWQeeadMceZnauRzJQ==
------END CERTIFICATE-----
------BEGIN CERTIFICATE-----
-MIID/DCCAuSgAwIBAgIBSzANBgkqhkiG9w0BAQUFADCBijELMAkGA1UEBhMCVVMx
-EzARBgNVBAgMCldhc2hpbmd0b24xEDAOBgNVBAcMB1NlYXR0bGUxIjAgBgNVBAoM
-GUFtYXpvbiBXZWIgU2VydmljZXMsIEluYy4xEzARBgNVBAsMCkFtYXpvbiBSRFMx
-GzAZBgNVBAMMEkFtYXpvbiBSRFMgUm9vdCBDQTAeFw0xNTAyMDUyMjAzNTBaFw0y
-MDAzMDUyMjAzNTBaMIGPMQswCQYDVQQGEwJVUzETMBEGA1UECAwKV2FzaGluZ3Rv
-bjEQMA4GA1UEBwwHU2VhdHRsZTEiMCAGA1UECgwZQW1hem9uIFdlYiBTZXJ2aWNl
-cywgSW5jLjETMBEGA1UECwwKQW1hem9uIFJEUzEgMB4GA1UEAwwXQW1hem9uIFJE
-UyB1cy13ZXN0LTIgQ0EwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQDM
-H58SR48U6jyERC1vYTnub34smf5EQVXyzaTmspWGWGzT31NLNZGSDFaa7yef9kdO
-mzJsgebR5tXq6LdwlIoWkKYQ7ycUaadtVKVYdI40QcI3cHn0qLFlg2iBXmWp/B+i
-Z34VuVlCh31Uj5WmhaBoz8t/GRqh1V/aCsf3Wc6jCezH3QfuCjBpzxdOOHN6Ie2v
-xX09O5qmZTvMoRBAvPkxdaPg/Mi7fxueWTbEVk78kuFbF1jHYw8U1BLILIAhcqlq
-x4u8nl73t3O3l/soNUcIwUDK0/S+Kfqhwn9yQyPlhb4Wy3pfnZLJdkyHldktnQav
-9TB9u7KH5Lk0aAYslMLxAgMBAAGjZjBkMA4GA1UdDwEB/wQEAwIBBjASBgNVHRMB
-Af8ECDAGAQH/AgEAMB0GA1UdDgQWBBT8roM4lRnlFHWMPWRz0zkwFZog1jAfBgNV
-HSMEGDAWgBROAu6sPvYVyEztLPUFwY+chAhJgzANBgkqhkiG9w0BAQUFAAOCAQEA
-JwrxwgwmPtcdaU7O7WDdYa4hprpOMamI49NDzmE0s10oGrqmLwZygcWU0jT+fJ+Y
-pJe1w0CVfKaeLYNsOBVW3X4ZPmffYfWBheZiaiEflq/P6t7/Eg81gaKYnZ/x1Dfa
-sUYkzPvCkXe9wEz5zdUTOCptDt89rBR9CstL9vE7WYUgiVVmBJffWbHQLtfjv6OF
-NMb0QME981kGRzc2WhgP71YS2hHd1kXtsoYP1yTu4vThSKsoN4bkiHsaC1cRkLoy
-0fFA4wpB3WloMEvCDaUvvH1LZlBXTNlwi9KtcwD4tDxkkBt4tQczKLGpQ/nF/W9n
-8YDWk3IIc1sd0bkZqoau2Q==
------END CERTIFICATE-----
------BEGIN CERTIFICATE-----
-MIIEATCCAumgAwIBAgIBTDANBgkqhkiG9w0BAQUFADCBijELMAkGA1UEBhMCVVMx
-EzARBgNVBAgMCldhc2hpbmd0b24xEDAOBgNVBAcMB1NlYXR0bGUxIjAgBgNVBAoM
-GUFtYXpvbiBXZWIgU2VydmljZXMsIEluYy4xEzARBgNVBAsMCkFtYXpvbiBSRFMx
-GzAZBgNVBAMMEkFtYXpvbiBSRFMgUm9vdCBDQTAeFw0xNTExMDYwMDA1NDZaFw0y
-MDAzMDUwMDA1NDZaMIGUMQswCQYDVQQGEwJVUzETMBEGA1UECAwKV2FzaGluZ3Rv
-bjEQMA4GA1UEBwwHU2VhdHRsZTEiMCAGA1UECgwZQW1hem9uIFdlYiBTZXJ2aWNl
-cywgSW5jLjETMBEGA1UECwwKQW1hem9uIFJEUzElMCMGA1UEAwwcQW1hem9uIFJE
-UyBhcC1ub3J0aGVhc3QtMiBDQTCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoC
-ggEBAKSwd+RVUzTRH0FgnbwoTK8TMm/zMT4+2BvALpAUe6YXbkisg2goycWuuWLg
-jOpFBB3GtyvXZnkqi7MkDWUmj1a2kf8l2oLyoaZ+Hm9x/sV+IJzOqPvj1XVUGjP6
-yYYnPJmUYqvZeI7fEkIGdFkP2m4/sgsSGsFvpD9FK1bL1Kx2UDpYX0kHTtr18Zm/
-1oN6irqWALSmXMDydb8hE0FB2A1VFyeKE6PnoDj/Y5cPHwPPdEi6/3gkDkSaOG30
-rWeQfL3pOcKqzbHaWTxMphd0DSL/quZ64Nr+Ly65Q5PRcTrtr55ekOUziuqXwk+o
-9QpACMwcJ7ROqOznZTqTzSFVXFECAwEAAaNmMGQwDgYDVR0PAQH/BAQDAgEGMBIG
-A1UdEwEB/wQIMAYBAf8CAQAwHQYDVR0OBBYEFM6Nox/QWbhzWVvzoJ/y0kGpNPK+
-MB8GA1UdIwQYMBaAFE4C7qw+9hXITO0s9QXBj5yECEmDMA0GCSqGSIb3DQEBBQUA
-A4IBAQCTkWBqNvyRf3Y/W21DwFx3oT/AIWrHt0BdGZO34tavummXemTH9LZ/mqv9
-aljt6ZuDtf5DEQjdsAwXMsyo03ffnP7doWm8iaF1+Mui77ot0TmTsP/deyGwukvJ
-tkxX8bZjDh+EaNauWKr+CYnniNxCQLfFtXYJsfOdVBzK3xNL+Z3ucOQRhr2helWc
-CDQgwfhP1+3pRVKqHvWCPC4R3fT7RZHuRmZ38kndv476GxRntejh+ePffif78bFI
-3rIZCPBGobrrUMycafSbyXteoGca/kA+/IqrAPlk0pWQ4aEL0yTWN2h2dnjoD7oX
-byIuL/g9AGRh97+ssn7D6bDRPTbW
------END CERTIFICATE-----
------BEGIN CERTIFICATE-----
-MIID/TCCAuWgAwIBAgIBTTANBgkqhkiG9w0BAQsFADCBijELMAkGA1UEBhMCVVMx
-EzARBgNVBAgMCldhc2hpbmd0b24xEDAOBgNVBAcMB1NlYXR0bGUxIjAgBgNVBAoM
-GUFtYXpvbiBXZWIgU2VydmljZXMsIEluYy4xEzARBgNVBAsMCkFtYXpvbiBSRFMx
-GzAZBgNVBAMMEkFtYXpvbiBSRFMgUm9vdCBDQTAeFw0xNjA1MDMyMTI5MjJaFw0y
-MDAzMDUyMTI5MjJaMIGQMQswCQYDVQQGEwJVUzETMBEGA1UECAwKV2FzaGluZ3Rv
-bjEQMA4GA1UEBwwHU2VhdHRsZTEiMCAGA1UECgwZQW1hem9uIFdlYiBTZXJ2aWNl
-cywgSW5jLjETMBEGA1UECwwKQW1hem9uIFJEUzEhMB8GA1UEAwwYQW1hem9uIFJE
-UyBhcC1zb3V0aC0xIENBMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA
-06eWGLE0TeqL9kyWOLkS8q0fXO97z+xyBV3DKSB2lg2GkgBz3B98MkmkeB0SZy3G
-Ce4uCpCPbFKiFEdiUclOlhZsrBuCeaimxLM3Ig2wuenElO/7TqgaYHYUbT3d+VQW
-GUbLn5GRZJZe1OAClYdOWm7A1CKpuo+cVV1vxbY2nGUQSJPpVn2sT9gnwvjdE60U
-JGYU/RLCTm8zmZBvlWaNIeKDnreIc4rKn6gUnJ2cQn1ryCVleEeyc3xjYDSrjgdn
-FLYGcp9mphqVT0byeQMOk0c7RHpxrCSA0V5V6/CreFV2LteK50qcDQzDSM18vWP/
-p09FoN8O7QrtOeZJzH/lmwIDAQABo2YwZDAOBgNVHQ8BAf8EBAMCAQYwEgYDVR0T
-AQH/BAgwBgEB/wIBADAdBgNVHQ4EFgQU2i83QHuEl/d0keXF+69HNJph7cMwHwYD
-VR0jBBgwFoAUTgLurD72FchM7Sz1BcGPnIQISYMwDQYJKoZIhvcNAQELBQADggEB
-ACqnH2VjApoDqoSQOky52QBwsGaj+xWYHW5Gm7EvCqvQuhWMkeBuD6YJmMvNyA9G
-I2lh6/o+sUk/RIsbYbxPRdhNPTOgDR9zsNRw6qxaHztq/CEC+mxDCLa3O1hHBaDV
-BmB3nCZb93BvO0EQSEk7aytKq/f+sjyxqOcs385gintdHGU9uM7gTZHnU9vByJsm
-/TL07Miq67X0NlhIoo3jAk+xHaeKJdxdKATQp0448P5cY20q4b8aMk1twcNaMvCP
-dG4M5doaoUA8OQ/0ukLLae/LBxLeTw04q1/a2SyFaVUX2Twbb1S3xVWwLA8vsyGr
-igXx7B5GgP+IHb6DTjPJAi0=
------END CERTIFICATE-----
------BEGIN CERTIFICATE-----
-MIID/DCCAuSgAwIBAgIBTjANBgkqhkiG9w0BAQsFADCBijELMAkGA1UEBhMCVVMx
-EzARBgNVBAgMCldhc2hpbmd0b24xEDAOBgNVBAcMB1NlYXR0bGUxIjAgBgNVBAoM
-GUFtYXpvbiBXZWIgU2VydmljZXMsIEluYy4xEzARBgNVBAsMCkFtYXpvbiBSRFMx
-GzAZBgNVBAMMEkFtYXpvbiBSRFMgUm9vdCBDQTAeFw0xNjA4MTExOTU4NDVaFw0y
-MDAzMDUxOTU4NDVaMIGPMQswCQYDVQQGEwJVUzETMBEGA1UECAwKV2FzaGluZ3Rv
-bjEQMA4GA1UEBwwHU2VhdHRsZTEiMCAGA1UECgwZQW1hem9uIFdlYiBTZXJ2aWNl
-cywgSW5jLjETMBEGA1UECwwKQW1hem9uIFJEUzEgMB4GA1UEAwwXQW1hem9uIFJE
-UyB1cy1lYXN0LTIgQ0EwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCp
-WnnUX7wM0zzstccX+4iXKJa9GR0a2PpvB1paEX4QRCgfhEdQWDaSqyrWNgdVCKkt
-1aQkWu5j6VAC2XIG7kKoonm1ZdBVyBLqW5lXNywlaiU9yhJkwo8BR+/OqgE+PLt/
-EO1mlN0PQudja/XkExCXTO29TG2j7F/O7hox6vTyHNHc0H88zS21uPuBE+jivViS
-yzj/BkyoQ85hnkues3f9R6gCGdc+J51JbZnmgzUkvXjAEuKhAm9JksVOxcOKUYe5
-ERhn0U9zjzpfbAITIkul97VVa5IxskFFTHIPJbvRKHJkiF6wTJww/tc9wm+fSCJ1
-+DbQTGZgkQ3bJrqRN29/AgMBAAGjZjBkMA4GA1UdDwEB/wQEAwIBBjASBgNVHRMB
-Af8ECDAGAQH/AgEAMB0GA1UdDgQWBBSAHQzUYYZbepwKEMvGdHp8wzHnfDAfBgNV
-HSMEGDAWgBROAu6sPvYVyEztLPUFwY+chAhJgzANBgkqhkiG9w0BAQsFAAOCAQEA
-MbaEzSYZ+aZeTBxf8yi0ta8K4RdwEJsEmP6IhFFQHYUtva2Cynl4Q9tZg3RMsybT
-9mlnSQQlbN/wqIIXbkrcgFcHoXG9Odm/bDtUwwwDaiEhXVfeQom3G77QHOWMTCGK
-qadwuh5msrb17JdXZoXr4PYHDKP7j0ONfAyFNER2+uecblHfRSpVq5UeF3L6ZJb8
-fSw/GtAV6an+/0r+Qm+PiI2H5XuZ4GmRJYnGMhqWhBYrY7p3jtVnKcsh39wgfUnW
-AvZEZG/yhFyAZW0Essa39LiL5VSq14Y1DOj0wgnhSY/9WHxaAo1HB1T9OeZknYbD
-fl/EGSZ0TEvZkENrXcPlVA==
------END CERTIFICATE-----
------BEGIN CERTIFICATE-----
-MIID/zCCAuegAwIBAgIBTzANBgkqhkiG9w0BAQsFADCBijELMAkGA1UEBhMCVVMx
-EzARBgNVBAgMCldhc2hpbmd0b24xEDAOBgNVBAcMB1NlYXR0bGUxIjAgBgNVBAoM
-GUFtYXpvbiBXZWIgU2VydmljZXMsIEluYy4xEzARBgNVBAsMCkFtYXpvbiBSRFMx
-GzAZBgNVBAMMEkFtYXpvbiBSRFMgUm9vdCBDQTAeFw0xNjA5MTUwMDEwMTFaFw0y
-MDAzMDUwMDEwMTFaMIGSMQswCQYDVQQGEwJVUzETMBEGA1UECAwKV2FzaGluZ3Rv
-bjEQMA4GA1UEBwwHU2VhdHRsZTEiMCAGA1UECgwZQW1hem9uIFdlYiBTZXJ2aWNl
-cywgSW5jLjETMBEGA1UECwwKQW1hem9uIFJEUzEjMCEGA1UEAwwaQW1hem9uIFJE
-UyBjYS1jZW50cmFsLTEgQ0EwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIB
-AQCZYI/iQ6DrS3ny3t1EwX1wAD+3LMgh7Fd01EW5LIuaK2kYIIQpsVKhxLCit/V5
-AGc/1qiJS1Qz9ODLTh0Na6bZW6EakRzuHJLe32KJtoFYPC7Z09UqzXrpA/XL+1hM
-P0ZmCWsU7Nn/EmvfBp9zX3dZp6P6ATrvDuYaVFr+SA7aT3FXpBroqBS1fyzUPs+W
-c6zTR6+yc4zkHX0XQxC5RH6xjgpeRkoOajA/sNo7AQF7KlWmKHbdVF44cvvAhRKZ
-XaoVs/C4GjkaAEPTCbopYdhzg+KLx9eB2BQnYLRrIOQZtRfbQI2Nbj7p3VsRuOW1
-tlcks2w1Gb0YC6w6SuIMFkl1AgMBAAGjZjBkMA4GA1UdDwEB/wQEAwIBBjASBgNV
-HRMBAf8ECDAGAQH/AgEAMB0GA1UdDgQWBBToYWxE1lawl6Ks6NsvpbHQ3GKEtzAf
-BgNVHSMEGDAWgBROAu6sPvYVyEztLPUFwY+chAhJgzANBgkqhkiG9w0BAQsFAAOC
-AQEAG/8tQ0ooi3hoQpa5EJz0/E5VYBsAz3YxA2HoIonn0jJyG16bzB4yZt4vNQMA
-KsNlQ1uwDWYL1nz63axieUUFIxqxl1KmwfhsmLgZ0Hd2mnTPIl2Hw3uj5+wdgGBg
-agnAZ0bajsBYgD2VGQbqjdk2Qn7Fjy3LEWIvGZx4KyZ99OJ2QxB7JOPdauURAtWA
-DKYkP4LLJxtj07DSzG8kuRWb9B47uqUD+eKDIyjfjbnzGtd9HqqzYFau7EX3HVD9
-9Qhnjl7bTZ6YfAEZ3nH2t3Vc0z76XfGh47rd0pNRhMV+xpok75asKf/lNh5mcUrr
-VKwflyMkQpSbDCmcdJ90N2xEXQ==
------END CERTIFICATE-----
------BEGIN CERTIFICATE-----
-MIID/DCCAuSgAwIBAgIBUDANBgkqhkiG9w0BAQsFADCBijELMAkGA1UEBhMCVVMx
-EzARBgNVBAgMCldhc2hpbmd0b24xEDAOBgNVBAcMB1NlYXR0bGUxIjAgBgNVBAoM
-GUFtYXpvbiBXZWIgU2VydmljZXMsIEluYy4xEzARBgNVBAsMCkFtYXpvbiBSRFMx
-GzAZBgNVBAMMEkFtYXpvbiBSRFMgUm9vdCBDQTAeFw0xNjEwMTAxNzQ0NDJaFw0y
-MDAzMDUxNzQ0NDJaMIGPMQswCQYDVQQGEwJVUzETMBEGA1UECAwKV2FzaGluZ3Rv
-bjEQMA4GA1UEBwwHU2VhdHRsZTEiMCAGA1UECgwZQW1hem9uIFdlYiBTZXJ2aWNl
-cywgSW5jLjETMBEGA1UECwwKQW1hem9uIFJEUzEgMB4GA1UEAwwXQW1hem9uIFJE
-UyBldS13ZXN0LTIgQ0EwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQDO
-cttLJfubB4XMMIGWNfJISkIdCMGJyOzLiMJaiWB5GYoXKhEl7YGotpy0qklwW3BQ
-a0fmVdcCLX+dIuVQ9iFK+ZcK7zwm7HtdDTCHOCKeOh2IcnU4c/VIokFi6Gn8udM6
-N/Zi5M5OGpVwLVALQU7Yctsn3c95el6MdVx6mJiIPVu7tCVZn88Z2koBQ2gq9P4O
-Sb249SHFqOb03lYDsaqy1NDsznEOhaRBw7DPJFpvmw1lA3/Y6qrExRI06H2VYR2i
-7qxwDV50N58fs10n7Ye1IOxTVJsgEA7X6EkRRXqYaM39Z76R894548WHfwXWjUsi
-MEX0RS0/t1GmnUQjvevDAgMBAAGjZjBkMA4GA1UdDwEB/wQEAwIBBjASBgNVHRMB
-Af8ECDAGAQH/AgEAMB0GA1UdDgQWBBQBxmcuRSxERYCtNnSr5xNfySokHjAfBgNV
-HSMEGDAWgBROAu6sPvYVyEztLPUFwY+chAhJgzANBgkqhkiG9w0BAQsFAAOCAQEA
-UyCUQjsF3nUAABjfEZmpksTuUo07aT3KGYt+EMMFdejnBQ0+2lJJFGtT+CDAk1SD
-RSgfEBon5vvKEtlnTf9a3pv8WXOAkhfxnryr9FH6NiB8obISHNQNPHn0ljT2/T+I
-Y6ytfRvKHa0cu3V0NXbJm2B4KEOt4QCDiFxUIX9z6eB4Kditwu05OgQh6KcogOiP
-JesWxBMXXGoDC1rIYTFO7szwDyOHlCcVXJDNsTJhc32oDWYdeIbW7o/5I+aQsrXZ
-C96HykZcgWzz6sElrQxUaT3IoMw/5nmw4uWKKnZnxgI9bY4fpQwMeBZ96iHfFxvH
-mqfEEuC7uUoPofXdBp2ObQ==
------END CERTIFICATE-----
------BEGIN CERTIFICATE-----
-MIID/DCCAuSgAwIBAgIBUTANBgkqhkiG9w0BAQsFADCBijELMAkGA1UEBhMCVVMx
-EzARBgNVBAgMCldhc2hpbmd0b24xEDAOBgNVBAcMB1NlYXR0bGUxIjAgBgNVBAoM
-GUFtYXpvbiBXZWIgU2VydmljZXMsIEluYy4xEzARBgNVBAsMCkFtYXpvbiBSRFMx
-GzAZBgNVBAMMEkFtYXpvbiBSRFMgUm9vdCBDQTAeFw0xNzA4MjUyMTM5MjZaFw0y
-MDAzMDUyMTM5MjZaMIGPMQswCQYDVQQGEwJVUzETMBEGA1UECAwKV2FzaGluZ3Rv
-bjEQMA4GA1UEBwwHU2VhdHRsZTEiMCAGA1UECgwZQW1hem9uIFdlYiBTZXJ2aWNl
-cywgSW5jLjETMBEGA1UECwwKQW1hem9uIFJEUzEgMB4GA1UEAwwXQW1hem9uIFJE
-UyBldS13ZXN0LTMgQ0EwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQC+
-xmlEC/3a4cJH+UPwXCE02lC7Zq5NHd0dn6peMeLN8agb6jW4VfSY0NydjRj2DJZ8
-K7wV6sub5NUGT1NuFmvSmdbNR2T59KX0p2dVvxmXHHtIpQ9Y8Aq3ZfhmC5q5Bqgw
-tMA1xayDi7HmoPX3R8kk9ktAZQf6lDeksCvok8idjTu9tiSpDiMwds5BjMsWfyjZ
-d13PTGGNHYVdP692BSyXzSP1Vj84nJKnciW8tAqwIiadreJt5oXyrCXi8ekUMs80
-cUTuGm3aA3Q7PB5ljJMPqz0eVddaiIvmTJ9O3Ez3Du/HpImyMzXjkFaf+oNXf/Hx
-/EW5jCRR6vEiXJcDRDS7AgMBAAGjZjBkMA4GA1UdDwEB/wQEAwIBBjASBgNVHRMB
-Af8ECDAGAQH/AgEAMB0GA1UdDgQWBBRZ9mRtS5fHk3ZKhG20Oack4cAqMTAfBgNV
-HSMEGDAWgBROAu6sPvYVyEztLPUFwY+chAhJgzANBgkqhkiG9w0BAQsFAAOCAQEA
-F/u/9L6ExQwD73F/bhCw7PWcwwqsK1mypIdrjdIsu0JSgwWwGCXmrIspA3n3Dqxq
-sMhAJD88s9Em7337t+naar2VyLO63MGwjj+vA4mtvQRKq8ScIpiEc7xN6g8HUMsd
-gPG9lBGfNjuAZsrGJflrko4HyuSM7zHExMjXLH+CXcv/m3lWOZwnIvlVMa4x0Tz0
-A4fklaawryngzeEjuW6zOiYCzjZtPlP8Fw0SpzppJ8VpQfrZ751RDo4yudmPqoPK
-5EUe36L8U+oYBXnC5TlYs9bpVv9o5wJQI5qA9oQE2eFWxF1E0AyZ4V5sgGUBStaX
-BjDDWul0wSo7rt1Tq7XpnA==
------END CERTIFICATE-----
------BEGIN CERTIFICATE-----
-MIIEATCCAumgAwIBAgIBTjANBgkqhkiG9w0BAQUFADCBijELMAkGA1UEBhMCVVMx
-EzARBgNVBAgMCldhc2hpbmd0b24xEDAOBgNVBAcMB1NlYXR0bGUxIjAgBgNVBAoM
-GUFtYXpvbiBXZWIgU2VydmljZXMsIEluYy4xEzARBgNVBAsMCkFtYXpvbiBSRFMx
-GzAZBgNVBAMMEkFtYXpvbiBSRFMgUm9vdCBDQTAeFw0xNzEyMDEwMDU1NDJaFw0y
-MDAzMDUwMDU1NDJaMIGUMQswCQYDVQQGEwJVUzETMBEGA1UECAwKV2FzaGluZ3Rv
-bjEQMA4GA1UEBwwHU2VhdHRsZTEiMCAGA1UECgwZQW1hem9uIFdlYiBTZXJ2aWNl
-cywgSW5jLjETMBEGA1UECwwKQW1hem9uIFJEUzElMCMGA1UEAwwcQW1hem9uIFJE
-UyBhcC1ub3J0aGVhc3QtMyBDQTCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoC
-ggEBAMZtQNnm/XT19mTa10ftHLzg5UhajoI65JHv4TQNdGXdsv+CQdGYU49BJ9Eu
-3bYgiEtTzR2lQe9zGMvtuJobLhOWuavzp7IixoIQcHkFHN6wJ1CvqrxgvJfBq6Hy
-EuCDCiU+PPDLUNA6XM6Qx3IpHd1wrJkjRB80dhmMSpxmRmx849uFafhN+P1QybsM
-TI0o48VON2+vj+mNuQTyLMMP8D4odSQHjaoG+zyJfJGZeAyqQyoOUOFEyQaHC3TT
-3IDSNCQlpxb9LerbCoKu79WFBBq3CS5cYpg8/fsnV2CniRBFFUumBt5z4dhw9RJU
-qlUXXO1ZyzpGd+c5v6FtrfXtnIUCAwEAAaNmMGQwDgYDVR0PAQH/BAQDAgEGMBIG
-A1UdEwEB/wQIMAYBAf8CAQAwHQYDVR0OBBYEFETv7ELNplYy/xTeIOInl6nzeiHg
-MB8GA1UdIwQYMBaAFE4C7qw+9hXITO0s9QXBj5yECEmDMA0GCSqGSIb3DQEBBQUA
-A4IBAQCpKxOQcd0tEKb3OtsOY8q/MPwTyustGk2Rt7t9G68idADp8IytB7M0SDRo
-wWZqynEq7orQVKdVOanhEWksNDzGp0+FPAf/KpVvdYCd7ru3+iI+V4ZEp2JFdjuZ
-Zz0PIjS6AgsZqE5Ri1J+NmfmjGZCPhsHnGZiBaenX6K5VRwwwmLN6xtoqrrfR5zL
-QfBeeZNJG6KiM3R/DxJ5rAa6Fz+acrhJ60L7HprhB7SFtj1RCijau3+ZwiGmUOMr
-yKlMv+VgmzSw7o4Hbxy1WVrA6zQsTHHSGf+vkQn2PHvnFMUEu/ZLbTDYFNmTLK91
-K6o4nMsEvhBKgo4z7H1EqqxXhvN2
------END CERTIFICATE-----
------BEGIN CERTIFICATE-----
-MIIEBDCCAuygAwIBAgIBTTANBgkqhkiG9w0BAQUFADCBijELMAkGA1UEBhMCVVMx
-EzARBgNVBAgMCldhc2hpbmd0b24xEDAOBgNVBAcMB1NlYXR0bGUxIjAgBgNVBAoM
-GUFtYXpvbiBXZWIgU2VydmljZXMsIEluYy4xEzARBgNVBAsMCkFtYXpvbiBSRFMx
-GzAZBgNVBAMMEkFtYXpvbiBSRFMgUm9vdCBDQTAeFw0xNzEyMDYyMjQyMjdaFw0y
-MDAzMDQyMjQyMjdaMIGXMQswCQYDVQQGEwJVUzETMBEGA1UECAwKV2FzaGluZ3Rv
-bjEQMA4GA1UEBwwHU2VhdHRsZTEiMCAGA1UECgwZQW1hem9uIFdlYiBTZXJ2aWNl
-cywgSW5jLjETMBEGA1UECwwKQW1hem9uIFJEUzEoMCYGA1UEAwwfQW1hem9uIFJE
-UyBwcmV2aWV3LXVzLWVhc3QtMiBDQTCCASIwDQYJKoZIhvcNAQEBBQADggEPADCC
-AQoCggEBAMw0E8k8URanS0c/i1S7wzFf5+XC9H2bm+4pENdElGP5s9rVCybrzJaw
-6zZgVLpOFnS9mJ+sDHIMUexPjj0X4+r7wZ4+hPfy7Rmrgbt23IQwr+PIBxsKAVjj
-iaQ3bSm5WQ79an5elfQqEDdZ13ckUcLBJDA8bUDthI8m7gnteGtx0M1D0VS5PDs9
-cf96QlBia9Lx3VcNo3cc0PzP30E4j3h/Ywlb0jXUgB6oVlTxK70BjD3kZa+2xlea
-vKmm4NqGVhPY7BWd4XNdbSYsPDeZ9HxHNWXZxoHcQ7vSU8RKYVPtoBK/zIp3eWOi
-gzZlm5vYPvlkYh2pshttPPVyhZqlEZ8CAwEAAaNmMGQwDgYDVR0PAQH/BAQDAgEG
-MBIGA1UdEwEB/wQIMAYBAf8CAQAwHQYDVR0OBBYEFI93K+FRhste6w3MiD+IK3Tc
-g/BsMB8GA1UdIwQYMBaAFE4C7qw+9hXITO0s9QXBj5yECEmDMA0GCSqGSIb3DQEB
-BQUAA4IBAQAs4RsC8MJVOvrlRi5sgKC9LJ4BvSrrbR5V8CdIEwlPqrVOSsU5t7Py
-j8CHoPUY/ya1azlBSO62BqdZxipFuAR06NdxNG2Gy0fGl71N2udxokwEPW+IEZ81
-G6JeX8HNFjnna8ehimz1VJDDW7qborhg3dCAgEWkgv5PDR9/zoUu6bbmHPV77zbx
-Gq7Sybz5OiagC7Nj9N1WgjNXUEmlfY2DHXnJmIVgUGEVrBgu5tGcIU/bQCRznH1N
-JsBH0SalneCbSzMBhQdnzL+L5KOERibWAZvS6ebmomTBwa03kgo/T0DfEccgobTs
-rV6T9/8Vg9T18vEeqURL+LOGs7+lIKmN
------END CERTIFICATE-----
------BEGIN CERTIFICATE-----
-MIID/TCCAuWgAwIBAgIBUjANBgkqhkiG9w0BAQsFADCBijELMAkGA1UEBhMCVVMx
-EzARBgNVBAgMCldhc2hpbmd0b24xEDAOBgNVBAcMB1NlYXR0bGUxIjAgBgNVBAoM
-GUFtYXpvbiBXZWIgU2VydmljZXMsIEluYy4xEzARBgNVBAsMCkFtYXpvbiBSRFMx
-GzAZBgNVBAMMEkFtYXpvbiBSRFMgUm9vdCBDQTAeFw0xODA5MjgxNzM0NTJaFw0y
-MDAzMDUxNzM0NTJaMIGQMQswCQYDVQQGEwJVUzETMBEGA1UECAwKV2FzaGluZ3Rv
-bjEQMA4GA1UEBwwHU2VhdHRsZTEiMCAGA1UECgwZQW1hem9uIFdlYiBTZXJ2aWNl
-cywgSW5jLjETMBEGA1UECwwKQW1hem9uIFJEUzEhMB8GA1UEAwwYQW1hem9uIFJE
-UyBldS1ub3J0aC0xIENBMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA
-wvHfpoixHNy1jvcq/WNhXDHlsFVbEOX7mp01YQeK0wWqlpFvjs2HFJ1sRnnmyhdT
-sv4VQuXnQw2V2iFAO2HveDi8pcJ+eIXY+wloSVBytgYLTMcNpn5LmqIeyGO+Lr6p
-KUr78I4uE0mnabxyILA96CYrYtgwpLCtpEXSdSJPwOSK9nX9++molxLcJ5v4fiPS
-j46PETsbFoFdXXwYCdiJKpzO4zUAkKzzvzbF7cXg9R4noJuytjEKbluxugDHdnwl
-SctGZ3moju2I0OpPbJKUI3wHsUMtY5v15X74MOED5lbtaW5+/6JIERggve0b23Ni
-4nlYSt0Bb3z3Zwc83twCUwIDAQABo2YwZDAOBgNVHQ8BAf8EBAMCAQYwEgYDVR0T
-AQH/BAgwBgEB/wIBADAdBgNVHQ4EFgQU4stOy1OAFRyvZCSKNfCiPRD+rPowHwYD
-VR0jBBgwFoAUTgLurD72FchM7Sz1BcGPnIQISYMwDQYJKoZIhvcNAQELBQADggEB
-AHpRIlKh1fqbMHl0+VnJ/52XQy1F5gM2hnw3lYkOLsDyzj9W4V6D1v2EDgYW+ZVH
-0wWqo8m0jS6CDn14W2HqNlyXyHpJK3eh3088zxvJgKqzKS4ghNzafN7axwYIwRN6
-9rrhRWy9MaFHaSPKtgiuTxw9fOekqyJdO+OYpBVEp7KEEyEG9/W5xZcU64zGb6UT
-8/g4+5t+HlT0nYBMvt8HW7w2XbFBetfKKK4WaoPKloOMN+RLO/JgJ6pVWvxM8nhC
-PbVtr43OI1sQAXYk0an7aUDgXT98vGwovWNHI6lFCMGRG+WXhauLtKRsIr4hR1LV
-fES7Q9MWPzPYHQoKELF9Jhk=
------END CERTIFICATE-----
------BEGIN CERTIFICATE-----
-MIIEBzCCAu+gAwIBAgICEAAwDQYJKoZIhvcNAQELBQAwgZQxCzAJBgNVBAYTAlVT
-MRAwDgYDVQQHDAdTZWF0dGxlMRMwEQYDVQQIDApXYXNoaW5ndG9uMSIwIAYDVQQK
-DBlBbWF6b24gV2ViIFNlcnZpY2VzLCBJbmMuMRMwEQYDVQQLDApBbWF6b24gUkRT
-MSUwIwYDVQQDDBxBbWF6b24gUkRTIGFwLWVhc3QtMSBSb290IENBMB4XDTE5MDIx
-NzAyNDcwMFoXDTIyMDYwMTEyMDAwMFowgY8xCzAJBgNVBAYTAlVTMRMwEQYDVQQI
-DApXYXNoaW5ndG9uMRAwDgYDVQQHDAdTZWF0dGxlMSIwIAYDVQQKDBlBbWF6b24g
-V2ViIFNlcnZpY2VzLCBJbmMuMRMwEQYDVQQLDApBbWF6b24gUkRTMSAwHgYDVQQD
-DBdBbWF6b24gUkRTIGFwLWVhc3QtMSBDQTCCASIwDQYJKoZIhvcNAQEBBQADggEP
-ADCCAQoCggEBAOcJAUofyJuBuPr5ISHi/Ha5ed8h3eGdzn4MBp6rytPOg9NVGRQs
-O93fNGCIKsUT6gPuk+1f1ncMTV8Y0Fdf4aqGWme+Khm3ZOP3V1IiGnVq0U2xiOmn
-SQ4Q7LoeQC4lC6zpoCHVJyDjZ4pAknQQfsXb77Togdt/tK5ahev0D+Q3gCwAoBoO
-DHKJ6t820qPi63AeGbJrsfNjLKiXlFPDUj4BGir4dUzjEeH7/hx37na1XG/3EcxP
-399cT5k7sY/CR9kctMlUyEEUNQOmhi/ly1Lgtihm3QfjL6K9aGLFNwX35Bkh9aL2
-F058u+n8DP/dPeKUAcJKiQZUmzuen5n57x8CAwEAAaNmMGQwDgYDVR0PAQH/BAQD
-AgEGMBIGA1UdEwEB/wQIMAYBAf8CAQAwHQYDVR0OBBYEFFlqgF4FQlb9yP6c+Q3E
-O3tXv+zOMB8GA1UdIwQYMBaAFK9T6sY/PBZVbnHcNcQXf58P4OuPMA0GCSqGSIb3
-DQEBCwUAA4IBAQDeXiS3v1z4jWAo1UvVyKDeHjtrtEH1Rida1eOXauFuEQa5tuOk
-E53Os4haZCW4mOlKjigWs4LN+uLIAe1aFXGo92nGIqyJISHJ1L+bopx/JmIbHMCZ
-0lTNJfR12yBma5VQy7vzeFku/SisKwX0Lov1oHD4MVhJoHbUJYkmAjxorcIHORvh
-I3Vj5XrgDWtLDPL8/Id/roul/L+WX5ir+PGScKBfQIIN2lWdZoqdsx8YWqhm/ikL
-C6qNieSwcvWL7C03ri0DefTQMY54r5wP33QU5hJ71JoaZI3YTeT0Nf+NRL4hM++w
-Q0veeNzBQXg1f/JxfeA39IDIX1kiCf71tGlT
------END CERTIFICATE-----
------BEGIN CERTIFICATE-----
-MIIEEDCCAvigAwIBAgIJAJF3HxEqKM4lMA0GCSqGSIb3DQEBCwUAMIGUMQswCQYD
+MIIEEjCCAvqgAwIBAgIJAM2ZN/+nPi27MA0GCSqGSIb3DQEBCwUAMIGVMQswCQYD
VQQGEwJVUzEQMA4GA1UEBwwHU2VhdHRsZTETMBEGA1UECAwKV2FzaGluZ3RvbjEi
MCAGA1UECgwZQW1hem9uIFdlYiBTZXJ2aWNlcywgSW5jLjETMBEGA1UECwwKQW1h
-em9uIFJEUzElMCMGA1UEAwwcQW1hem9uIFJEUyBhcC1lYXN0LTEgUm9vdCBDQTAe
-Fw0xOTAyMTcwMjQ2MTFaFw0yNDAyMTYwMjQ2MTFaMIGUMQswCQYDVQQGEwJVUzEQ
-MA4GA1UEBwwHU2VhdHRsZTETMBEGA1UECAwKV2FzaGluZ3RvbjEiMCAGA1UECgwZ
-QW1hem9uIFdlYiBTZXJ2aWNlcywgSW5jLjETMBEGA1UECwwKQW1hem9uIFJEUzEl
-MCMGA1UEAwwcQW1hem9uIFJEUyBhcC1lYXN0LTEgUm9vdCBDQTCCASIwDQYJKoZI
-hvcNAQEBBQADggEPADCCAQoCggEBAOCVr1Yj5IW4XWa9QOLGJDSz4pqIM6BAbqQp
-gYvzIO4Lv8c8dEnuuuCY8M/zOrJ1iQJ3cDiKGa32HVBVcH+nUdXzw4Jq5jw0hsb6
-/WW2RD2aUe4jCkRD5wNzmeHM4gTgtMZnXNVHpELgKR4wVhSHEfWFTiMsZi35y8mj
-PL98Mz/m/nMnB/59EjMvcJMrsUljHO6B9BMEcvNkwvre9xza0BQWKyiVRcbOpoj1
-w4BPtYYZ+dW2QKw9AmYXwAmCLeATsxrHIJ/IbzS7obxv2QN2Eh4pJ3ghRCFv1XM9
-XVkm13oiCjj7jsxAwF7o+VggPl/GG+/Gwk+TLuaTFNAtROpPxL8CAwEAAaNjMGEw
-DgYDVR0PAQH/BAQDAgEGMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFK9T6sY/
-PBZVbnHcNcQXf58P4OuPMB8GA1UdIwQYMBaAFK9T6sY/PBZVbnHcNcQXf58P4OuP
-MA0GCSqGSIb3DQEBCwUAA4IBAQBBY+KATaT7ndYT3Ky0VWaiwNfyl1u3aDxr+MKP
-VeDhtOhlob5u0E+edOXUvEXd4A+ntS+U0HmwvtMXtQbQ2EJbsNRqZnS8KG9YB2Yc
-Q99auphW3wMjwHRtflLO5h14aa9SspqJJgcM1R7Z3pAYeq6bpBDxZSGrYtWI64q4
-h4i67qWAGDFcXSTW1kJ00GMlBCIGTeYiu8LYutdsDWzYKkeezJRjx9VR4w7A7e1G
-WmY4aUg/8aPxCioY2zEQKNl55Ghg6Dwy+6BxaV6RlV9r9EaSCai11p1bgS568WQn
-4WNQK36EGe37l2SOpDB6STrq57/rjREvmq803Ylg/Gf6qqzK
+em9uIFJEUzEmMCQGA1UEAwwdQW1hem9uIFJEUyBhZi1zb3V0aC0xIFJvb3QgQ0Ew
+HhcNMTkxMDI4MTgwNTU4WhcNMjQxMDI2MTgwNTU4WjCBlTELMAkGA1UEBhMCVVMx
+EDAOBgNVBAcMB1NlYXR0bGUxEzARBgNVBAgMCldhc2hpbmd0b24xIjAgBgNVBAoM
+GUFtYXpvbiBXZWIgU2VydmljZXMsIEluYy4xEzARBgNVBAsMCkFtYXpvbiBSRFMx
+JjAkBgNVBAMMHUFtYXpvbiBSRFMgYWYtc291dGgtMSBSb290IENBMIIBIjANBgkq
+hkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAwR2351uPMZaJk2gMGT+1sk8HE9MQh2rc
+/sCnbxGn2p1c7Oi9aBbd/GiFijeJb2BXvHU+TOq3d3Jjqepq8tapXVt4ojbTJNyC
+J5E7r7KjTktKdLxtBE1MK25aY+IRJjtdU6vG3KiPKUT1naO3xs3yt0F76WVuFivd
+9OHv2a+KHvPkRUWIxpmAHuMY9SIIMmEZtVE7YZGx5ah0iO4JzItHcbVR0y0PBH55
+arpFBddpIVHCacp1FUPxSEWkOpI7q0AaU4xfX0fe1BV5HZYRKpBOIp1TtZWvJD+X
+jGUtL1BEsT5vN5g9MkqdtYrC+3SNpAk4VtpvJrdjraI/hhvfeXNnAwIDAQABo2Mw
+YTAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQUEEi/
+WWMcBJsoGXg+EZwkQ0MscZQwHwYDVR0jBBgwFoAUEEi/WWMcBJsoGXg+EZwkQ0Ms
+cZQwDQYJKoZIhvcNAQELBQADggEBAGDZ5js5Pc/gC58LJrwMPXFhJDBS8QuDm23C
+FFUdlqucskwOS3907ErK1ZkmVJCIqFLArHqskFXMAkRZ2PNR7RjWLqBs+0znG5yH
+hRKb4DXzhUFQ18UBRcvT6V6zN97HTRsEEaNhM/7k8YLe7P8vfNZ28VIoJIGGgv9D
+wQBBvkxQ71oOmAG0AwaGD0ORGUfbYry9Dz4a4IcUsZyRWRMADixgrFv6VuETp26s
+/+z+iqNaGWlELBKh3iQCT6Y/1UnkPLO42bxrCSyOvshdkYN58Q2gMTE1SVTqyo8G
+Lw8lLAz9bnvUSgHzB3jRrSx6ggF/WRMRYlR++y6LXP4SAsSAaC0=
-----END CERTIFICATE-----
-----BEGIN CERTIFICATE-----
-MIIECTCCAvGgAwIBAgICEAAwDQYJKoZIhvcNAQELBQAwgZUxCzAJBgNVBAYTAlVT
-MRAwDgYDVQQHDAdTZWF0dGxlMRMwEQYDVQQIDApXYXNoaW5ndG9uMSIwIAYDVQQK
-DBlBbWF6b24gV2ViIFNlcnZpY2VzLCBJbmMuMRMwEQYDVQQLDApBbWF6b24gUkRT
-MSYwJAYDVQQDDB1BbWF6b24gUkRTIG1lLXNvdXRoLTEgUm9vdCBDQTAeFw0xOTA1
-MTAyMTU4NDNaFw0yNTA2MDExMjAwMDBaMIGQMQswCQYDVQQGEwJVUzETMBEGA1UE
-CAwKV2FzaGluZ3RvbjEQMA4GA1UEBwwHU2VhdHRsZTEiMCAGA1UECgwZQW1hem9u
-IFdlYiBTZXJ2aWNlcywgSW5jLjETMBEGA1UECwwKQW1hem9uIFJEUzEhMB8GA1UE
-AwwYQW1hem9uIFJEUyBtZS1zb3V0aC0xIENBMIIBIjANBgkqhkiG9w0BAQEFAAOC
-AQ8AMIIBCgKCAQEAudOYPZH+ihJAo6hNYMB5izPVBe3TYhnZm8+X3IoaaYiKtsp1
-JJhkTT0CEejYIQ58Fh4QrMUyWvU8qsdK3diNyQRoYLbctsBPgxBR1u07eUJDv38/
-C1JlqgHmMnMi4y68Iy7ymv50QgAMuaBqgEBRI1R6Lfbyrb2YvH5txjJyTVMwuCfd
-YPAtZVouRz0JxmnfsHyxjE+So56uOKTDuw++Ho4HhZ7Qveej7XB8b+PIPuroknd3
-FQB5RVbXRvt5ZcVD4F2fbEdBniF7FAF4dEiofVCQGQ2nynT7dZdEIPfPdH3n7ZmE
-lAOmwHQ6G83OsiHRBLnbp+QZRgOsjkHJxT20bQIDAQABo2YwZDAOBgNVHQ8BAf8E
-BAMCAQYwEgYDVR0TAQH/BAgwBgEB/wIBADAdBgNVHQ4EFgQUOEVDM7VomRH4HVdA
-QvIMNq2tXOcwHwYDVR0jBBgwFoAU54cfDjgwBx4ycBH8+/r8WXdaiqYwDQYJKoZI
-hvcNAQELBQADggEBAHhvMssj+Th8IpNePU6RH0BiL6o9c437R3Q4IEJeFdYL+nZz
-PW/rELDPvLRUNMfKM+KzduLZ+l29HahxefejYPXtvXBlq/E/9czFDD4fWXg+zVou
-uDXhyrV4kNmP4S0eqsAP/jQHPOZAMFA4yVwO9hlqmePhyDnszCh9c1PfJSBh49+b
-4w7i/L3VBOMt8j3EKYvqz0gVfpeqhJwL4Hey8UbVfJRFJMJzfNHpePqtDRAY7yjV
-PYquRaV2ab/E+/7VFkWMM4tazYz/qsYA2jSH+4xDHvYk8LnsbcrF9iuidQmEc5sb
-FgcWaSKG4DJjcI5k7AJLWcXyTDt21Ci43LE+I9Q=
+MIIEEjCCAvqgAwIBAgIJAJYM4LxvTZA6MA0GCSqGSIb3DQEBCwUAMIGVMQswCQYD
+VQQGEwJVUzEQMA4GA1UEBwwHU2VhdHRsZTETMBEGA1UECAwKV2FzaGluZ3RvbjEi
+MCAGA1UECgwZQW1hem9uIFdlYiBTZXJ2aWNlcywgSW5jLjETMBEGA1UECwwKQW1h
+em9uIFJEUzEmMCQGA1UEAwwdQW1hem9uIFJEUyBldS1zb3V0aC0xIFJvb3QgQ0Ew
+HhcNMTkxMDMwMjAyMDM2WhcNMjQxMDI4MjAyMDM2WjCBlTELMAkGA1UEBhMCVVMx
+EDAOBgNVBAcMB1NlYXR0bGUxEzARBgNVBAgMCldhc2hpbmd0b24xIjAgBgNVBAoM
+GUFtYXpvbiBXZWIgU2VydmljZXMsIEluYy4xEzARBgNVBAsMCkFtYXpvbiBSRFMx
+JjAkBgNVBAMMHUFtYXpvbiBSRFMgZXUtc291dGgtMSBSb290IENBMIIBIjANBgkq
+hkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAqM921jXCXeqpRNCS9CBPOe5N7gMaEt+D
+s5uR3riZbqzRlHGiF1jZihkXfHAIQewDwy+Yz+Oec1aEZCQMhUHxZJPusuX0cJfj
+b+UluFqHIijL2TfXJ3D0PVLLoNTQJZ8+GAPECyojAaNuoHbdVqxhOcznMsXIXVFq
+yVLKDGvyKkJjai/iSPDrQMXufg3kWt0ISjNLvsG5IFXgP4gttsM8i0yvRd4QcHoo
+DjvH7V3cS+CQqW5SnDrGnHToB0RLskE1ET+oNOfeN9PWOxQprMOX/zmJhnJQlTqD
+QP7jcf7SddxrKFjuziFiouskJJyNDsMjt1Lf60+oHZhed2ogTeifGwIDAQABo2Mw
+YTAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQUFBAF
+cgJe/BBuZiGeZ8STfpkgRYQwHwYDVR0jBBgwFoAUFBAFcgJe/BBuZiGeZ8STfpkg
+RYQwDQYJKoZIhvcNAQELBQADggEBAKAYUtlvDuX2UpZW9i1QgsjFuy/ErbW0dLHU
+e/IcFtju2z6RLZ+uF+5A8Kme7IKG1hgt8s+w9TRVQS/7ukQzoK3TaN6XKXRosjtc
+o9Rm4gYWM8bmglzY1TPNaiI4HC7546hSwJhubjN0bXCuj/0sHD6w2DkiGuwKNAef
+yTu5vZhPkeNyXLykxkzz7bNp2/PtMBnzIp+WpS7uUDmWyScGPohKMq5PqvL59z+L
+ZI3CYeMZrJ5VpXUg3fNNIz/83N3G0sk7wr0ohs/kHTP7xPOYB0zD7Ku4HA0Q9Swf
+WX0qr6UQgTPMjfYDLffI7aEId0gxKw1eGYc6Cq5JAZ3ipi/cBFc=
-----END CERTIFICATE-----
-----BEGIN CERTIFICATE-----
MIIEEjCCAvqgAwIBAgIJANew34ehz5l8MA0GCSqGSIb3DQEBCwUAMIGVMQswCQYD
@@ -551,28 +71,28 @@ aTW6R05681Z0mvkRdb+cdXtKOSuDZPoe2wJJIaz3IlNQNSrB5TImMYgmt6iAsFhv
3vfTSTKrZDNTJn4ybG6pq1zWExoXsktZPylJly6R3RBwV6nwqBM=
-----END CERTIFICATE-----
-----BEGIN CERTIFICATE-----
-MIIEETCCAvmgAwIBAgICEAAwDQYJKoZIhvcNAQELBQAwgZQxCzAJBgNVBAYTAlVT
-MRAwDgYDVQQHDAdTZWF0dGxlMRMwEQYDVQQIDApXYXNoaW5ndG9uMSIwIAYDVQQK
-DBlBbWF6b24gV2ViIFNlcnZpY2VzLCBJbmMuMRMwEQYDVQQLDApBbWF6b24gUkRT
-MSUwIwYDVQQDDBxBbWF6b24gUkRTIEJldGEgUm9vdCAyMDE5IENBMB4XDTE5MDgy
-MDE3MTAwN1oXDTI0MDgxOTE3MzgyNlowgZkxCzAJBgNVBAYTAlVTMRMwEQYDVQQI
-DApXYXNoaW5ndG9uMRAwDgYDVQQHDAdTZWF0dGxlMSIwIAYDVQQKDBlBbWF6b24g
-V2ViIFNlcnZpY2VzLCBJbmMuMRMwEQYDVQQLDApBbWF6b24gUkRTMSowKAYDVQQD
-DCFBbWF6b24gUkRTIEJldGEgdXMtZWFzdC0xIDIwMTkgQ0EwggEiMA0GCSqGSIb3
-DQEBAQUAA4IBDwAwggEKAoIBAQDTNCOlotQcLP8TP82U2+nk0bExVuuMVOgFeVMx
-vbUHZQeIj9ikjk+jm6eTDnnkhoZcmJiJgRy+5Jt69QcRbb3y3SAU7VoHgtraVbxF
-QDh7JEHI9tqEEVOA5OvRrDRcyeEYBoTDgh76ROco2lR+/9uCvGtHVrMCtG7BP7ZB
-sSVNAr1IIRZZqKLv2skKT/7mzZR2ivcw9UeBBTUf8xsfiYVBvMGoEsXEycjYdf6w
-WV+7XS7teNOc9UgsFNN+9AhIBc1jvee5E//72/4F8pAttAg/+mmPUyIKtekNJ4gj
-OAR2VAzGx1ybzWPwIgOudZFHXFduxvq4f1hIRPH0KbQ/gkRrAgMBAAGjZjBkMA4G
-A1UdDwEB/wQEAwIBBjASBgNVHRMBAf8ECDAGAQH/AgEAMB0GA1UdDgQWBBTkvpCD
-6C43rar9TtJoXr7q8dkrrjAfBgNVHSMEGDAWgBStoQwVpbGx87fxB3dEGDqKKnBT
-4TANBgkqhkiG9w0BAQsFAAOCAQEAJd9fOSkwB3uVdsS+puj6gCER8jqmhd3g/J5V
-Zjk9cKS8H0e8pq/tMxeJ8kpurPAzUk5RkCspGt2l0BSwmf3ahr8aJRviMX6AuW3/
-g8aKplTvq/WMNGKLXONa3Sq8591J+ce8gtOX/1rDKmFI4wQ/gUzOSYiT991m7QKS
-Fr6HMgFuz7RNJbb3Fy5cnurh8eYWA7mMv7laiLwTNsaro5qsqErD5uXuot6o9beT
-a+GiKinEur35tNxAr47ax4IRubuIzyfCrezjfKc5raVV2NURJDyKP0m0CCaffAxE
-qn2dNfYc3v1D8ypg3XjHlOzRo32RB04o8ALHMD9LSwsYDLpMag==
+MIIEBjCCAu6gAwIBAgIJAMc0ZzaSUK51MA0GCSqGSIb3DQEBCwUAMIGPMQswCQYD
+VQQGEwJVUzEQMA4GA1UEBwwHU2VhdHRsZTETMBEGA1UECAwKV2FzaGluZ3RvbjEi
+MCAGA1UECgwZQW1hem9uIFdlYiBTZXJ2aWNlcywgSW5jLjETMBEGA1UECwwKQW1h
+em9uIFJEUzEgMB4GA1UEAwwXQW1hem9uIFJEUyBSb290IDIwMTkgQ0EwHhcNMTkw
+ODIyMTcwODUwWhcNMjQwODIyMTcwODUwWjCBjzELMAkGA1UEBhMCVVMxEDAOBgNV
+BAcMB1NlYXR0bGUxEzARBgNVBAgMCldhc2hpbmd0b24xIjAgBgNVBAoMGUFtYXpv
+biBXZWIgU2VydmljZXMsIEluYy4xEzARBgNVBAsMCkFtYXpvbiBSRFMxIDAeBgNV
+BAMMF0FtYXpvbiBSRFMgUm9vdCAyMDE5IENBMIIBIjANBgkqhkiG9w0BAQEFAAOC
+AQ8AMIIBCgKCAQEArXnF/E6/Qh+ku3hQTSKPMhQQlCpoWvnIthzX6MK3p5a0eXKZ
+oWIjYcNNG6UwJjp4fUXl6glp53Jobn+tWNX88dNH2n8DVbppSwScVE2LpuL+94vY
+0EYE/XxN7svKea8YvlrqkUBKyxLxTjh+U/KrGOaHxz9v0l6ZNlDbuaZw3qIWdD/I
+6aNbGeRUVtpM6P+bWIoxVl/caQylQS6CEYUk+CpVyJSkopwJlzXT07tMoDL5WgX9
+O08KVgDNz9qP/IGtAcRduRcNioH3E9v981QO1zt/Gpb2f8NqAjUUCUZzOnij6mx9
+McZ+9cWX88CRzR0vQODWuZscgI08NvM69Fn2SQIDAQABo2MwYTAOBgNVHQ8BAf8E
+BAMCAQYwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQUc19g2LzLA5j0Kxc0LjZa
+pmD/vB8wHwYDVR0jBBgwFoAUc19g2LzLA5j0Kxc0LjZapmD/vB8wDQYJKoZIhvcN
+AQELBQADggEBAHAG7WTmyjzPRIM85rVj+fWHsLIvqpw6DObIjMWokpliCeMINZFV
+ynfgBKsf1ExwbvJNzYFXW6dihnguDG9VMPpi2up/ctQTN8tm9nDKOy08uNZoofMc
+NUZxKCEkVKZv+IL4oHoeayt8egtv3ujJM6V14AstMQ6SwvwvA93EP/Ug2e4WAXHu
+cbI1NAbUgVDqp+DRdfvZkgYKryjTWd/0+1fS8X1bBZVWzl7eirNVnHbSH2ZDpNuY
+0SBd8dj5F6ld3t58ydZbrTHze7JJOd8ijySAp4/kiu9UfZWuTPABzDa/DSdz9Dk/
+zPW4CXXvhLmE02TA9/HeCw3KEHIwicNuEfw=
-----END CERTIFICATE-----
-----BEGIN CERTIFICATE-----
MIIEEDCCAvigAwIBAgIJAKFMXyltvuRdMA0GCSqGSIb3DQEBCwUAMIGUMQswCQYD
@@ -599,30 +119,6 @@ XR/UVxMJL0Q4iVpcRS1kaNCMfqS2smbLJeNdsan8pkw1dvPhcaVTb7CvjhJtjztF
YfDzAI5794qMlWxwilKMmUvDlPPOTen8NNHkLwWvyFCH7Doh
-----END CERTIFICATE-----
-----BEGIN CERTIFICATE-----
-MIIEFzCCAv+gAwIBAgICFSUwDQYJKoZIhvcNAQELBQAwgZcxCzAJBgNVBAYTAlVT
-MRAwDgYDVQQHDAdTZWF0dGxlMRMwEQYDVQQIDApXYXNoaW5ndG9uMSIwIAYDVQQK
-DBlBbWF6b24gV2ViIFNlcnZpY2VzLCBJbmMuMRMwEQYDVQQLDApBbWF6b24gUkRT
-MSgwJgYDVQQDDB9BbWF6b24gUkRTIFByZXZpZXcgUm9vdCAyMDE5IENBMB4XDTE5
-MDgyMTIyMzk0N1oXDTI0MDgyMTIyMjk0OVowgZwxCzAJBgNVBAYTAlVTMRMwEQYD
-VQQIDApXYXNoaW5ndG9uMRAwDgYDVQQHDAdTZWF0dGxlMSIwIAYDVQQKDBlBbWF6
-b24gV2ViIFNlcnZpY2VzLCBJbmMuMRMwEQYDVQQLDApBbWF6b24gUkRTMS0wKwYD
-VQQDDCRBbWF6b24gUkRTIFByZXZpZXcgdXMtZWFzdC0yIDIwMTkgQ0EwggEiMA0G
-CSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQD0dB/U7qRnSf05wOi7m10Pa2uPMTJv
-r6U/3Y17a5prq5Zr4++CnSUYarG51YuIf355dKs+7Lpzs782PIwCmLpzAHKWzix6
-pOaTQ+WZ0+vUMTxyqgqWbsBgSCyP7pVBiyqnmLC/L4az9XnscrbAX4pNaoJxsuQe
-mzBo6yofjQaAzCX69DuqxFkVTRQnVy7LCFkVaZtjNAftnAHJjVgQw7lIhdGZp9q9
-IafRt2gteihYfpn+EAQ/t/E4MnhrYs4CPLfS7BaYXBycEKC5Muj1l4GijNNQ0Efo
-xG8LSZz7SNgUvfVwiNTaqfLP3AtEAWiqxyMyh3VO+1HpCjT7uNBFtmF3AgMBAAGj
-ZjBkMA4GA1UdDwEB/wQEAwIBBjASBgNVHRMBAf8ECDAGAQH/AgEAMB0GA1UdDgQW
-BBQtinkdrj+0B2+qdXngV2tgHnPIujAfBgNVHSMEGDAWgBRp0xqULkNh/w2ZVzEI
-o2RIY7O03TANBgkqhkiG9w0BAQsFAAOCAQEAtJdqbCxDeMc8VN1/RzCabw9BIL/z
-73Auh8eFTww/sup26yn8NWUkfbckeDYr1BrXa+rPyLfHpg06kwR8rBKyrs5mHwJx
-bvOzXD/5WTdgreB+2Fb7mXNvWhenYuji1MF+q1R2DXV3I05zWHteKX6Dajmx+Uuq
-Yq78oaCBSV48hMxWlp8fm40ANCL1+gzQ122xweMFN09FmNYFhwuW+Ao+Vv90ZfQG
-PYwTvN4n/gegw2TYcifGZC2PNX74q3DH03DXe5fvNgRW5plgz/7f+9mS+YHd5qa9
-tYTPUvoRbi169ou6jicsMKUKPORHWhiTpSCWR1FMMIbsAcsyrvtIsuaGCQ==
------END CERTIFICATE-----
------BEGIN CERTIFICATE-----
MIIEFjCCAv6gAwIBAgIJAMzYZJ+R9NBVMA0GCSqGSIb3DQEBCwUAMIGXMQswCQYD
VQQGEwJVUzEQMA4GA1UEBwwHU2VhdHRsZTETMBEGA1UECAwKV2FzaGluZ3RvbjEi
MCAGA1UECgwZQW1hem9uIFdlYiBTZXJ2aWNlcywgSW5jLjETMBEGA1UECwwKQW1h
@@ -647,6 +143,78 @@ xMTldqWFsOF3bJIlvOY0c/1EFZXu3Ns6/oCP//Ap9vumldYMUZWmbK+gK33FPOXV
8BQ6jNC29icv7lLDpRPwjibJBXX+peDR5UK4FdYcswWEB1Tix5X8dYu6
-----END CERTIFICATE-----
-----BEGIN CERTIFICATE-----
+MIIECTCCAvGgAwIBAgICEAAwDQYJKoZIhvcNAQELBQAwgZUxCzAJBgNVBAYTAlVT
+MRAwDgYDVQQHDAdTZWF0dGxlMRMwEQYDVQQIDApXYXNoaW5ndG9uMSIwIAYDVQQK
+DBlBbWF6b24gV2ViIFNlcnZpY2VzLCBJbmMuMRMwEQYDVQQLDApBbWF6b24gUkRT
+MSYwJAYDVQQDDB1BbWF6b24gUkRTIGFmLXNvdXRoLTEgUm9vdCBDQTAeFw0xOTEw
+MjgxODA2NTNaFw0yNDEwMjgxODA2NTNaMIGQMQswCQYDVQQGEwJVUzETMBEGA1UE
+CAwKV2FzaGluZ3RvbjEQMA4GA1UEBwwHU2VhdHRsZTEiMCAGA1UECgwZQW1hem9u
+IFdlYiBTZXJ2aWNlcywgSW5jLjETMBEGA1UECwwKQW1hem9uIFJEUzEhMB8GA1UE
+AwwYQW1hem9uIFJEUyBhZi1zb3V0aC0xIENBMIIBIjANBgkqhkiG9w0BAQEFAAOC
+AQ8AMIIBCgKCAQEAvtV1OqmFa8zCVQSKOvPUJERLVFtd4rZmDpImc5rIoeBk7w/P
+9lcKUJjO8R/w1a2lJXx3oQ81tiY0Piw6TpT62YWVRMWrOw8+Vxq1dNaDSFp9I8d0
+UHillSSbOk6FOrPDp+R6AwbGFqUDebbN5LFFoDKbhNmH1BVS0a6YNKpGigLRqhka
+cClPslWtPqtjbaP3Jbxl26zWzLo7OtZl98dR225pq8aApNBwmtgA7Gh60HK/cX0t
+32W94n8D+GKSg6R4MKredVFqRTi9hCCNUu0sxYPoELuM+mHiqB5NPjtm92EzCWs+
++vgWhMc6GxG+82QSWx1Vj8sgLqtE/vLrWddf5QIDAQABo2YwZDAOBgNVHQ8BAf8E
+BAMCAQYwEgYDVR0TAQH/BAgwBgEB/wIBADAdBgNVHQ4EFgQUuLB4gYVJrSKJj/Gz
+pqc6yeA+RcAwHwYDVR0jBBgwFoAUEEi/WWMcBJsoGXg+EZwkQ0MscZQwDQYJKoZI
+hvcNAQELBQADggEBABauYOZxUhe9/RhzGJ8MsWCz8eKcyDVd4FCnY6Qh+9wcmYNT
+LtnD88LACtJKb/b81qYzcB0Em6+zVJ3Z9jznfr6buItE6es9wAoja22Xgv44BTHL
+rimbgMwpTt3uEMXDffaS0Ww6YWb3pSE0XYI2ISMWz+xRERRf+QqktSaL39zuiaW5
+tfZMre+YhohRa/F0ZQl3RCd6yFcLx4UoSPqQsUl97WhYzwAxZZfwvLJXOc4ATt3u
+VlCUylNDkaZztDJc/yN5XQoK9W5nOt2cLu513MGYKbuarQr8f+gYU8S+qOyuSRSP
+NRITzwCRVnsJE+2JmcRInn/NcanB7uOGqTvJ9+c=
+-----END CERTIFICATE-----
+-----BEGIN CERTIFICATE-----
+MIIECTCCAvGgAwIBAgICEAAwDQYJKoZIhvcNAQELBQAwgZUxCzAJBgNVBAYTAlVT
+MRAwDgYDVQQHDAdTZWF0dGxlMRMwEQYDVQQIDApXYXNoaW5ndG9uMSIwIAYDVQQK
+DBlBbWF6b24gV2ViIFNlcnZpY2VzLCBJbmMuMRMwEQYDVQQLDApBbWF6b24gUkRT
+MSYwJAYDVQQDDB1BbWF6b24gUkRTIGV1LXNvdXRoLTEgUm9vdCBDQTAeFw0xOTEw
+MzAyMDIxMzBaFw0yNDEwMzAyMDIxMzBaMIGQMQswCQYDVQQGEwJVUzETMBEGA1UE
+CAwKV2FzaGluZ3RvbjEQMA4GA1UEBwwHU2VhdHRsZTEiMCAGA1UECgwZQW1hem9u
+IFdlYiBTZXJ2aWNlcywgSW5jLjETMBEGA1UECwwKQW1hem9uIFJEUzEhMB8GA1UE
+AwwYQW1hem9uIFJEUyBldS1zb3V0aC0xIENBMIIBIjANBgkqhkiG9w0BAQEFAAOC
+AQ8AMIIBCgKCAQEAtEyjYcajx6xImJn8Vz1zjdmL4ANPgQXwF7+tF7xccmNAZETb
+bzb3I9i5fZlmrRaVznX+9biXVaGxYzIUIR3huQ3Q283KsDYnVuGa3mk690vhvJbB
+QIPgKa5mVwJppnuJm78KqaSpi0vxyCPe3h8h6LLFawVyWrYNZ4okli1/U582eef8
+RzJp/Ear3KgHOLIiCdPDF0rjOdCG1MOlDLixVnPn9IYOciqO+VivXBg+jtfc5J+L
+AaPm0/Yx4uELt1tkbWkm4BvTU/gBOODnYziITZM0l6Fgwvbwgq5duAtKW+h031lC
+37rEvrclqcp4wrsUYcLAWX79ZyKIlRxcAdvEhQIDAQABo2YwZDAOBgNVHQ8BAf8E
+BAMCAQYwEgYDVR0TAQH/BAgwBgEB/wIBADAdBgNVHQ4EFgQU7zPyc0azQxnBCe7D
+b9KAadH1QSEwHwYDVR0jBBgwFoAUFBAFcgJe/BBuZiGeZ8STfpkgRYQwDQYJKoZI
+hvcNAQELBQADggEBAFGaNiYxg7yC/xauXPlaqLCtwbm2dKyK9nIFbF/7be8mk7Q3
+MOA0of1vGHPLVQLr6bJJpD9MAbUcm4cPAwWaxwcNpxOjYOFDaq10PCK4eRAxZWwF
+NJRIRmGsl8NEsMNTMCy8X+Kyw5EzH4vWFl5Uf2bGKOeFg0zt43jWQVOX6C+aL3Cd
+pRS5MhmYpxMG8irrNOxf4NVFE2zpJOCm3bn0STLhkDcV/ww4zMzObTJhiIb5wSWn
+EXKKWhUXuRt7A2y1KJtXpTbSRHQxE++69Go1tWhXtRiULCJtf7wF2Ksm0RR/AdXT
+1uR1vKyH5KBJPX3ppYkQDukoHTFR0CpB+G84NLo=
+-----END CERTIFICATE-----
+-----BEGIN CERTIFICATE-----
+MIIECTCCAvGgAwIBAgICEAAwDQYJKoZIhvcNAQELBQAwgZUxCzAJBgNVBAYTAlVT
+MRAwDgYDVQQHDAdTZWF0dGxlMRMwEQYDVQQIDApXYXNoaW5ndG9uMSIwIAYDVQQK
+DBlBbWF6b24gV2ViIFNlcnZpY2VzLCBJbmMuMRMwEQYDVQQLDApBbWF6b24gUkRT
+MSYwJAYDVQQDDB1BbWF6b24gUkRTIG1lLXNvdXRoLTEgUm9vdCBDQTAeFw0xOTA1
+MTAyMTU4NDNaFw0yNTA2MDExMjAwMDBaMIGQMQswCQYDVQQGEwJVUzETMBEGA1UE
+CAwKV2FzaGluZ3RvbjEQMA4GA1UEBwwHU2VhdHRsZTEiMCAGA1UECgwZQW1hem9u
+IFdlYiBTZXJ2aWNlcywgSW5jLjETMBEGA1UECwwKQW1hem9uIFJEUzEhMB8GA1UE
+AwwYQW1hem9uIFJEUyBtZS1zb3V0aC0xIENBMIIBIjANBgkqhkiG9w0BAQEFAAOC
+AQ8AMIIBCgKCAQEAudOYPZH+ihJAo6hNYMB5izPVBe3TYhnZm8+X3IoaaYiKtsp1
+JJhkTT0CEejYIQ58Fh4QrMUyWvU8qsdK3diNyQRoYLbctsBPgxBR1u07eUJDv38/
+C1JlqgHmMnMi4y68Iy7ymv50QgAMuaBqgEBRI1R6Lfbyrb2YvH5txjJyTVMwuCfd
+YPAtZVouRz0JxmnfsHyxjE+So56uOKTDuw++Ho4HhZ7Qveej7XB8b+PIPuroknd3
+FQB5RVbXRvt5ZcVD4F2fbEdBniF7FAF4dEiofVCQGQ2nynT7dZdEIPfPdH3n7ZmE
+lAOmwHQ6G83OsiHRBLnbp+QZRgOsjkHJxT20bQIDAQABo2YwZDAOBgNVHQ8BAf8E
+BAMCAQYwEgYDVR0TAQH/BAgwBgEB/wIBADAdBgNVHQ4EFgQUOEVDM7VomRH4HVdA
+QvIMNq2tXOcwHwYDVR0jBBgwFoAU54cfDjgwBx4ycBH8+/r8WXdaiqYwDQYJKoZI
+hvcNAQELBQADggEBAHhvMssj+Th8IpNePU6RH0BiL6o9c437R3Q4IEJeFdYL+nZz
+PW/rELDPvLRUNMfKM+KzduLZ+l29HahxefejYPXtvXBlq/E/9czFDD4fWXg+zVou
+uDXhyrV4kNmP4S0eqsAP/jQHPOZAMFA4yVwO9hlqmePhyDnszCh9c1PfJSBh49+b
+4w7i/L3VBOMt8j3EKYvqz0gVfpeqhJwL4Hey8UbVfJRFJMJzfNHpePqtDRAY7yjV
+PYquRaV2ab/E+/7VFkWMM4tazYz/qsYA2jSH+4xDHvYk8LnsbcrF9iuidQmEc5sb
+FgcWaSKG4DJjcI5k7AJLWcXyTDt21Ci43LE+I9Q=
+-----END CERTIFICATE-----
+-----BEGIN CERTIFICATE-----
MIIECDCCAvCgAwIBAgICVIYwDQYJKoZIhvcNAQELBQAwgY8xCzAJBgNVBAYTAlVT
MRAwDgYDVQQHDAdTZWF0dGxlMRMwEQYDVQQIDApXYXNoaW5ndG9uMSIwIAYDVQQK
DBlBbWF6b24gV2ViIFNlcnZpY2VzLCBJbmMuMRMwEQYDVQQLDApBbWF6b24gUkRT
@@ -671,30 +239,6 @@ iOghbQQyAEe03MWCyDGtSmDfr0qEk+CHN+6hPiaL8qKt4s+V9P7DeK4iW08ny8Ox
AVS7u0OK/5+jKMAMrKwpYrBydOjTUTHScocyNw==
-----END CERTIFICATE-----
-----BEGIN CERTIFICATE-----
-MIIEBjCCAu6gAwIBAgIJAMc0ZzaSUK51MA0GCSqGSIb3DQEBCwUAMIGPMQswCQYD
-VQQGEwJVUzEQMA4GA1UEBwwHU2VhdHRsZTETMBEGA1UECAwKV2FzaGluZ3RvbjEi
-MCAGA1UECgwZQW1hem9uIFdlYiBTZXJ2aWNlcywgSW5jLjETMBEGA1UECwwKQW1h
-em9uIFJEUzEgMB4GA1UEAwwXQW1hem9uIFJEUyBSb290IDIwMTkgQ0EwHhcNMTkw
-ODIyMTcwODUwWhcNMjQwODIyMTcwODUwWjCBjzELMAkGA1UEBhMCVVMxEDAOBgNV
-BAcMB1NlYXR0bGUxEzARBgNVBAgMCldhc2hpbmd0b24xIjAgBgNVBAoMGUFtYXpv
-biBXZWIgU2VydmljZXMsIEluYy4xEzARBgNVBAsMCkFtYXpvbiBSRFMxIDAeBgNV
-BAMMF0FtYXpvbiBSRFMgUm9vdCAyMDE5IENBMIIBIjANBgkqhkiG9w0BAQEFAAOC
-AQ8AMIIBCgKCAQEArXnF/E6/Qh+ku3hQTSKPMhQQlCpoWvnIthzX6MK3p5a0eXKZ
-oWIjYcNNG6UwJjp4fUXl6glp53Jobn+tWNX88dNH2n8DVbppSwScVE2LpuL+94vY
-0EYE/XxN7svKea8YvlrqkUBKyxLxTjh+U/KrGOaHxz9v0l6ZNlDbuaZw3qIWdD/I
-6aNbGeRUVtpM6P+bWIoxVl/caQylQS6CEYUk+CpVyJSkopwJlzXT07tMoDL5WgX9
-O08KVgDNz9qP/IGtAcRduRcNioH3E9v981QO1zt/Gpb2f8NqAjUUCUZzOnij6mx9
-McZ+9cWX88CRzR0vQODWuZscgI08NvM69Fn2SQIDAQABo2MwYTAOBgNVHQ8BAf8E
-BAMCAQYwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQUc19g2LzLA5j0Kxc0LjZa
-pmD/vB8wHwYDVR0jBBgwFoAUc19g2LzLA5j0Kxc0LjZapmD/vB8wDQYJKoZIhvcN
-AQELBQADggEBAHAG7WTmyjzPRIM85rVj+fWHsLIvqpw6DObIjMWokpliCeMINZFV
-ynfgBKsf1ExwbvJNzYFXW6dihnguDG9VMPpi2up/ctQTN8tm9nDKOy08uNZoofMc
-NUZxKCEkVKZv+IL4oHoeayt8egtv3ujJM6V14AstMQ6SwvwvA93EP/Ug2e4WAXHu
-cbI1NAbUgVDqp+DRdfvZkgYKryjTWd/0+1fS8X1bBZVWzl7eirNVnHbSH2ZDpNuY
-0SBd8dj5F6ld3t58ydZbrTHze7JJOd8ijySAp4/kiu9UfZWuTPABzDa/DSdz9Dk/
-zPW4CXXvhLmE02TA9/HeCw3KEHIwicNuEfw=
------END CERTIFICATE-----
------BEGIN CERTIFICATE-----
MIIEBzCCAu+gAwIBAgICQ2QwDQYJKoZIhvcNAQELBQAwgY8xCzAJBgNVBAYTAlVT
MRAwDgYDVQQHDAdTZWF0dGxlMRMwEQYDVQQIDApXYXNoaW5ndG9uMSIwIAYDVQQK
DBlBbWF6b24gV2ViIFNlcnZpY2VzLCBJbmMuMRMwEQYDVQQLDApBbWF6b24gUkRT
@@ -1078,3 +622,2407 @@ E1LaAUCmCZBVi9fIe0H2r9whIh4uLWZA41oMnJx/MOmo3XyMfQoWcqaSFlMqfZM4
h2XBHKxQ1Y4HgAn0jACP2QSPEmuoQEIa57bEKEcZsBR8SDY6ZdTd2HLRIApcCOSF
MRM8CKLeF658I0XgF8D5EsYoKPsA+74Z+jDH
-----END CERTIFICATE-----
+-----BEGIN CERTIFICATE-----
+MIIEETCCAvmgAwIBAgICEAAwDQYJKoZIhvcNAQELBQAwgZQxCzAJBgNVBAYTAlVT
+MRAwDgYDVQQHDAdTZWF0dGxlMRMwEQYDVQQIDApXYXNoaW5ndG9uMSIwIAYDVQQK
+DBlBbWF6b24gV2ViIFNlcnZpY2VzLCBJbmMuMRMwEQYDVQQLDApBbWF6b24gUkRT
+MSUwIwYDVQQDDBxBbWF6b24gUkRTIEJldGEgUm9vdCAyMDE5IENBMB4XDTE5MDgy
+MDE3MTAwN1oXDTI0MDgxOTE3MzgyNlowgZkxCzAJBgNVBAYTAlVTMRMwEQYDVQQI
+DApXYXNoaW5ndG9uMRAwDgYDVQQHDAdTZWF0dGxlMSIwIAYDVQQKDBlBbWF6b24g
+V2ViIFNlcnZpY2VzLCBJbmMuMRMwEQYDVQQLDApBbWF6b24gUkRTMSowKAYDVQQD
+DCFBbWF6b24gUkRTIEJldGEgdXMtZWFzdC0xIDIwMTkgQ0EwggEiMA0GCSqGSIb3
+DQEBAQUAA4IBDwAwggEKAoIBAQDTNCOlotQcLP8TP82U2+nk0bExVuuMVOgFeVMx
+vbUHZQeIj9ikjk+jm6eTDnnkhoZcmJiJgRy+5Jt69QcRbb3y3SAU7VoHgtraVbxF
+QDh7JEHI9tqEEVOA5OvRrDRcyeEYBoTDgh76ROco2lR+/9uCvGtHVrMCtG7BP7ZB
+sSVNAr1IIRZZqKLv2skKT/7mzZR2ivcw9UeBBTUf8xsfiYVBvMGoEsXEycjYdf6w
+WV+7XS7teNOc9UgsFNN+9AhIBc1jvee5E//72/4F8pAttAg/+mmPUyIKtekNJ4gj
+OAR2VAzGx1ybzWPwIgOudZFHXFduxvq4f1hIRPH0KbQ/gkRrAgMBAAGjZjBkMA4G
+A1UdDwEB/wQEAwIBBjASBgNVHRMBAf8ECDAGAQH/AgEAMB0GA1UdDgQWBBTkvpCD
+6C43rar9TtJoXr7q8dkrrjAfBgNVHSMEGDAWgBStoQwVpbGx87fxB3dEGDqKKnBT
+4TANBgkqhkiG9w0BAQsFAAOCAQEAJd9fOSkwB3uVdsS+puj6gCER8jqmhd3g/J5V
+Zjk9cKS8H0e8pq/tMxeJ8kpurPAzUk5RkCspGt2l0BSwmf3ahr8aJRviMX6AuW3/
+g8aKplTvq/WMNGKLXONa3Sq8591J+ce8gtOX/1rDKmFI4wQ/gUzOSYiT991m7QKS
+Fr6HMgFuz7RNJbb3Fy5cnurh8eYWA7mMv7laiLwTNsaro5qsqErD5uXuot6o9beT
+a+GiKinEur35tNxAr47ax4IRubuIzyfCrezjfKc5raVV2NURJDyKP0m0CCaffAxE
+qn2dNfYc3v1D8ypg3XjHlOzRo32RB04o8ALHMD9LSwsYDLpMag==
+-----END CERTIFICATE-----
+-----BEGIN CERTIFICATE-----
+MIIEFzCCAv+gAwIBAgICFSUwDQYJKoZIhvcNAQELBQAwgZcxCzAJBgNVBAYTAlVT
+MRAwDgYDVQQHDAdTZWF0dGxlMRMwEQYDVQQIDApXYXNoaW5ndG9uMSIwIAYDVQQK
+DBlBbWF6b24gV2ViIFNlcnZpY2VzLCBJbmMuMRMwEQYDVQQLDApBbWF6b24gUkRT
+MSgwJgYDVQQDDB9BbWF6b24gUkRTIFByZXZpZXcgUm9vdCAyMDE5IENBMB4XDTE5
+MDgyMTIyMzk0N1oXDTI0MDgyMTIyMjk0OVowgZwxCzAJBgNVBAYTAlVTMRMwEQYD
+VQQIDApXYXNoaW5ndG9uMRAwDgYDVQQHDAdTZWF0dGxlMSIwIAYDVQQKDBlBbWF6
+b24gV2ViIFNlcnZpY2VzLCBJbmMuMRMwEQYDVQQLDApBbWF6b24gUkRTMS0wKwYD
+VQQDDCRBbWF6b24gUkRTIFByZXZpZXcgdXMtZWFzdC0yIDIwMTkgQ0EwggEiMA0G
+CSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQD0dB/U7qRnSf05wOi7m10Pa2uPMTJv
+r6U/3Y17a5prq5Zr4++CnSUYarG51YuIf355dKs+7Lpzs782PIwCmLpzAHKWzix6
+pOaTQ+WZ0+vUMTxyqgqWbsBgSCyP7pVBiyqnmLC/L4az9XnscrbAX4pNaoJxsuQe
+mzBo6yofjQaAzCX69DuqxFkVTRQnVy7LCFkVaZtjNAftnAHJjVgQw7lIhdGZp9q9
+IafRt2gteihYfpn+EAQ/t/E4MnhrYs4CPLfS7BaYXBycEKC5Muj1l4GijNNQ0Efo
+xG8LSZz7SNgUvfVwiNTaqfLP3AtEAWiqxyMyh3VO+1HpCjT7uNBFtmF3AgMBAAGj
+ZjBkMA4GA1UdDwEB/wQEAwIBBjASBgNVHRMBAf8ECDAGAQH/AgEAMB0GA1UdDgQW
+BBQtinkdrj+0B2+qdXngV2tgHnPIujAfBgNVHSMEGDAWgBRp0xqULkNh/w2ZVzEI
+o2RIY7O03TANBgkqhkiG9w0BAQsFAAOCAQEAtJdqbCxDeMc8VN1/RzCabw9BIL/z
+73Auh8eFTww/sup26yn8NWUkfbckeDYr1BrXa+rPyLfHpg06kwR8rBKyrs5mHwJx
+bvOzXD/5WTdgreB+2Fb7mXNvWhenYuji1MF+q1R2DXV3I05zWHteKX6Dajmx+Uuq
+Yq78oaCBSV48hMxWlp8fm40ANCL1+gzQ122xweMFN09FmNYFhwuW+Ao+Vv90ZfQG
+PYwTvN4n/gegw2TYcifGZC2PNX74q3DH03DXe5fvNgRW5plgz/7f+9mS+YHd5qa9
+tYTPUvoRbi169ou6jicsMKUKPORHWhiTpSCWR1FMMIbsAcsyrvtIsuaGCQ==
+-----END CERTIFICATE-----
+-----BEGIN CERTIFICATE-----
+MIID/jCCAuagAwIBAgIQdOCSuA9psBpQd8EI368/0DANBgkqhkiG9w0BAQsFADCB
+lzELMAkGA1UEBhMCVVMxIjAgBgNVBAoMGUFtYXpvbiBXZWIgU2VydmljZXMsIElu
+Yy4xEzARBgNVBAsMCkFtYXpvbiBSRFMxCzAJBgNVBAgMAldBMTAwLgYDVQQDDCdB
+bWF6b24gUkRTIHNhLWVhc3QtMSBSb290IENBIFJTQTIwNDggRzExEDAOBgNVBAcM
+B1NlYXR0bGUwIBcNMjEwNTE5MTgwNjI2WhgPMjA2MTA1MTkxOTA2MjZaMIGXMQsw
+CQYDVQQGEwJVUzEiMCAGA1UECgwZQW1hem9uIFdlYiBTZXJ2aWNlcywgSW5jLjET
+MBEGA1UECwwKQW1hem9uIFJEUzELMAkGA1UECAwCV0ExMDAuBgNVBAMMJ0FtYXpv
+biBSRFMgc2EtZWFzdC0xIFJvb3QgQ0EgUlNBMjA0OCBHMTEQMA4GA1UEBwwHU2Vh
+dHRsZTCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAN6ftL6w8v3dB2yW
+LjCxSP1D7ZsOTeLZOSCz1Zv0Gkd0XLhil5MdHOHBvwH/DrXqFU2oGzCRuAy+aZis
+DardJU6ChyIQIciXCO37f0K23edhtpXuruTLLwUwzeEPdcnLPCX+sWEn9Y5FPnVm
+pCd6J8edH2IfSGoa9LdErkpuESXdidLym/w0tWG/O2By4TabkNSmpdrCL00cqI+c
+prA8Bx1jX8/9sY0gpAovtuFaRN+Ivg3PAnWuhqiSYyQ5nC2qDparOWuDiOhpY56E
+EgmTvjwqMMjNtExfYx6Rv2Ndu50TriiNKEZBzEtkekwXInTupmYTvc7U83P/959V
+UiQ+WSMCAwEAAaNCMEAwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQU4uYHdH0+
+bUeh81Eq2l5/RJbW+vswDgYDVR0PAQH/BAQDAgGGMA0GCSqGSIb3DQEBCwUAA4IB
+AQBhxcExJ+w74bvDknrPZDRgTeMLYgbVJjx2ExH7/Ac5FZZWcpUpFwWMIJJxtewI
+AnhryzM3tQYYd4CG9O+Iu0+h/VVfW7e4O3joWVkxNMb820kQSEwvZfA78aItGwOY
+WSaFNVRyloVicZRNJSyb1UL9EiJ9ldhxm4LTT0ax+4ontI7zTx6n6h8Sr6r/UOvX
+d9T5aUUENWeo6M9jGupHNn3BobtL7BZm2oS8wX8IVYj4tl0q5T89zDi2x0MxbsIV
+5ZjwqBQ5JWKv7ASGPb+z286RjPA9R2knF4lJVZrYuNV90rHvI/ECyt/JrDqeljGL
+BLl1W/UsvZo6ldLIpoMbbrb5
+-----END CERTIFICATE-----
+-----BEGIN CERTIFICATE-----
+MIIEBDCCAuygAwIBAgIQUfVbqapkLYpUqcLajpTJWzANBgkqhkiG9w0BAQsFADCB
+mjELMAkGA1UEBhMCVVMxIjAgBgNVBAoMGUFtYXpvbiBXZWIgU2VydmljZXMsIElu
+Yy4xEzARBgNVBAsMCkFtYXpvbiBSRFMxCzAJBgNVBAgMAldBMTMwMQYDVQQDDCpB
+bWF6b24gUkRTIG1lLWNlbnRyYWwtMSBSb290IENBIFJTQTIwNDggRzExEDAOBgNV
+BAcMB1NlYXR0bGUwIBcNMjIwNTA2MjMyMDA5WhgPMjA2MjA1MDcwMDIwMDlaMIGa
+MQswCQYDVQQGEwJVUzEiMCAGA1UECgwZQW1hem9uIFdlYiBTZXJ2aWNlcywgSW5j
+LjETMBEGA1UECwwKQW1hem9uIFJEUzELMAkGA1UECAwCV0ExMzAxBgNVBAMMKkFt
+YXpvbiBSRFMgbWUtY2VudHJhbC0xIFJvb3QgQ0EgUlNBMjA0OCBHMTEQMA4GA1UE
+BwwHU2VhdHRsZTCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAJIeovu3
+ewI9FVitXMQzvkh34aQ6WyI4NO3YepfJaePiv3cnyFGYHN2S1cR3UQcLWgypP5va
+j6bfroqwGbCbZZcb+6cyOB4ceKO9Ws1UkcaGHnNDcy5gXR7aCW2OGTUfinUuhd2d
+5bOGgV7JsPbpw0bwJ156+MwfOK40OLCWVbzy8B1kITs4RUPNa/ZJnvIbiMu9rdj4
+8y7GSFJLnKCjlOFUkNI5LcaYvI1+ybuNgphT3nuu5ZirvTswGakGUT/Q0J3dxP0J
+pDfg5Sj/2G4gXiaM0LppVOoU5yEwVewhQ250l0eQAqSrwPqAkdTg9ng360zqCFPE
+JPPcgI1tdGUgneECAwEAAaNCMEAwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQU
+/2AJVxWdZxc8eJgdpbwpW7b0f7IwDgYDVR0PAQH/BAQDAgGGMA0GCSqGSIb3DQEB
+CwUAA4IBAQBYm63jTu2qYKJ94gKnqc+oUgqmb1mTXmgmp/lXDbxonjszJDOXFbri
+3CCO7xB2sg9bd5YWY8sGKHaWmENj3FZpCmoefbUx++8D7Mny95Cz8R32rNcwsPTl
+ebpd9A/Oaw5ug6M0x/cNr0qzF8Wk9Dx+nFEimp8RYQdKvLDfNFZHjPa1itnTiD8M
+TorAqj+VwnUGHOYBsT/0NY12tnwXdD+ATWfpEHdOXV+kTMqFFwDyhfgRVNpTc+os
+ygr8SwhnSCpJPB/EYl2S7r+tgAbJOkuwUvGT4pTqrzDQEhwE7swgepnHC87zhf6l
+qN6mVpSnQKQLm6Ob5TeCEFgcyElsF5bH
+-----END CERTIFICATE-----
+-----BEGIN CERTIFICATE-----
+MIICrjCCAjSgAwIBAgIRAOxu0I1QuMAhIeszB3fJIlkwCgYIKoZIzj0EAwMwgZYx
+CzAJBgNVBAYTAlVTMSIwIAYDVQQKDBlBbWF6b24gV2ViIFNlcnZpY2VzLCBJbmMu
+MRMwEQYDVQQLDApBbWF6b24gUkRTMQswCQYDVQQIDAJXQTEvMC0GA1UEAwwmQW1h
+em9uIFJEUyB1cy13ZXN0LTIgUm9vdCBDQSBFQ0MzODQgRzExEDAOBgNVBAcMB1Nl
+YXR0bGUwIBcNMjEwNTI0MjIwNjU5WhgPMjEyMTA1MjQyMzA2NTlaMIGWMQswCQYD
+VQQGEwJVUzEiMCAGA1UECgwZQW1hem9uIFdlYiBTZXJ2aWNlcywgSW5jLjETMBEG
+A1UECwwKQW1hem9uIFJEUzELMAkGA1UECAwCV0ExLzAtBgNVBAMMJkFtYXpvbiBS
+RFMgdXMtd2VzdC0yIFJvb3QgQ0EgRUNDMzg0IEcxMRAwDgYDVQQHDAdTZWF0dGxl
+MHYwEAYHKoZIzj0CAQYFK4EEACIDYgAEz4bylRcGqqDWdP7gQIIoTHdBK6FNtKH1
+4SkEIXRXkYDmRvL9Bci1MuGrwuvrka5TDj4b7e+csY0llEzHpKfq6nJPFljoYYP9
+uqHFkv77nOpJJ633KOr8IxmeHW5RXgrZo0IwQDAPBgNVHRMBAf8EBTADAQH/MB0G
+A1UdDgQWBBQQikVz8wmjd9eDFRXzBIU8OseiGzAOBgNVHQ8BAf8EBAMCAYYwCgYI
+KoZIzj0EAwMDaAAwZQIwf06Mcrpw1O0EBLBBrp84m37NYtOkE/0Z0O+C7D41wnXi
+EQdn6PXUVgdD23Gj82SrAjEAklhKs+liO1PtN15yeZR1Io98nFve+lLptaLakZcH
++hfFuUtCqMbaI8CdvJlKnPqT
+-----END CERTIFICATE-----
+-----BEGIN CERTIFICATE-----
+MIIGCTCCA/GgAwIBAgIRALyWMTyCebLZOGcZZQmkmfcwDQYJKoZIhvcNAQEMBQAw
+gZwxCzAJBgNVBAYTAlVTMSIwIAYDVQQKDBlBbWF6b24gV2ViIFNlcnZpY2VzLCBJ
+bmMuMRMwEQYDVQQLDApBbWF6b24gUkRTMQswCQYDVQQIDAJXQTE1MDMGA1UEAwws
+QW1hem9uIFJEUyBhcC1ub3J0aGVhc3QtMyBSb290IENBIFJTQTQwOTYgRzExEDAO
+BgNVBAcMB1NlYXR0bGUwIBcNMjEwNTI0MjAyODAzWhgPMjEyMTA1MjQyMTI4MDNa
+MIGcMQswCQYDVQQGEwJVUzEiMCAGA1UECgwZQW1hem9uIFdlYiBTZXJ2aWNlcywg
+SW5jLjETMBEGA1UECwwKQW1hem9uIFJEUzELMAkGA1UECAwCV0ExNTAzBgNVBAMM
+LEFtYXpvbiBSRFMgYXAtbm9ydGhlYXN0LTMgUm9vdCBDQSBSU0E0MDk2IEcxMRAw
+DgYDVQQHDAdTZWF0dGxlMIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEA
+wGFiyDyCrGqgdn4fXG12cxKAAfVvhMea1mw5h9CVRoavkPqhzQpAitSOuMB9DeiP
+wQyqcsiGl/cTEau4L+AUBG8b9v26RlY48exUYBXj8CieYntOT9iNw5WtdYJa3kF/
+JxgI+HDMzE9cmHDs5DOO3S0uwZVyra/xE1ymfSlpOeUIOTpHRJv97CBUEpaZMUW5
+Sr6GruuOwFVpO5FX3A/jQlcS+UN4GjSRgDUJuqg6RRQldEZGCVCCmodbByvI2fGm
+reGpsPJD54KkmAX08nOR8e5hkGoHxq0m2DLD4SrOFmt65vG47qnuwplWJjtk9B3Z
+9wDoopwZLBOtlkPIkUllWm1P8EuHC1IKOA+wSP6XdT7cy8S77wgyHzR0ynxv7q/l
+vlZtH30wnNqFI0y9FeogD0TGMCHcnGqfBSicJXPy9T4fU6f0r1HwqKwPp2GArwe7
+dnqLTj2D7M9MyVtFjEs6gfGWXmu1y5uDrf+CszurE8Cycoma+OfjjuVQgWOCy7Nd
+jJswPxAroTzVfpgoxXza4ShUY10woZu0/J+HmNmqK7lh4NS75q1tz75in8uTZDkV
+be7GK+SEusTrRgcf3tlgPjSTWG3veNzFDF2Vn1GLJXmuZfhdlVQDBNXW4MNREExS
+dG57kJjICpT+r8X+si+5j51gRzkSnMYs7VHulpxfcwECAwEAAaNCMEAwDwYDVR0T
+AQH/BAUwAwEB/zAdBgNVHQ4EFgQU4JWOpDBmUBuWKvGPZelw87ezhL8wDgYDVR0P
+AQH/BAQDAgGGMA0GCSqGSIb3DQEBDAUAA4ICAQBRNLMql7itvXSEFQRAnyOjivHz
+l5IlWVQjAbOUr6ogZcwvK6YpxNAFW5zQr8F+fdkiypLz1kk5irx9TIpff0BWC9hQ
+/odMPO8Gxn8+COlSvc+dLsF2Dax3Hvz0zLeKMo+cYisJOzpdR/eKd0/AmFdkvQoM
+AOK9n0yYvVJU2IrSgeJBiiCarpKSeAktEVQ4rvyacQGr+QAPkkjRwm+5LHZKK43W
+nNnggRli9N/27qYtc5bgr3AaQEhEXMI4RxPRXCLsod0ehMGWyRRK728a+6PMMJAJ
+WHOU0x7LCEMPP/bvpLj3BdvSGqNor4ZtyXEbwREry1uzsgODeRRns5acPwTM6ff+
+CmxO2NZ0OktIUSYRmf6H/ZFlZrIhV8uWaIwEJDz71qvj7buhQ+RFDZ9CNL64C0X6
+mf0zJGEpddjANHaaVky+F4gYMtEy2K2Lcm4JGTdyIzUoIe+atzCnRp0QeIcuWtF+
+s8AjDYCVFNypcMmqbRmNpITSnOoCHSRuVkY3gutVoYyMLbp8Jm9SJnCIlEWTA6Rm
+wADOMGZJVn5/XRTRuetVOB3KlQDjs9OO01XN5NzGSZO2KT9ngAUfh9Eqhf1iRWSP
+nZlRbQ2NRCuY/oJ5N59mLGxnNJSE7giEKEBRhTQ/XEPIUYAUPD5fca0arKRJwbol
+l9Se1Hsq0ZU5f+OZKQ==
+-----END CERTIFICATE-----
+-----BEGIN CERTIFICATE-----
+MIIGATCCA+mgAwIBAgIRAK7vlRrGVEePJpW1VHMXdlIwDQYJKoZIhvcNAQEMBQAw
+gZgxCzAJBgNVBAYTAlVTMSIwIAYDVQQKDBlBbWF6b24gV2ViIFNlcnZpY2VzLCBJ
+bmMuMRMwEQYDVQQLDApBbWF6b24gUkRTMQswCQYDVQQIDAJXQTExMC8GA1UEAwwo
+QW1hem9uIFJEUyBhZi1zb3V0aC0xIFJvb3QgQ0EgUlNBNDA5NiBHMTEQMA4GA1UE
+BwwHU2VhdHRsZTAgFw0yMTA1MTkxOTI4NDNaGA8yMTIxMDUxOTIwMjg0M1owgZgx
+CzAJBgNVBAYTAlVTMSIwIAYDVQQKDBlBbWF6b24gV2ViIFNlcnZpY2VzLCBJbmMu
+MRMwEQYDVQQLDApBbWF6b24gUkRTMQswCQYDVQQIDAJXQTExMC8GA1UEAwwoQW1h
+em9uIFJEUyBhZi1zb3V0aC0xIFJvb3QgQ0EgUlNBNDA5NiBHMTEQMA4GA1UEBwwH
+U2VhdHRsZTCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBAMZiHOQC6x4o
+eC7vVOMCGiN5EuLqPYHdceFPm4h5k/ZejXTf7kryk6aoKZKsDIYihkaZwXVS7Y/y
+7Ig1F1ABi2jD+CYprj7WxXbhpysmN+CKG7YC3uE4jSvfvUnpzionkQbjJsRJcrPO
+cZJM4FVaVp3mlHHtvnM+K3T+ni4a38nAd8xrv1na4+B8ZzZwWZXarfg8lJoGskSn
+ou+3rbGQ0r+XlUP03zWujHoNlVK85qUIQvDfTB7n3O4s1XNGvkfv3GNBhYRWJYlB
+4p8T+PFN8wG+UOByp1gV7BD64RnpuZ8V3dRAlO6YVAmINyG5UGrPzkIbLtErUNHO
+4iSp4UqYvztDqJWWHR/rA84ef+I9RVwwZ8FQbjKq96OTnPrsr63A5mXTC9dXKtbw
+XNJPQY//FEdyM3K8sqM0IdCzxCA1MXZ8+QapWVjwyTjUwFvL69HYky9H8eAER59K
+5I7u/CWWeCy2R1SYUBINc3xxLr0CGGukcWPEZW2aPo5ibW5kepU1P/pzdMTaTfao
+F42jSFXbc7gplLcSqUgWwzBnn35HLTbiZOFBPKf6vRRu8aRX9atgHw/EjCebi2xP
+xIYr5Ub8u0QVHIqcnF1/hVzO/Xz0chj3E6VF/yTXnsakm+W1aM2QkZbFGpga+LMy
+mFCtdPrELjea2CfxgibaJX1Q4rdEpc8DAgMBAAGjQjBAMA8GA1UdEwEB/wQFMAMB
+Af8wHQYDVR0OBBYEFDSaycEyuspo/NOuzlzblui8KotFMA4GA1UdDwEB/wQEAwIB
+hjANBgkqhkiG9w0BAQwFAAOCAgEAbosemjeTRsL9o4v0KadBUNS3V7gdAH+X4vH2
+Ee1Jc91VOGLdd/s1L9UX6bhe37b9WjUD69ur657wDW0RzxMYgQdZ27SUl0tEgGGp
+cCmVs1ky3zEN+Hwnhkz+OTmIg1ufq0W2hJgJiluAx2r1ib1GB+YI3Mo3rXSaBYUk
+bgQuujYPctf0PA153RkeICE5GI3OaJ7u6j0caYEixBS3PDHt2MJWexITvXGwHWwc
+CcrC05RIrTUNOJaetQw8smVKYOfRImEzLLPZ5kf/H3Cbj8BNAFNsa10wgvlPuGOW
+XLXqzNXzrG4V3sjQU5YtisDMagwYaN3a6bBf1wFwFIHQoAPIgt8q5zaQ9WI+SBns
+Il6rd4zfvjq/BPmt0uI7rVg/cgbaEg/JDL2neuM9CJAzmKxYxLQuHSX2i3Fy4Y1B
+cnxnRQETCRZNPGd00ADyxPKVoYBC45/t+yVusArFt+2SVLEGiFBr23eG2CEZu+HS
+nDEgIfQ4V3YOTUNa86wvbAss1gbbnT/v1XCnNGClEWCWNCSRjwV2ZmQ/IVTmNHPo
+7axTTBBJbKJbKzFndCnuxnDXyytdYRgFU7Ly3sa27WS2KFyFEDebLFRHQEfoYqCu
+IupSqBSbXsR3U10OTjc9z6EPo1nuV6bdz+gEDthmxKa1NI+Qb1kvyliXQHL2lfhr
+5zT5+Bs=
+-----END CERTIFICATE-----
+-----BEGIN CERTIFICATE-----
+MIIF/zCCA+egAwIBAgIRAOLV6zZcL4IV2xmEneN1GwswDQYJKoZIhvcNAQEMBQAw
+gZcxCzAJBgNVBAYTAlVTMSIwIAYDVQQKDBlBbWF6b24gV2ViIFNlcnZpY2VzLCBJ
+bmMuMRMwEQYDVQQLDApBbWF6b24gUkRTMQswCQYDVQQIDAJXQTEwMC4GA1UEAwwn
+QW1hem9uIFJEUyB1cy13ZXN0LTEgUm9vdCBDQSBSU0E0MDk2IEcxMRAwDgYDVQQH
+DAdTZWF0dGxlMCAXDTIxMDUxOTE5MDg1OFoYDzIxMjEwNTE5MjAwODU4WjCBlzEL
+MAkGA1UEBhMCVVMxIjAgBgNVBAoMGUFtYXpvbiBXZWIgU2VydmljZXMsIEluYy4x
+EzARBgNVBAsMCkFtYXpvbiBSRFMxCzAJBgNVBAgMAldBMTAwLgYDVQQDDCdBbWF6
+b24gUkRTIHVzLXdlc3QtMSBSb290IENBIFJTQTQwOTYgRzExEDAOBgNVBAcMB1Nl
+YXR0bGUwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQC7koAKGXXlLixN
+fVjhuqvz0WxDeTQfhthPK60ekRpftkfE5QtnYGzeovaUAiS58MYVzqnnTACDwcJs
+IGTFE6Wd7sB6r8eI/3CwI1pyJfxepubiQNVAQG0zJETOVkoYKe/5KnteKtnEER3X
+tCBRdV/rfbxEDG9ZAsYfMl6zzhEWKF88G6xhs2+VZpDqwJNNALvQuzmTx8BNbl5W
+RUWGq9CQ9GK9GPF570YPCuURW7kl35skofudE9bhURNz51pNoNtk2Z3aEeRx3ouT
+ifFJlzh+xGJRHqBG7nt5NhX8xbg+vw4xHCeq1aAe6aVFJ3Uf9E2HzLB4SfIT9bRp
+P7c9c0ySGt+3n+KLSHFf/iQ3E4nft75JdPjeSt0dnyChi1sEKDi0tnWGiXaIg+J+
+r1ZtcHiyYpCB7l29QYMAdD0TjfDwwPayLmq//c20cPmnSzw271VwqjUT0jYdrNAm
+gV+JfW9t4ixtE3xF2jaUh/NzL3bAmN5v8+9k/aqPXlU1BgE3uPwMCjrfn7V0I7I1
+WLpHyd9jF3U/Ysci6H6i8YKgaPiOfySimQiDu1idmPld659qerutUSemQWmPD3bE
+dcjZolmzS9U0Ujq/jDF1YayN3G3xvry1qWkTci0qMRMu2dZu30Herugh9vsdTYkf
+00EqngPbqtIVLDrDjEQLqPcb8QvWFQIDAQABo0IwQDAPBgNVHRMBAf8EBTADAQH/
+MB0GA1UdDgQWBBQBqg8Za/L0YMHURGExHfvPyfLbOTAOBgNVHQ8BAf8EBAMCAYYw
+DQYJKoZIhvcNAQEMBQADggIBACAGPMa1QL7P/FIO7jEtMelJ0hQlQepKnGtbKz4r
+Xq1bUX1jnLvnAieR9KZmeQVuKi3g3CDU6b0mDgygS+FL1KDDcGRCSPh238Ou8KcG
+HIxtt3CMwMHMa9gmdcMlR5fJF9vhR0C56KM2zvyelUY51B/HJqHwGvWuexryXUKa
+wq1/iK2/d9mNeOcjDvEIj0RCMI8dFQCJv3PRCTC36XS36Tzr6F47TcTw1c3mgKcs
+xpcwt7ezrXMUunzHS4qWAA5OGdzhYlcv+P5GW7iAA7TDNrBF+3W4a/6s9v2nQAnX
+UvXd9ul0ob71377UhZbJ6SOMY56+I9cJOOfF5QvaL83Sz29Ij1EKYw/s8TYdVqAq
++dCyQZBkMSnDFLVe3J1KH2SUSfm3O98jdPORQrUlORQVYCHPls19l2F6lCmU7ICK
+hRt8EVSpXm4sAIA7zcnR2nU00UH8YmMQLnx5ok9YGhuh3Ehk6QlTQLJux6LYLskd
+9YHOLGW/t6knVtV78DgPqDeEx/Wu/5A8R0q7HunpWxr8LCPBK6hksZnOoUhhb8IP
+vl46Ve5Tv/FlkyYr1RTVjETmg7lb16a8J0At14iLtpZWmwmuv4agss/1iBVMXfFk
++ZGtx5vytWU5XJmsfKA51KLsMQnhrLxb3X3zC+JRCyJoyc8++F3YEcRi2pkRYE3q
+Hing
+-----END CERTIFICATE-----
+-----BEGIN CERTIFICATE-----
+MIID/zCCAuegAwIBAgIRAI+asxQA/MB1cGyyrC0MPpkwDQYJKoZIhvcNAQELBQAw
+gZcxCzAJBgNVBAYTAlVTMSIwIAYDVQQKDBlBbWF6b24gV2ViIFNlcnZpY2VzLCBJ
+bmMuMRMwEQYDVQQLDApBbWF6b24gUkRTMQswCQYDVQQIDAJXQTEwMC4GA1UEAwwn
+QW1hem9uIFJEUyBjYS13ZXN0LTEgUm9vdCBDQSBSU0EyMDQ4IEcxMRAwDgYDVQQH
+DAdTZWF0dGxlMCAXDTIzMDkxMzIwMjEzNFoYDzIwNjMwOTEzMjEyMTMzWjCBlzEL
+MAkGA1UEBhMCVVMxIjAgBgNVBAoMGUFtYXpvbiBXZWIgU2VydmljZXMsIEluYy4x
+EzARBgNVBAsMCkFtYXpvbiBSRFMxCzAJBgNVBAgMAldBMTAwLgYDVQQDDCdBbWF6
+b24gUkRTIGNhLXdlc3QtMSBSb290IENBIFJTQTIwNDggRzExEDAOBgNVBAcMB1Nl
+YXR0bGUwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQDMHvQITTZcfl2O
+yfzRIAPKwzzlc8eXWdXef7VUsbezg3lm9RC+vArO4JuAzta/aLw1D94wPSRm9JXX
+NkP3obO6Ql80/0doooU6BAPceD0xmEWC4aCFT/5KWsD6Sy2/Rjwq3NKBTwzxLwYK
+GqVsBp8AdrzDTmdRETC+Dg2czEo32mTDAA1uMgqrz6xxeTYroj8NTSTp6jfE6C0n
+YgzYmVQCEIjHqI49j7k3jfT3P2skCVKGJwQzoZnerFacKzXsDB18uIqU7NaMc2cX
+kOd0gRqpyKOzAHU2m5/S4jw4UHdkoI3E7nkayuen8ZPKH2YqWtTXUrXGhSTT34nX
+yiFgu+vTAgMBAAGjQjBAMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFHzz1NTd
+TOm9zAv4d8l6XCFKSdJfMA4GA1UdDwEB/wQEAwIBhjANBgkqhkiG9w0BAQsFAAOC
+AQEAodBvd0cvXQYhFBef2evnuI9XA+AC/Q9P1nYtbp5MPA4aFhy5v9rjW8wwJX14
+l+ltd2o3tz8PFDBZ1NX2ooiWVlZthQxKn1/xDVKsTXHbYUXItPQ3jI5IscB5IML8
+oCzAbkoLXsSPNOVFP5P4l4cZEMqHGRnBag7hLJZvmvzZSBnz+ioC2jpjVluF8kDX
+fQGNjqPECik68CqbSV0SaQ0cgEoYTDjwON5ZLBeS8sxR2abE/gsj4VFYl5w/uEBd
+w3Tt9uGfIy+wd2tNj6isGC6PcbPMjA31jd+ifs2yNzigqkcYTTWFtnvh4a8xiecm
+GHu2EgH0Jqzz500N7L3uQdPkdg==
+-----END CERTIFICATE-----
+-----BEGIN CERTIFICATE-----
+MIIECTCCAvGgAwIBAgIRANxgyBbnxgTEOpDul2ZnC0UwDQYJKoZIhvcNAQELBQAw
+gZwxCzAJBgNVBAYTAlVTMSIwIAYDVQQKDBlBbWF6b24gV2ViIFNlcnZpY2VzLCBJ
+bmMuMRMwEQYDVQQLDApBbWF6b24gUkRTMQswCQYDVQQIDAJXQTE1MDMGA1UEAwws
+QW1hem9uIFJEUyBhcC1zb3V0aGVhc3QtMyBSb290IENBIFJTQTIwNDggRzExEDAO
+BgNVBAcMB1NlYXR0bGUwIBcNMjEwNjEwMTgxOTA3WhgPMjA2MTA2MTAxOTE5MDda
+MIGcMQswCQYDVQQGEwJVUzEiMCAGA1UECgwZQW1hem9uIFdlYiBTZXJ2aWNlcywg
+SW5jLjETMBEGA1UECwwKQW1hem9uIFJEUzELMAkGA1UECAwCV0ExNTAzBgNVBAMM
+LEFtYXpvbiBSRFMgYXAtc291dGhlYXN0LTMgUm9vdCBDQSBSU0EyMDQ4IEcxMRAw
+DgYDVQQHDAdTZWF0dGxlMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA
+xnwSDAChrMkfk5TA4Dk8hKzStDlSlONzmd3fTG0Wqr5+x3EmFT6Ksiu/WIwEl9J2
+K98UI7vYyuZfCxUKb1iMPeBdVGqk0zb92GpURd+Iz/+K1ps9ZLeGBkzR8mBmAi1S
+OfpwKiTBzIv6E8twhEn4IUpHsdcuX/2Y78uESpJyM8O5CpkG0JaV9FNEbDkJeBUQ
+Ao2qqNcH4R0Qcr5pyeqA9Zto1RswgL06BQMI9dTpfwSP5VvkvcNUaLl7Zv5WzLQE
+JzORWePvdPzzvWEkY/3FPjxBypuYwssKaERW0fkPDmPtykktP9W/oJolKUFI6pXp
+y+Y6p6/AVdnQD2zZjW5FhQIDAQABo0IwQDAPBgNVHRMBAf8EBTADAQH/MB0GA1Ud
+DgQWBBT+jEKs96LC+/X4BZkUYUkzPfXdqTAOBgNVHQ8BAf8EBAMCAYYwDQYJKoZI
+hvcNAQELBQADggEBAIGQqgqcQ6XSGkmNebzR6DhadTbfDmbYeN5N0Vuzv+Tdmufb
+tMGjdjnYMg4B+IVnTKQb+Ox3pL9gbX6KglGK8HupobmIRtwKVth+gYYz3m0SL/Nk
+haWPYzOm0x3tJm8jSdufJcEob4/ATce9JwseLl76pSWdl5A4lLjnhPPKudUDfH+1
+BLNUi3lxpp6GkC8aWUPtupnhZuXddolTLOuA3GwTZySI44NfaFRm+o83N1jp+EwD
+6e94M4cTRzjUv6J3MZmSbdtQP/Tk1uz2K4bQZGP0PZC3bVpqiesdE/xr+wbu8uHr
+cM1JXH0AmXf1yIkTgyWzmvt0k1/vgcw5ixAqvvE=
+-----END CERTIFICATE-----
+-----BEGIN CERTIFICATE-----
+MIIEATCCAumgAwIBAgIRAMhw98EQU18mIji+unM2YH8wDQYJKoZIhvcNAQELBQAw
+gZgxCzAJBgNVBAYTAlVTMSIwIAYDVQQKDBlBbWF6b24gV2ViIFNlcnZpY2VzLCBJ
+bmMuMRMwEQYDVQQLDApBbWF6b24gUkRTMQswCQYDVQQIDAJXQTExMC8GA1UEAwwo
+QW1hem9uIFJEUyBhcC1zb3V0aC0yIFJvb3QgQ0EgUlNBMjA0OCBHMTEQMA4GA1UE
+BwwHU2VhdHRsZTAgFw0yMjA2MDYyMTQyMjJaGA8yMDYyMDYwNjIyNDIyMlowgZgx
+CzAJBgNVBAYTAlVTMSIwIAYDVQQKDBlBbWF6b24gV2ViIFNlcnZpY2VzLCBJbmMu
+MRMwEQYDVQQLDApBbWF6b24gUkRTMQswCQYDVQQIDAJXQTExMC8GA1UEAwwoQW1h
+em9uIFJEUyBhcC1zb3V0aC0yIFJvb3QgQ0EgUlNBMjA0OCBHMTEQMA4GA1UEBwwH
+U2VhdHRsZTCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAIeeRoLfTm+7
+vqm7ZlFSx+1/CGYHyYrOOryM4/Z3dqYVHFMgWTR7V3ziO8RZ6yUanrRcWVX3PZbF
+AfX0KFE8OgLsXEZIX8odSrq86+/Th5eZOchB2fDBsUB7GuN2rvFBbM8lTI9ivVOU
+lbuTnYyb55nOXN7TpmH2bK+z5c1y9RVC5iQsNAl6IJNvSN8VCqXh31eK5MlKB4DT
++Y3OivCrSGsjM+UR59uZmwuFB1h+icE+U0p9Ct3Mjq3MzSX5tQb6ElTNGlfmyGpW
+Kh7GQ5XU1KaKNZXoJ37H53woNSlq56bpVrKI4uv7ATpdpFubOnSLtpsKlpLdR3sy
+Ws245200pC8CAwEAAaNCMEAwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQUp0ki
+6+eWvsnBjQhMxwMW5pwn7DgwDgYDVR0PAQH/BAQDAgGGMA0GCSqGSIb3DQEBCwUA
+A4IBAQB2V8lv0aqbYQpj/bmVv/83QfE4vOxKCJAHv7DQ35cJsTyBdF+8pBczzi3t
+3VNL5IUgW6WkyuUOWnE0eqAFOUVj0yTS1jSAtfl3vOOzGJZmWBbqm9BKEdu1D8O6
+sB8bnomwiab2tNDHPmUslpdDqdabbkWwNWzLJ97oGFZ7KNODMEPXWKWNxg33iHfS
+/nlmnrTVI3XgaNK9qLZiUrxu9Yz5gxi/1K+sG9/Dajd32ZxjRwDipOLiZbiXQrsd
+qzIMY4GcWf3g1gHL5mCTfk7dG22h/rhPyGV0svaDnsb+hOt6sv1McMN6Y3Ou0mtM
+/UaAXojREmJmTSCNvs2aBny3/2sy
+-----END CERTIFICATE-----
+-----BEGIN CERTIFICATE-----
+MIICrjCCAjSgAwIBAgIRAMnRxsKLYscJV8Qv5pWbL7swCgYIKoZIzj0EAwMwgZYx
+CzAJBgNVBAYTAlVTMSIwIAYDVQQKDBlBbWF6b24gV2ViIFNlcnZpY2VzLCBJbmMu
+MRMwEQYDVQQLDApBbWF6b24gUkRTMQswCQYDVQQIDAJXQTEvMC0GA1UEAwwmQW1h
+em9uIFJEUyBzYS1lYXN0LTEgUm9vdCBDQSBFQ0MzODQgRzExEDAOBgNVBAcMB1Nl
+YXR0bGUwIBcNMjEwNTE5MTgxNjAxWhgPMjEyMTA1MTkxOTE2MDFaMIGWMQswCQYD
+VQQGEwJVUzEiMCAGA1UECgwZQW1hem9uIFdlYiBTZXJ2aWNlcywgSW5jLjETMBEG
+A1UECwwKQW1hem9uIFJEUzELMAkGA1UECAwCV0ExLzAtBgNVBAMMJkFtYXpvbiBS
+RFMgc2EtZWFzdC0xIFJvb3QgQ0EgRUNDMzg0IEcxMRAwDgYDVQQHDAdTZWF0dGxl
+MHYwEAYHKoZIzj0CAQYFK4EEACIDYgAEjFOCZgTNVKxLKhUxffiDEvTLFhrmIqdO
+dKqVdgDoELEzIHWDdC+19aDPitbCYtBVHl65ITu/9pn6mMUl5hhUNtfZuc6A+Iw1
+sBe0v0qI3y9Q9HdQYrGgeHDh8M5P7E2ho0IwQDAPBgNVHRMBAf8EBTADAQH/MB0G
+A1UdDgQWBBS5L7/8M0TzoBZk39Ps7BkfTB4yJTAOBgNVHQ8BAf8EBAMCAYYwCgYI
+KoZIzj0EAwMDaAAwZQIwI43O0NtWKTgnVv9z0LO5UMZYgSve7GvGTwqktZYCMObE
+rUI4QerXM9D6JwLy09mqAjEAypfkdLyVWtaElVDUyHFkihAS1I1oUxaaDrynLNQK
+Ou/Ay+ns+J+GyvyDUjBpVVW1
+-----END CERTIFICATE-----
+-----BEGIN CERTIFICATE-----
+MIIF/jCCA+agAwIBAgIQR71Z8lTO5Sj+as2jB7IWXzANBgkqhkiG9w0BAQwFADCB
+lzELMAkGA1UEBhMCVVMxIjAgBgNVBAoMGUFtYXpvbiBXZWIgU2VydmljZXMsIElu
+Yy4xEzARBgNVBAsMCkFtYXpvbiBSRFMxCzAJBgNVBAgMAldBMTAwLgYDVQQDDCdB
+bWF6b24gUkRTIHVzLXdlc3QtMiBSb290IENBIFJTQTQwOTYgRzExEDAOBgNVBAcM
+B1NlYXR0bGUwIBcNMjEwNTI0MjIwMzIwWhgPMjEyMTA1MjQyMzAzMjBaMIGXMQsw
+CQYDVQQGEwJVUzEiMCAGA1UECgwZQW1hem9uIFdlYiBTZXJ2aWNlcywgSW5jLjET
+MBEGA1UECwwKQW1hem9uIFJEUzELMAkGA1UECAwCV0ExMDAuBgNVBAMMJ0FtYXpv
+biBSRFMgdXMtd2VzdC0yIFJvb3QgQ0EgUlNBNDA5NiBHMTEQMA4GA1UEBwwHU2Vh
+dHRsZTCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBAM977bHIs1WJijrS
+XQMfUOhmlJjr2v0K0UjPl52sE1TJ76H8umo1yR4T7Whkd9IwBHNGKXCJtJmMr9zp
+fB38eLTu+5ydUAXdFuZpRMKBWwPVe37AdJRKqn5beS8HQjd3JXAgGKUNNuE92iqF
+qi2fIqFMpnJXWo0FIW6s2Dl2zkORd7tH0DygcRi7lgVxCsw1BJQhFJon3y+IV8/F
+bnbUXSNSDUnDW2EhvWSD8L+t4eiXYsozhDAzhBvojpxhPH9OB7vqFYw5qxFx+G0t
+lSLX5iWi1jzzc3XyGnB6WInZDVbvnvJ4BGZ+dTRpOCvsoMIn9bz4EQTvu243c7aU
+HbS/kvnCASNt+zk7C6lbmaq0AGNztwNj85Opn2enFciWZVnnJ/4OeefUWQxD0EPp
+SjEd9Cn2IHzkBZrHCg+lWZJQBKbUVS0lLIMSsLQQ6WvR38jY7D2nxM1A93xWxwpt
+ZtQnYRCVXH6zt2OwDAFePInWwxUjR5t/wu3XxPgpSfrmTi3WYtr1wFypAJ811e/P
+yBtswWUQ6BNJQvy+KnOEeGfOwmtdDFYR+GOCfvCihzrKJrxOtHIieehR5Iw3cbXG
+sm4pDzfMUVvDDz6C2M6PRlJhhClbatHCjik9hxFYEsAlqtVVK9pxaz9i8hOqSFQq
+kJSQsgWw+oM/B2CyjcSqkSQEu8RLAgMBAAGjQjBAMA8GA1UdEwEB/wQFMAMBAf8w
+HQYDVR0OBBYEFPmrdxpRRgu3IcaB5BTqlprcKdTsMA4GA1UdDwEB/wQEAwIBhjAN
+BgkqhkiG9w0BAQwFAAOCAgEAVdlxWjPvVKky3kn8ZizeM4D+EsLw9dWLau2UD/ls
+zwDCFoT6euagVeCknrn+YEl7g20CRYT9iaonGoMUPuMR/cdtPL1W/Rf40PSrGf9q
+QuxavWiHLEXOQTCtCaVZMokkvjuuLNDXyZnstgECuiZECTwhexUF4oiuhyGk9o01
+QMaiz4HX4lgk0ozALUvEzaNd9gWEwD2qe+rq9cQMTVq3IArUkvTIftZUaVUMzr0O
+ed1+zAsNa9nJhURJ/6anJPJjbQgb5qA1asFcp9UaMT1ku36U3gnR1T/BdgG2jX3X
+Um0UcaGNVPrH1ukInWW743pxWQb7/2sumEEMVh+jWbB18SAyLI4WIh4lkurdifzS
+IuTFp8TEx+MouISFhz/vJDWZ84tqoLVjkEcP6oDypq9lFoEzHDJv3V1CYcIgOusT
+k1jm9P7BXdTG7TYzUaTb9USb6bkqkD9EwJAOSs7DI94aE6rsSws2yAHavjAMfuMZ
+sDAZvkqS2Qg2Z2+CI6wUZn7mzkJXbZoqRjDvChDXEB1mIhzVXhiNW/CR5WKVDvlj
+9v1sdGByh2pbxcLQtVaq/5coM4ANgphoNz3pOYUPWHS+JUrIivBZ+JobjXcxr3SN
+9iDzcu5/FVVNbq7+KN/nvPMngT+gduEN5m+EBjm8GukJymFG0m6BENRA0QSDqZ7k
+zDY=
+-----END CERTIFICATE-----
+-----BEGIN CERTIFICATE-----
+MIIECTCCAvGgAwIBAgIRAK5EYG3iHserxMqgg+0EFjgwDQYJKoZIhvcNAQELBQAw
+gZwxCzAJBgNVBAYTAlVTMSIwIAYDVQQKDBlBbWF6b24gV2ViIFNlcnZpY2VzLCBJ
+bmMuMRMwEQYDVQQLDApBbWF6b24gUkRTMQswCQYDVQQIDAJXQTE1MDMGA1UEAwws
+QW1hem9uIFJEUyBhcC1ub3J0aGVhc3QtMyBSb290IENBIFJTQTIwNDggRzExEDAO
+BgNVBAcMB1NlYXR0bGUwIBcNMjEwNTI0MjAyMzE2WhgPMjA2MTA1MjQyMTIzMTZa
+MIGcMQswCQYDVQQGEwJVUzEiMCAGA1UECgwZQW1hem9uIFdlYiBTZXJ2aWNlcywg
+SW5jLjETMBEGA1UECwwKQW1hem9uIFJEUzELMAkGA1UECAwCV0ExNTAzBgNVBAMM
+LEFtYXpvbiBSRFMgYXAtbm9ydGhlYXN0LTMgUm9vdCBDQSBSU0EyMDQ4IEcxMRAw
+DgYDVQQHDAdTZWF0dGxlMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA
+s1L6TtB84LGraLHVC+rGPhLBW2P0oN/91Rq3AnYwqDOuTom7agANwEjvLq7dSRG/
+sIfZsSV/ABTgArZ5sCmLjHFZAo8Kd45yA9byx20RcYtAG8IZl+q1Cri+s0XefzyO
+U6mlfXZkVe6lzjlfXBkrlE/+5ifVbJK4dqOS1t9cWIpgKqv5fbE6Qbq4LVT+5/WM
+Vd2BOljuBMGMzdZubqFKFq4mzTuIYfnBm7SmHlZfTdfBYPP1ScNuhpjuzw4n3NCR
+EdU6dQv04Q6th4r7eiOCwbWI9LkmVbvBe3ylhH63lApC7MiiPYLlB13xBubVHVhV
+q1NHoNTi+zA3MN9HWicRxQIDAQABo0IwQDAPBgNVHRMBAf8EBTADAQH/MB0GA1Ud
+DgQWBBSuxoqm0/wjNiZLvqv+JlQwsDvTPDAOBgNVHQ8BAf8EBAMCAYYwDQYJKoZI
+hvcNAQELBQADggEBAFfTK/j5kv90uIbM8VaFdVbr/6weKTwehafT0pAk1bfLVX+7
+uf8oHgYiyKTTl0DFQicXejghXTeyzwoEkWSR8c6XkhD5vYG3oESqmt/RGvvoxz11
+rHHy7yHYu7RIUc3VQG60c4qxXv/1mWySGwVwJrnuyNT9KZXPevu3jVaWOVHEILaK
+HvzQ2YEcWBPmde/zEseO2QeeGF8FL45Q1d66wqIP4nNUd2pCjeTS5SpB0MMx7yi9
+ki1OH1pv8tOuIdimtZ7wkdB8+JSZoaJ81b8sRrydRwJyvB88rftuI3YB4WwGuONT
+ZezUPsmaoK69B0RChB0ofDpAaviF9V3xOWvVZfo=
+-----END CERTIFICATE-----
+-----BEGIN CERTIFICATE-----
+MIIGDzCCA/egAwIBAgIRAI0sMNG2XhaBMRN3zD7ZyoEwDQYJKoZIhvcNAQEMBQAw
+gZ8xCzAJBgNVBAYTAlVTMSIwIAYDVQQKDBlBbWF6b24gV2ViIFNlcnZpY2VzLCBJ
+bmMuMRMwEQYDVQQLDApBbWF6b24gUkRTMQswCQYDVQQIDAJXQTE4MDYGA1UEAwwv
+QW1hem9uIFJEUyBQcmV2aWV3IHVzLWVhc3QtMiBSb290IENBIFJTQTQwOTYgRzEx
+EDAOBgNVBAcMB1NlYXR0bGUwIBcNMjEwNTE4MjA1NzUwWhgPMjEyMTA1MTgyMTU3
+NTBaMIGfMQswCQYDVQQGEwJVUzEiMCAGA1UECgwZQW1hem9uIFdlYiBTZXJ2aWNl
+cywgSW5jLjETMBEGA1UECwwKQW1hem9uIFJEUzELMAkGA1UECAwCV0ExODA2BgNV
+BAMML0FtYXpvbiBSRFMgUHJldmlldyB1cy1lYXN0LTIgUm9vdCBDQSBSU0E0MDk2
+IEcxMRAwDgYDVQQHDAdTZWF0dGxlMIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIIC
+CgKCAgEAh/otSiCu4Uw3hu7OJm0PKgLsLRqBmUS6jihcrkxfN2SHmp2zuRflkweU
+BhMkebzL+xnNvC8okzbgPWtUxSmDnIRhE8J7bvSKFlqs/tmEdiI/LMqe/YIKcdsI
+20UYmvyLIjtDaJIh598SHHlF9P8DB5jD8snJfhxWY+9AZRN+YVTltgQAAgayxkWp
+M1BbvxpOnz4CC00rE0eqkguXIUSuobb1vKqdKIenlYBNxm2AmtgvQfpsBIQ0SB+8
+8Zip8Ef5rtjSw5J3s2Rq0aYvZPfCVIsKYepIboVwXtD7E9J31UkB5onLBQlaHaA6
+XlH4srsMmrew5d2XejQGy/lGZ1nVWNsKO0x/Az2QzY5Kjd6AlXZ8kq6H68hscA5i
+OMbNlXzeEQsZH0YkId3+UsEns35AAjZv4qfFoLOu8vDotWhgVNT5DfdbIWZW3ZL8
+qbmra3JnCHuaTwXMnc25QeKgVq7/rG00YB69tCIDwcf1P+tFJWxvaGtV0g2NthtB
+a+Xo09eC0L53gfZZ3hZw1pa3SIF5dIZ6RFRUQ+lFOux3Q/I3u+rYstYw7Zxc4Zeo
+Y8JiedpQXEAnbw2ECHix/L6mVWgiWCiDzBnNLLdbmXjJRnafNSndSfFtHCnY1SiP
+aCrNpzwZIJejoV1zDlWAMO+gyS28EqzuIq3WJK/TFE7acHkdKIcCAwEAAaNCMEAw
+DwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQUrmV1YASnuudfmqAZP4sKGTvScaEw
+DgYDVR0PAQH/BAQDAgGGMA0GCSqGSIb3DQEBDAUAA4ICAQBGpEKeQoPvE85tN/25
+qHFkys9oHDl93DZ62EnOqAUKLd6v0JpCyEiop4nlrJe+4KrBYVBPyKOJDcIqE2Sp
+3cvgJXLhY4i46VM3Qxe8yuYF1ElqBpg3jJVj/sCQnYz9dwoAMWIJFaDWOvmU2E7M
+MRaKx+sPXFkIjiDA6Bv0m+VHef7aedSYIY7IDltEQHuXoqNacGrYo3I50R+fZs88
+/mB3e/V7967e99D6565yf9Lcjw4oQf2Hy7kl/6P9AuMz0LODnGITwh2TKk/Zo3RU
+Vgq25RDrT4xJK6nFHyjUF6+4cOBxVpimmFw/VP1zaXT8DN5r4HyJ9p4YuSK8ha5N
+2pJc/exvU8Nv2+vS/efcDZWyuEdZ7eh1IJWQZlOZKIAONfRDRTpeQHJ3zzv3QVYy
+t78pYp/eWBHyVIfEE8p2lFKD4279WYe+Uvdb8c4Jm4TJwqkSJV8ifID7Ub80Lsir
+lPAU3OCVTBeVRFPXT2zpC4PB4W6KBSuj6OOcEu2y/HgWcoi7Cnjvp0vFTUhDFdus
+Wz3ucmJjfVsrkEO6avDKu4SwdbVHsk30TVAwPd6srIdi9U6MOeOQSOSE4EsrrS7l
+SVmu2QIDUVFpm8QAHYplkyWIyGkupyl3ashH9mokQhixIU/Pzir0byePxHLHrwLu
+1axqeKpI0F5SBUPsaVNYY2uNFg==
+-----END CERTIFICATE-----
+-----BEGIN CERTIFICATE-----
+MIIECDCCAvCgAwIBAgIQCREfzzVyDTMcNME+gWnTCTANBgkqhkiG9w0BAQsFADCB
+nDELMAkGA1UEBhMCVVMxIjAgBgNVBAoMGUFtYXpvbiBXZWIgU2VydmljZXMsIElu
+Yy4xEzARBgNVBAsMCkFtYXpvbiBSRFMxCzAJBgNVBAgMAldBMTUwMwYDVQQDDCxB
+bWF6b24gUkRTIGFwLXNvdXRoZWFzdC0yIFJvb3QgQ0EgUlNBMjA0OCBHMTEQMA4G
+A1UEBwwHU2VhdHRsZTAgFw0yMTA1MjQyMDQyMzNaGA8yMDYxMDUyNDIxNDIzM1ow
+gZwxCzAJBgNVBAYTAlVTMSIwIAYDVQQKDBlBbWF6b24gV2ViIFNlcnZpY2VzLCBJ
+bmMuMRMwEQYDVQQLDApBbWF6b24gUkRTMQswCQYDVQQIDAJXQTE1MDMGA1UEAwws
+QW1hem9uIFJEUyBhcC1zb3V0aGVhc3QtMiBSb290IENBIFJTQTIwNDggRzExEDAO
+BgNVBAcMB1NlYXR0bGUwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQDL
+1MT6br3L/4Pq87DPXtcjlXN3cnbNk2YqRAZHJayStTz8VtsFcGPJOpk14geRVeVk
+e9uKFHRbcyr/RM4owrJTj5X4qcEuATYZbo6ou/rW2kYzuWFZpFp7lqm0vasV4Z9F
+fChlhwkNks0UbM3G+psCSMNSoF19ERunj7w2c4E62LwujkeYLvKGNepjnaH10TJL
+2krpERd+ZQ4jIpObtRcMH++bTrvklc+ei8W9lqrVOJL+89v2piN3Ecdd389uphst
+qQdb1BBVXbhUrtuGHgVf7zKqN1SkCoktoWxVuOprVWhSvr7akaWeq0UmlvbEsujU
+vADqxGMcJFyCzxx3CkJjAgMBAAGjQjBAMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0O
+BBYEFFk8UJmlhoxFT3PP12PvhvazHjT4MA4GA1UdDwEB/wQEAwIBhjANBgkqhkiG
+9w0BAQsFAAOCAQEAfFtr2lGoWVXmWAsIo2NYre7kzL8Xb9Tx7desKxCCz5HOOvIr
+8JMB1YK6A7IOvQsLJQ/f1UnKRh3X3mJZjKIywfrMSh0FiDf+rjcEzXxw2dGtUem4
+A+WMvIA3jwxnJ90OQj5rQ8bg3iPtE6eojzo9vWQGw/Vu48Dtw1DJo9210Lq/6hze
+hPhNkFh8fMXNT7Q1Wz/TJqJElyAQGNOXhyGpHKeb0jHMMhsy5UNoW5hLeMS5ffao
+TBFWEJ1gVfxIU9QRxSh+62m46JIg+dwDlWv8Aww14KgepspRbMqDuaM2cinoejv6
+t3dyOyHHrsOyv3ffZUKtQhQbQr+sUcL89lARsg==
+-----END CERTIFICATE-----
+-----BEGIN CERTIFICATE-----
+MIID/zCCAuegAwIBAgIRAIJLTMpzGNxqHZ4t+c1MlCIwDQYJKoZIhvcNAQELBQAw
+gZcxCzAJBgNVBAYTAlVTMSIwIAYDVQQKDBlBbWF6b24gV2ViIFNlcnZpY2VzLCBJ
+bmMuMRMwEQYDVQQLDApBbWF6b24gUkRTMQswCQYDVQQIDAJXQTEwMC4GA1UEAwwn
+QW1hem9uIFJEUyBhcC1lYXN0LTEgUm9vdCBDQSBSU0EyMDQ4IEcxMRAwDgYDVQQH
+DAdTZWF0dGxlMCAXDTIxMDUyNTIxMzAzM1oYDzIwNjEwNTI1MjIzMDMzWjCBlzEL
+MAkGA1UEBhMCVVMxIjAgBgNVBAoMGUFtYXpvbiBXZWIgU2VydmljZXMsIEluYy4x
+EzARBgNVBAsMCkFtYXpvbiBSRFMxCzAJBgNVBAgMAldBMTAwLgYDVQQDDCdBbWF6
+b24gUkRTIGFwLWVhc3QtMSBSb290IENBIFJTQTIwNDggRzExEDAOBgNVBAcMB1Nl
+YXR0bGUwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQDtdHut0ZhJ9Nn2
+MpVafFcwHdoEzx06okmmhjJsNy4l9QYVeh0UUoek0SufRNMRF4d5ibzpgZol0Y92
+/qKWNe0jNxhEj6sXyHsHPeYtNBPuDMzThfbvsLK8z7pBP7vVyGPGuppqW/6m4ZBB
+lcc9fsf7xpZ689iSgoyjiT6J5wlVgmCx8hFYc/uvcRtfd8jAHvheug7QJ3zZmIye
+V4htOW+fRVWnBjf40Q+7uTv790UAqs0Zboj4Yil+hER0ibG62y1g71XcCyvcVpto
+2/XW7Y9NCgMNqQ7fGN3wR1gjtSYPd7DO32LTzYhutyvfbpAZjsAHnoObmoljcgXI
+QjfBcCFpAgMBAAGjQjBAMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFJI3aWLg
+CS5xqU5WYVaeT5s8lpO0MA4GA1UdDwEB/wQEAwIBhjANBgkqhkiG9w0BAQsFAAOC
+AQEAUwATpJOcGVOs3hZAgJwznWOoTzOVJKfrqBum7lvkVH1vBwxBl9CahaKj3ZOt
+YYp2qJzhDUWludL164DL4ZjS6eRedLRviyy5cRy0581l1MxPWTThs27z+lCC14RL
+PJZNVYYdl7Jy9Q5NsQ0RBINUKYlRY6OqGDySWyuMPgno2GPbE8aynMdKP+f6G/uE
+YHOf08gFDqTsbyfa70ztgVEJaRooVf5JJq4UQtpDvVswW2reT96qi6tXPKHN5qp3
+3wI0I1Mp4ePmiBKku2dwYzPfrJK/pQlvu0Gu5lKOQ65QdotwLAAoaFqrf9za1yYs
+INUkHLWIxDds+4OHNYcerGp5Dw==
+-----END CERTIFICATE-----
+-----BEGIN CERTIFICATE-----
+MIIGCTCCA/GgAwIBAgIRAIO6ldra1KZvNWJ0TA1ihXEwDQYJKoZIhvcNAQEMBQAw
+gZwxCzAJBgNVBAYTAlVTMSIwIAYDVQQKDBlBbWF6b24gV2ViIFNlcnZpY2VzLCBJ
+bmMuMRMwEQYDVQQLDApBbWF6b24gUkRTMQswCQYDVQQIDAJXQTE1MDMGA1UEAwws
+QW1hem9uIFJEUyBhcC1zb3V0aGVhc3QtMSBSb290IENBIFJTQTQwOTYgRzExEDAO
+BgNVBAcMB1NlYXR0bGUwIBcNMjEwNTIxMjE0NTA1WhgPMjEyMTA1MjEyMjQ1MDVa
+MIGcMQswCQYDVQQGEwJVUzEiMCAGA1UECgwZQW1hem9uIFdlYiBTZXJ2aWNlcywg
+SW5jLjETMBEGA1UECwwKQW1hem9uIFJEUzELMAkGA1UECAwCV0ExNTAzBgNVBAMM
+LEFtYXpvbiBSRFMgYXAtc291dGhlYXN0LTEgUm9vdCBDQSBSU0E0MDk2IEcxMRAw
+DgYDVQQHDAdTZWF0dGxlMIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEA
+sDN52Si9pFSyZ1ruh3xAN0nVqEs960o2IK5CPu/ZfshFmzAwnx/MM8EHt/jMeZtj
+SM58LADAsNDL01ELpFZATjgZQ6xNAyXRXE7RiTRUvNkK7O3o2qAGbLnJq/UqF7Sw
+LRnB8V6hYOv+2EjVnohtGCn9SUFGZtYDjWXsLd4ML4Zpxv0a5LK7oEC7AHzbUR7R
+jsjkrXqSv7GE7bvhSOhMkmgxgj1F3J0b0jdQdtyyj109aO0ATUmIvf+Bzadg5AI2
+A9UA+TUcGeebhpHu8AP1Hf56XIlzPpaQv3ZJ4vzoLaVNUC7XKzAl1dlvCl7Klg/C
+84qmbD/tjZ6GHtzpLKgg7kQEV7mRoXq8X4wDX2AFPPQl2fv+Kbe+JODqm5ZjGegm
+uskABBi8IFv1hYx9jEulZPxC6uD/09W2+niFm3pirnlWS83BwVDTUBzF+CooUIMT
+jhWkIIZGDDgMJTzouBHfoSJtS1KpUZi99m2WyVs21MNKHeWAbs+zmI6TO5iiMC+T
+uB8spaOiHFO1573Fmeer4sy3YA6qVoqVl6jjTQqOdy3frAMbCkwH22/crV8YA+08
+hLeHXrMK+6XUvU+EtHAM3VzcrLbuYJUI2XJbzTj5g0Eb8I8JWsHvWHR5K7Z7gceR
+78AzxQmoGEfV6KABNWKsgoCQnfb1BidDJIe3BsI0A6UCAwEAAaNCMEAwDwYDVR0T
+AQH/BAUwAwEB/zAdBgNVHQ4EFgQUABp0MlB14MSHgAcuNSOhs3MOlUcwDgYDVR0P
+AQH/BAQDAgGGMA0GCSqGSIb3DQEBDAUAA4ICAQCv4CIOBSQi/QR9NxdRgVAG/pAh
+tFJhV7OWb/wqwsNKFDtg6tTxwaahdCfWpGWId15OUe7G9LoPiKiwM9C92n0ZeHRz
+4ewbrQVo7Eu1JI1wf0rnZJISL72hVYKmlvaWaacHhWxvsbKLrB7vt6Cknxa+S993
+Kf8i2Psw8j5886gaxhiUtzMTBwoDWak8ZaK7m3Y6C6hXQk08+3pnIornVSFJ9dlS
+PAqt5UPwWmrEfF+0uIDORlT+cvrAwgSp7nUF1q8iasledycZ/BxFgQqzNwnkBDwQ
+Z/aM52ArGsTzfMhkZRz9HIEhz1/0mJw8gZtDVQroD8778h8zsx2SrIz7eWQ6uWsD
+QEeSWXpcheiUtEfzkDImjr2DLbwbA23c9LoexUD10nwohhoiQQg77LmvBVxeu7WU
+E63JqaYUlOLOzEmNJp85zekIgR8UTkO7Gc+5BD7P4noYscI7pPOL5rP7YLg15ZFi
+ega+G53NTckRXz4metsd8XFWloDjZJJq4FfD60VuxgXzoMNT9wpFTNSH42PR2s9L
+I1vcl3w8yNccs9se2utM2nLsItZ3J0m/+QSRiw9hbrTYTcM9sXki0DtH2kyIOwYf
+lOrGJDiYOIrXSQK36H0gQ+8omlrUTvUj4msvkXuQjlfgx6sgp2duOAfnGxE7uHnc
+UhnJzzoe6M+LfGHkVQ==
+-----END CERTIFICATE-----
+-----BEGIN CERTIFICATE-----
+MIICuDCCAj2gAwIBAgIQSAG6j2WHtWUUuLGJTPb1nTAKBggqhkjOPQQDAzCBmzEL
+MAkGA1UEBhMCVVMxIjAgBgNVBAoMGUFtYXpvbiBXZWIgU2VydmljZXMsIEluYy4x
+EzARBgNVBAsMCkFtYXpvbiBSRFMxCzAJBgNVBAgMAldBMTQwMgYDVQQDDCtBbWF6
+b24gUkRTIGFwLW5vcnRoZWFzdC0yIFJvb3QgQ0EgRUNDMzg0IEcxMRAwDgYDVQQH
+DAdTZWF0dGxlMCAXDTIxMDUyMDE2MzgyNloYDzIxMjEwNTIwMTczODI2WjCBmzEL
+MAkGA1UEBhMCVVMxIjAgBgNVBAoMGUFtYXpvbiBXZWIgU2VydmljZXMsIEluYy4x
+EzARBgNVBAsMCkFtYXpvbiBSRFMxCzAJBgNVBAgMAldBMTQwMgYDVQQDDCtBbWF6
+b24gUkRTIGFwLW5vcnRoZWFzdC0yIFJvb3QgQ0EgRUNDMzg0IEcxMRAwDgYDVQQH
+DAdTZWF0dGxlMHYwEAYHKoZIzj0CAQYFK4EEACIDYgAE2eqwU4FOzW8RV1W381Bd
+olhDOrqoMqzWli21oDUt7y8OnXM/lmAuOS6sr8Nt61BLVbONdbr+jgCYw75KabrK
+ZGg3siqvMOgabIKkKuXO14wtrGyGDt7dnKXg5ERGYOZlo0IwQDAPBgNVHRMBAf8E
+BTADAQH/MB0GA1UdDgQWBBS1Acp2WYxOcblv5ikZ3ZIbRCCW+zAOBgNVHQ8BAf8E
+BAMCAYYwCgYIKoZIzj0EAwMDaQAwZgIxAJL84J08PBprxmsAKPTotBuVI3MyW1r8
+xQ0i8lgCQUf8GcmYjQ0jI4oZyv+TuYJAcwIxAP9Xpzq0Docxb+4N1qVhpiOfWt1O
+FnemFiy9m1l+wv6p3riQMPV7mBVpklmijkIv3Q==
+-----END CERTIFICATE-----
+-----BEGIN CERTIFICATE-----
+MIIECTCCAvGgAwIBAgIRALZLcqCVIJ25maDPE3sbPCIwDQYJKoZIhvcNAQELBQAw
+gZwxCzAJBgNVBAYTAlVTMSIwIAYDVQQKDBlBbWF6b24gV2ViIFNlcnZpY2VzLCBJ
+bmMuMRMwEQYDVQQLDApBbWF6b24gUkRTMQswCQYDVQQIDAJXQTE1MDMGA1UEAwws
+QW1hem9uIFJEUyBhcC1zb3V0aGVhc3QtMSBSb290IENBIFJTQTIwNDggRzExEDAO
+BgNVBAcMB1NlYXR0bGUwIBcNMjEwNTIxMjEzOTM5WhgPMjA2MTA1MjEyMjM5Mzla
+MIGcMQswCQYDVQQGEwJVUzEiMCAGA1UECgwZQW1hem9uIFdlYiBTZXJ2aWNlcywg
+SW5jLjETMBEGA1UECwwKQW1hem9uIFJEUzELMAkGA1UECAwCV0ExNTAzBgNVBAMM
+LEFtYXpvbiBSRFMgYXAtc291dGhlYXN0LTEgUm9vdCBDQSBSU0EyMDQ4IEcxMRAw
+DgYDVQQHDAdTZWF0dGxlMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA
+ypKc+6FfGx6Gl6fQ78WYS29QoKgQiur58oxR3zltWeg5fqh9Z85K5S3UbRSTqWWu
+Xcfnkz0/FS07qHX+nWAGU27JiQb4YYqhjZNOAq8q0+ptFHJ6V7lyOqXBq5xOzO8f
++0DlbJSsy7GEtJp7d7QCM3M5KVY9dENVZUKeJwa8PC5StvwPx4jcLeZRJC2rAVDG
+SW7NAInbATvr9ssSh03JqjXb+HDyywiqoQ7EVLtmtXWimX+0b3/2vhqcH5jgcKC9
+IGFydrjPbv4kwMrKnm6XlPZ9L0/3FMzanXPGd64LQVy51SI4d5Xymn0Mw2kMX8s6
+Nf05OsWcDzJ1n6/Q1qHSxQIDAQABo0IwQDAPBgNVHRMBAf8EBTADAQH/MB0GA1Ud
+DgQWBBRmaIc8eNwGP7i6P7AJrNQuK6OpFzAOBgNVHQ8BAf8EBAMCAYYwDQYJKoZI
+hvcNAQELBQADggEBAIBeHfGwz3S2zwIUIpqEEI5/sMySDeS+3nJR+woWAHeO0C8i
+BJdDh+kzzkP0JkWpr/4NWz84/IdYo1lqASd1Kopz9aT1+iROXaWr43CtbzjXb7/X
+Zv7eZZFC8/lS5SROq42pPWl4ekbR0w8XGQElmHYcWS41LBfKeHCUwv83ATF0XQ6I
+4t+9YSqZHzj4vvedrvcRInzmwWJaal9s7Z6GuwTGmnMsN3LkhZ+/GD6oW3pU/Pyh
+EtWqffjsLhfcdCs3gG8x9BbkcJPH5aPAVkPn4wc8wuXg6xxb9YGsQuY930GWTYRf
+schbgjsuqznW4HHakq4WNhs1UdTSTKkRdZz7FUQ=
+-----END CERTIFICATE-----
+-----BEGIN CERTIFICATE-----
+MIIEDzCCAvegAwIBAgIRAM2zAbhyckaqRim63b+Tib8wDQYJKoZIhvcNAQELBQAw
+gZ8xCzAJBgNVBAYTAlVTMSIwIAYDVQQKDBlBbWF6b24gV2ViIFNlcnZpY2VzLCBJ
+bmMuMRMwEQYDVQQLDApBbWF6b24gUkRTMQswCQYDVQQIDAJXQTE4MDYGA1UEAwwv
+QW1hem9uIFJEUyBQcmV2aWV3IHVzLWVhc3QtMiBSb290IENBIFJTQTIwNDggRzEx
+EDAOBgNVBAcMB1NlYXR0bGUwIBcNMjEwNTE4MjA0OTQ1WhgPMjA2MTA1MTgyMTQ5
+NDVaMIGfMQswCQYDVQQGEwJVUzEiMCAGA1UECgwZQW1hem9uIFdlYiBTZXJ2aWNl
+cywgSW5jLjETMBEGA1UECwwKQW1hem9uIFJEUzELMAkGA1UECAwCV0ExODA2BgNV
+BAMML0FtYXpvbiBSRFMgUHJldmlldyB1cy1lYXN0LTIgUm9vdCBDQSBSU0EyMDQ4
+IEcxMRAwDgYDVQQHDAdTZWF0dGxlMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIB
+CgKCAQEA1ybjQMH1MkbvfKsWJaCTXeCSN1SG5UYid+Twe+TjuSqaXWonyp4WRR5z
+tlkqq+L2MWUeQQAX3S17ivo/t84mpZ3Rla0cx39SJtP3BiA2BwfUKRjhPwOjmk7j
+3zrcJjV5k1vSeLNOfFFSlwyDiVyLAE61lO6onBx+cRjelu0egMGq6WyFVidTdCmT
+Q9Zw3W6LTrnPvPmEyjHy2yCHzH3E50KSd/5k4MliV4QTujnxYexI2eR8F8YQC4m3
+DYjXt/MicbqA366SOoJA50JbgpuVv62+LSBu56FpzY12wubmDZsdn4lsfYKiWxUy
+uc83a2fRXsJZ1d3whxrl20VFtLFHFQIDAQABo0IwQDAPBgNVHRMBAf8EBTADAQH/
+MB0GA1UdDgQWBBRC0ytKmDYbfz0Bz0Psd4lRQV3aNTAOBgNVHQ8BAf8EBAMCAYYw
+DQYJKoZIhvcNAQELBQADggEBAGv8qZu4uaeoF6zsbumauz6ea6tdcWt+hGFuwGrb
+tRbI85ucAmVSX06x59DJClsb4MPhL1XmqO3RxVMIVVfRwRHWOsZQPnXm8OYQ2sny
+rYuFln1COOz1U/KflZjgJmxbn8x4lYiTPZRLarG0V/OsCmnLkQLPtEl/spMu8Un7
+r3K8SkbWN80gg17Q8EV5mnFwycUx9xsTAaFItuG0en9bGsMgMmy+ZsDmTRbL+lcX
+Fq8r4LT4QjrFz0shrzCwuuM4GmcYtBSxlacl+HxYEtAs5k10tmzRf6OYlY33tGf6
+1tkYvKryxDPF/EDgGp/LiBwx6ixYMBfISoYASt4V/ylAlHA=
+-----END CERTIFICATE-----
+-----BEGIN CERTIFICATE-----
+MIICtTCCAjqgAwIBAgIRAK9BSZU6nIe6jqfODmuVctYwCgYIKoZIzj0EAwMwgZkx
+CzAJBgNVBAYTAlVTMSIwIAYDVQQKDBlBbWF6b24gV2ViIFNlcnZpY2VzLCBJbmMu
+MRMwEQYDVQQLDApBbWF6b24gUkRTMQswCQYDVQQIDAJXQTEyMDAGA1UEAwwpQW1h
+em9uIFJEUyBjYS1jZW50cmFsLTEgUm9vdCBDQSBFQ0MzODQgRzExEDAOBgNVBAcM
+B1NlYXR0bGUwIBcNMjEwNTIxMjIxMzA5WhgPMjEyMTA1MjEyMzEzMDlaMIGZMQsw
+CQYDVQQGEwJVUzEiMCAGA1UECgwZQW1hem9uIFdlYiBTZXJ2aWNlcywgSW5jLjET
+MBEGA1UECwwKQW1hem9uIFJEUzELMAkGA1UECAwCV0ExMjAwBgNVBAMMKUFtYXpv
+biBSRFMgY2EtY2VudHJhbC0xIFJvb3QgQ0EgRUNDMzg0IEcxMRAwDgYDVQQHDAdT
+ZWF0dGxlMHYwEAYHKoZIzj0CAQYFK4EEACIDYgAEUkEERcgxneT5H+P+fERcbGmf
+bVx+M7rNWtgWUr6w+OBENebQA9ozTkeSg4c4M+qdYSObFqjxITdYxT1z/nHz1gyx
+OKAhLjWu+nkbRefqy3RwXaWT680uUaAP6ccnkZOMo0IwQDAPBgNVHRMBAf8EBTAD
+AQH/MB0GA1UdDgQWBBSN6fxlg0s5Wny08uRBYZcQ3TUoyzAOBgNVHQ8BAf8EBAMC
+AYYwCgYIKoZIzj0EAwMDaQAwZgIxAORaz+MBVoFBTmZ93j2G2vYTwA6T5hWzBWrx
+CrI54pKn5g6At56DBrkjrwZF5T1enAIxAJe/LZ9xpDkAdxDgGJFN8gZYLRWc0NRy
+Rb4hihy5vj9L+w9uKc9VfEBIFuhT7Z3ljg==
+-----END CERTIFICATE-----
+-----BEGIN CERTIFICATE-----
+MIIEADCCAuigAwIBAgIQB/57HSuaqUkLaasdjxUdPjANBgkqhkiG9w0BAQsFADCB
+mDELMAkGA1UEBhMCVVMxIjAgBgNVBAoMGUFtYXpvbiBXZWIgU2VydmljZXMsIElu
+Yy4xEzARBgNVBAsMCkFtYXpvbiBSRFMxCzAJBgNVBAgMAldBMTEwLwYDVQQDDChB
+bWF6b24gUkRTIGFwLXNvdXRoLTEgUm9vdCBDQSBSU0EyMDQ4IEcxMRAwDgYDVQQH
+DAdTZWF0dGxlMCAXDTIxMDUxOTE3NDAzNFoYDzIwNjEwNTE5MTg0MDM0WjCBmDEL
+MAkGA1UEBhMCVVMxIjAgBgNVBAoMGUFtYXpvbiBXZWIgU2VydmljZXMsIEluYy4x
+EzARBgNVBAsMCkFtYXpvbiBSRFMxCzAJBgNVBAgMAldBMTEwLwYDVQQDDChBbWF6
+b24gUkRTIGFwLXNvdXRoLTEgUm9vdCBDQSBSU0EyMDQ4IEcxMRAwDgYDVQQHDAdT
+ZWF0dGxlMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAtbkaoVsUS76o
+TgLFmcnaB8cswBk1M3Bf4IVRcwWT3a1HeJSnaJUqWHCJ+u3ip/zGVOYl0gN1MgBb
+MuQRIJiB95zGVcIa6HZtx00VezDTr3jgGWRHmRjNVCCHGmxOZWvJjsIE1xavT/1j
+QYV/ph4EZEIZ/qPq7e3rHohJaHDe23Z7QM9kbyqp2hANG2JtU/iUhCxqgqUHNozV
+Zd0l5K6KnltZQoBhhekKgyiHqdTrH8fWajYl5seD71bs0Axowb+Oh0rwmrws3Db2
+Dh+oc2PwREnjHeca9/1C6J2vhY+V0LGaJmnnIuOANrslx2+bgMlyhf9j0Bv8AwSi
+dSWsobOhNQIDAQABo0IwQDAPBgNVHRMBAf8EBTADAQH/MB0GA1UdDgQWBBQb7vJT
+VciLN72yJGhaRKLn6Krn2TAOBgNVHQ8BAf8EBAMCAYYwDQYJKoZIhvcNAQELBQAD
+ggEBAAxEj8N9GslReAQnNOBpGl8SLgCMTejQ6AW/bapQvzxrZrfVOZOYwp/5oV0f
+9S1jcGysDM+DrmfUJNzWxq2Y586R94WtpH4UpJDGqZp+FuOVJL313te4609kopzO
+lDdmd+8z61+0Au93wB1rMiEfnIMkOEyt7D2eTFJfJRKNmnPrd8RjimRDlFgcLWJA
+3E8wca67Lz/G0eAeLhRHIXv429y8RRXDtKNNz0wA2RwURWIxyPjn1fHjA9SPDkeW
+E1Bq7gZj+tBnrqz+ra3yjZ2blss6Ds3/uRY6NYqseFTZWmQWT7FolZEnT9vMUitW
+I0VynUbShVpGf6946e0vgaaKw20=
+-----END CERTIFICATE-----
+-----BEGIN CERTIFICATE-----
+MIID/jCCAuagAwIBAgIQGyUVTaVjYJvWhroVEiHPpDANBgkqhkiG9w0BAQsFADCB
+lzELMAkGA1UEBhMCVVMxIjAgBgNVBAoMGUFtYXpvbiBXZWIgU2VydmljZXMsIElu
+Yy4xEzARBgNVBAsMCkFtYXpvbiBSRFMxCzAJBgNVBAgMAldBMTAwLgYDVQQDDCdB
+bWF6b24gUkRTIHVzLXdlc3QtMSBSb290IENBIFJTQTIwNDggRzExEDAOBgNVBAcM
+B1NlYXR0bGUwIBcNMjEwNTE5MTkwNDA2WhgPMjA2MTA1MTkyMDA0MDZaMIGXMQsw
+CQYDVQQGEwJVUzEiMCAGA1UECgwZQW1hem9uIFdlYiBTZXJ2aWNlcywgSW5jLjET
+MBEGA1UECwwKQW1hem9uIFJEUzELMAkGA1UECAwCV0ExMDAuBgNVBAMMJ0FtYXpv
+biBSRFMgdXMtd2VzdC0xIFJvb3QgQ0EgUlNBMjA0OCBHMTEQMA4GA1UEBwwHU2Vh
+dHRsZTCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBANhyXpJ0t4nigRDZ
+EwNtFOem1rM1k8k5XmziHKDvDk831p7QsX9ZOxl/BT59Pu/P+6W6SvasIyKls1sW
+FJIjFF+6xRQcpoE5L5evMgN/JXahpKGeQJPOX9UEXVW5B8yi+/dyUitFT7YK5LZA
+MqWBN/LtHVPa8UmE88RCDLiKkqiv229tmwZtWT7nlMTTCqiAHMFcryZHx0pf9VPh
+x/iPV8p2gBJnuPwcz7z1kRKNmJ8/cWaY+9w4q7AYlAMaq/rzEqDaN2XXevdpsYAK
+TMMj2kji4x1oZO50+VPNfBl5ZgJc92qz1ocF95SAwMfOUsP8AIRZkf0CILJYlgzk
+/6u6qZECAwEAAaNCMEAwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQUm5jfcS9o
++LwL517HpB6hG+PmpBswDgYDVR0PAQH/BAQDAgGGMA0GCSqGSIb3DQEBCwUAA4IB
+AQAcQ6lsqxi63MtpGk9XK8mCxGRLCad51+MF6gcNz6i6PAqhPOoKCoFqdj4cEQTF
+F8dCfa3pvfJhxV6RIh+t5FCk/y6bWT8Ls/fYKVo6FhHj57bcemWsw/Z0XnROdVfK
+Yqbc7zvjCPmwPHEqYBhjU34NcY4UF9yPmlLOL8uO1JKXa3CAR0htIoW4Pbmo6sA4
+6P0co/clW+3zzsQ92yUCjYmRNeSbdXbPfz3K/RtFfZ8jMtriRGuO7KNxp8MqrUho
+HK8O0mlSUxGXBZMNicfo7qY8FD21GIPH9w5fp5oiAl7lqFzt3E3sCLD3IiVJmxbf
+fUwpGd1XZBBSdIxysRLM6j48
+-----END CERTIFICATE-----
+-----BEGIN CERTIFICATE-----
+MIICrTCCAjOgAwIBAgIQU+PAILXGkpoTcpF200VD/jAKBggqhkjOPQQDAzCBljEL
+MAkGA1UEBhMCVVMxIjAgBgNVBAoMGUFtYXpvbiBXZWIgU2VydmljZXMsIEluYy4x
+EzARBgNVBAsMCkFtYXpvbiBSRFMxCzAJBgNVBAgMAldBMS8wLQYDVQQDDCZBbWF6
+b24gUkRTIGFwLWVhc3QtMSBSb290IENBIEVDQzM4NCBHMTEQMA4GA1UEBwwHU2Vh
+dHRsZTAgFw0yMTA1MjUyMTQ1MTFaGA8yMTIxMDUyNTIyNDUxMVowgZYxCzAJBgNV
+BAYTAlVTMSIwIAYDVQQKDBlBbWF6b24gV2ViIFNlcnZpY2VzLCBJbmMuMRMwEQYD
+VQQLDApBbWF6b24gUkRTMQswCQYDVQQIDAJXQTEvMC0GA1UEAwwmQW1hem9uIFJE
+UyBhcC1lYXN0LTEgUm9vdCBDQSBFQ0MzODQgRzExEDAOBgNVBAcMB1NlYXR0bGUw
+djAQBgcqhkjOPQIBBgUrgQQAIgNiAAT3tFKE8Kw1sGQAvNLlLhd8OcGhlc7MiW/s
+NXm3pOiCT4vZpawKvHBzD76Kcv+ZZzHRxQEmG1/muDzZGlKR32h8AAj+NNO2Wy3d
+CKTtYMiVF6Z2zjtuSkZQdjuQbe4eQ7qjQjBAMA8GA1UdEwEB/wQFMAMBAf8wHQYD
+VR0OBBYEFAiSQOp16Vv0Ohpvqcbd2j5RmhYNMA4GA1UdDwEB/wQEAwIBhjAKBggq
+hkjOPQQDAwNoADBlAjBVsi+5Ape0kOhMt/WFkANkslD4qXA5uqhrfAtH29Xzz2NV
+tR7akiA771OaIGB/6xsCMQCZt2egCtbX7J0WkuZ2KivTh66jecJr5DHvAP4X2xtS
+F/5pS+AUhcKTEGjI9jDH3ew=
+-----END CERTIFICATE-----
+-----BEGIN CERTIFICATE-----
+MIICuDCCAj2gAwIBAgIQT5mGlavQzFHsB7hV6Mmy6TAKBggqhkjOPQQDAzCBmzEL
+MAkGA1UEBhMCVVMxIjAgBgNVBAoMGUFtYXpvbiBXZWIgU2VydmljZXMsIEluYy4x
+EzARBgNVBAsMCkFtYXpvbiBSRFMxCzAJBgNVBAgMAldBMTQwMgYDVQQDDCtBbWF6
+b24gUkRTIGFwLXNvdXRoZWFzdC0yIFJvb3QgQ0EgRUNDMzg0IEcxMRAwDgYDVQQH
+DAdTZWF0dGxlMCAXDTIxMDUyNDIwNTAxNVoYDzIxMjEwNTI0MjE1MDE1WjCBmzEL
+MAkGA1UEBhMCVVMxIjAgBgNVBAoMGUFtYXpvbiBXZWIgU2VydmljZXMsIEluYy4x
+EzARBgNVBAsMCkFtYXpvbiBSRFMxCzAJBgNVBAgMAldBMTQwMgYDVQQDDCtBbWF6
+b24gUkRTIGFwLXNvdXRoZWFzdC0yIFJvb3QgQ0EgRUNDMzg0IEcxMRAwDgYDVQQH
+DAdTZWF0dGxlMHYwEAYHKoZIzj0CAQYFK4EEACIDYgAEcm4BBBjYK7clwm0HJRWS
+flt3iYwoJbIXiXn9c1y3E+Vb7bmuyKhS4eO8mwO4GefUcXObRfoHY2TZLhMJLVBQ
+7MN2xDc0RtZNj07BbGD3VAIFRTDX0mH9UNYd0JQM3t/Oo0IwQDAPBgNVHRMBAf8E
+BTADAQH/MB0GA1UdDgQWBBRrd5ITedfAwrGo4FA9UaDaGFK3rjAOBgNVHQ8BAf8E
+BAMCAYYwCgYIKoZIzj0EAwMDaQAwZgIxAPBNqmVv1IIA3EZyQ6XuVf4gj79/DMO8
+bkicNS1EcBpUqbSuU4Zwt2BYc8c/t7KVOQIxAOHoWkoKZPiKyCxfMtJpCZySUG+n
+sXgB/LOyWE5BJcXUfm+T1ckeNoWeUUMOLmnJjg==
+-----END CERTIFICATE-----
+-----BEGIN CERTIFICATE-----
+MIIECTCCAvGgAwIBAgIRAJcDeinvdNrDQBeJ8+t38WQwDQYJKoZIhvcNAQELBQAw
+gZwxCzAJBgNVBAYTAlVTMSIwIAYDVQQKDBlBbWF6b24gV2ViIFNlcnZpY2VzLCBJ
+bmMuMRMwEQYDVQQLDApBbWF6b24gUkRTMQswCQYDVQQIDAJXQTE1MDMGA1UEAwws
+QW1hem9uIFJEUyBhcC1zb3V0aGVhc3QtNCBSb290IENBIFJTQTIwNDggRzExEDAO
+BgNVBAcMB1NlYXR0bGUwIBcNMjIwNTI1MTY0OTE2WhgPMjA2MjA1MjUxNzQ5MTZa
+MIGcMQswCQYDVQQGEwJVUzEiMCAGA1UECgwZQW1hem9uIFdlYiBTZXJ2aWNlcywg
+SW5jLjETMBEGA1UECwwKQW1hem9uIFJEUzELMAkGA1UECAwCV0ExNTAzBgNVBAMM
+LEFtYXpvbiBSRFMgYXAtc291dGhlYXN0LTQgUm9vdCBDQSBSU0EyMDQ4IEcxMRAw
+DgYDVQQHDAdTZWF0dGxlMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA
+k8DBNkr9tMoIM0NHoFiO7cQfSX0cOMhEuk/CHt0fFx95IBytx7GHCnNzpM27O5z6
+x6iRhfNnx+B6CrGyCzOjxvPizneY+h+9zfvNz9jj7L1I2uYMuiNyOKR6FkHR46CT
+1CiArfVLLPaTqgD/rQjS0GL2sLHS/0dmYipzynnZcs613XT0rAWdYDYgxDq7r/Yi
+Xge5AkWQFkMUq3nOYDLCyGGfQqWKkwv6lZUHLCDKf+Y0Uvsrj8YGCI1O8mF0qPCQ
+lmlfaDvbuBu1AV+aabmkvyFj3b8KRIlNLEtQ4N8KGYR2Jdb82S4YUGIOAt4wuuFt
+1B7AUDLk3V/u+HTWiwfoLQIDAQABo0IwQDAPBgNVHRMBAf8EBTADAQH/MB0GA1Ud
+DgQWBBSNpcjz6ArWBtAA+Gz6kyyZxrrgdDAOBgNVHQ8BAf8EBAMCAYYwDQYJKoZI
+hvcNAQELBQADggEBAGJEd7UgOzHYIcQRSF7nSYyjLROyalaIV9AX4WXW/Cqlul1c
+MblP5etDZm7A/thliZIWAuyqv2bNicmS3xKvNy6/QYi1YgxZyy/qwJ3NdFl067W0
+t8nGo29B+EVK94IPjzFHWShuoktIgp+dmpijB7wkTIk8SmIoe9yuY4+hzgqk+bo4
+ms2SOXSN1DoQ75Xv+YmztbnZM8MuWhL1T7hA4AMorzTQLJ9Pof8SpSdMHeDsHp0R
+01jogNFkwy25nw7cL62nufSuH2fPYGWXyNDg+y42wKsKWYXLRgUQuDVEJ2OmTFMB
+T0Vf7VuNijfIA9hkN2d3K53m/9z5WjGPSdOjGhg=
+-----END CERTIFICATE-----
+-----BEGIN CERTIFICATE-----
+MIID/jCCAuagAwIBAgIQRiwspKyrO0xoxDgSkqLZczANBgkqhkiG9w0BAQsFADCB
+lzELMAkGA1UEBhMCVVMxIjAgBgNVBAoMGUFtYXpvbiBXZWIgU2VydmljZXMsIElu
+Yy4xEzARBgNVBAsMCkFtYXpvbiBSRFMxCzAJBgNVBAgMAldBMTAwLgYDVQQDDCdB
+bWF6b24gUkRTIHVzLXdlc3QtMiBSb290IENBIFJTQTIwNDggRzExEDAOBgNVBAcM
+B1NlYXR0bGUwIBcNMjEwNTI0MjE1OTAwWhgPMjA2MTA1MjQyMjU5MDBaMIGXMQsw
+CQYDVQQGEwJVUzEiMCAGA1UECgwZQW1hem9uIFdlYiBTZXJ2aWNlcywgSW5jLjET
+MBEGA1UECwwKQW1hem9uIFJEUzELMAkGA1UECAwCV0ExMDAuBgNVBAMMJ0FtYXpv
+biBSRFMgdXMtd2VzdC0yIFJvb3QgQ0EgUlNBMjA0OCBHMTEQMA4GA1UEBwwHU2Vh
+dHRsZTCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAL53Jk3GsKiu+4bx
+jDfsevWbwPCNJ3H08Zp7GWhvI3Tgi39opfHYv2ku2BKFjK8N2L6RvNPSR8yplv5j
+Y0tK0U+XVNl8o0ibhqRDhbTuh6KL8CFINWYzAajuxFS+CF0U6c1Q3tXLBdALxA7l
+FlXJ71QrP06W31kRe7kvgrvO7qWU3/OzUf9qYw4LSiR1/VkvvRCTqcVNw09clw/M
+Jbw6FSgweN65M9j7zPbjGAXSHkXyxH1Erin2fa+B9PE4ZDgX9cp2C1DHewYJQL/g
+SepwwcudVNRN1ibKH7kpMrgPnaNIVNx5sXVsTjk6q2ZqYw3SVHegltJpLy/cZReP
+mlivF2kCAwEAAaNCMEAwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQUmTcQd6o1
+CuS65MjBrMwQ9JJjmBwwDgYDVR0PAQH/BAQDAgGGMA0GCSqGSIb3DQEBCwUAA4IB
+AQAKSDSIzl956wVddPThf2VAzI8syw9ngSwsEHZvxVGHBvu5gg618rDyguVCYX9L
+4Kw/xJrk6S3qxOS2ZDyBcOpsrBskgahDFIunzoRP3a18ARQVq55LVgfwSDQiunch
+Bd05cnFGLoiLkR5rrkgYaP2ftn3gRBRaf0y0S3JXZ2XB3sMZxGxavYq9mfiEcwB0
+LMTMQ1NYzahIeG6Jm3LqRqR8HkzP/Ztq4dT2AtSLvFebbNMiWqeqT7OcYp94HTYT
+zqrtaVdUg9bwyAUCDgy0GV9RHDIdNAOInU/4LEETovrtuBU7Z1q4tcHXvN6Hd1H8
+gMb0mCG5I393qW5hFsA/diFb
+-----END CERTIFICATE-----
+-----BEGIN CERTIFICATE-----
+MIIECTCCAvGgAwIBAgIRAPQAvihfjBg/JDbj6U64K98wDQYJKoZIhvcNAQELBQAw
+gZwxCzAJBgNVBAYTAlVTMSIwIAYDVQQKDBlBbWF6b24gV2ViIFNlcnZpY2VzLCBJ
+bmMuMRMwEQYDVQQLDApBbWF6b24gUkRTMQswCQYDVQQIDAJXQTE1MDMGA1UEAwws
+QW1hem9uIFJEUyBhcC1ub3J0aGVhc3QtMiBSb290IENBIFJTQTIwNDggRzExEDAO
+BgNVBAcMB1NlYXR0bGUwIBcNMjEwNTIwMTYyODQxWhgPMjA2MTA1MjAxNzI4NDFa
+MIGcMQswCQYDVQQGEwJVUzEiMCAGA1UECgwZQW1hem9uIFdlYiBTZXJ2aWNlcywg
+SW5jLjETMBEGA1UECwwKQW1hem9uIFJEUzELMAkGA1UECAwCV0ExNTAzBgNVBAMM
+LEFtYXpvbiBSRFMgYXAtbm9ydGhlYXN0LTIgUm9vdCBDQSBSU0EyMDQ4IEcxMRAw
+DgYDVQQHDAdTZWF0dGxlMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA
+vJ9lgyksCxkBlY40qOzI1TCj/Q0FVGuPL/Z1Mw2YN0l+41BDv0FHApjTUkIKOeIP
+nwDwpXTa3NjYbk3cOZ/fpH2rYJ++Fte6PNDGPgKppVCUh6x3jiVZ1L7wOgnTdK1Q
+Trw8440IDS5eLykRHvz8OmwvYDl0iIrt832V0QyOlHTGt6ZJ/aTQKl12Fy3QBLv7
+stClPzvHTrgWqVU6uidSYoDtzHbU7Vda7YH0wD9IUoMBf7Tu0rqcE4uH47s2XYkc
+SdLEoOg/Ngs7Y9B1y1GCyj3Ux7hnyvCoRTw014QyNB7dTatFMDvYlrRDGG14KeiU
+UL7Vo/+EejWI31eXNLw84wIDAQABo0IwQDAPBgNVHRMBAf8EBTADAQH/MB0GA1Ud
+DgQWBBQkgTWFsNg6wA3HbbihDQ4vpt1E2zAOBgNVHQ8BAf8EBAMCAYYwDQYJKoZI
+hvcNAQELBQADggEBAGz1Asiw7hn5WYUj8RpOCzpE0h/oBZcnxP8wulzZ5Xd0YxWO
+0jYUcUk3tTQy1QvoY+Q5aCjg6vFv+oFBAxkib/SmZzp4xLisZIGlzpJQuAgRkwWA
+6BVMgRS+AaOMQ6wKPgz1x4v6T0cIELZEPq3piGxvvqkcLZKdCaeC3wCS6sxuafzZ
+4qA3zMwWuLOzRftgX2hQto7d/2YkRXga7jSvQl3id/EI+xrYoH6zIWgjdU1AUaNq
+NGT7DIo47vVMfnd9HFZNhREsd4GJE83I+JhTqIxiKPNxrKgESzyADmNPt0gXDnHo
+tbV1pMZz5HpJtjnP/qVZhEK5oB0tqlKPv9yx074=
+-----END CERTIFICATE-----
+-----BEGIN CERTIFICATE-----
+MIICuTCCAj6gAwIBAgIRAKp1Rn3aL/g/6oiHVIXtCq8wCgYIKoZIzj0EAwMwgZsx
+CzAJBgNVBAYTAlVTMSIwIAYDVQQKDBlBbWF6b24gV2ViIFNlcnZpY2VzLCBJbmMu
+MRMwEQYDVQQLDApBbWF6b24gUkRTMQswCQYDVQQIDAJXQTE0MDIGA1UEAwwrQW1h
+em9uIFJEUyBhcC1ub3J0aGVhc3QtMyBSb290IENBIEVDQzM4NCBHMTEQMA4GA1UE
+BwwHU2VhdHRsZTAgFw0yMTA1MjQyMDMyMTdaGA8yMTIxMDUyNDIxMzIxN1owgZsx
+CzAJBgNVBAYTAlVTMSIwIAYDVQQKDBlBbWF6b24gV2ViIFNlcnZpY2VzLCBJbmMu
+MRMwEQYDVQQLDApBbWF6b24gUkRTMQswCQYDVQQIDAJXQTE0MDIGA1UEAwwrQW1h
+em9uIFJEUyBhcC1ub3J0aGVhc3QtMyBSb290IENBIEVDQzM4NCBHMTEQMA4GA1UE
+BwwHU2VhdHRsZTB2MBAGByqGSM49AgEGBSuBBAAiA2IABGTYWPILeBJXfcL3Dz4z
+EWMUq78xB1HpjBwHoTURYfcMd5r96BTVG6yaUBWnAVCMeeD6yTG9a1eVGNhG14Hk
+ZAEjgLiNB7RRbEG5JZ/XV7W/vODh09WCst2y9SLKsdgeAaNCMEAwDwYDVR0TAQH/
+BAUwAwEB/zAdBgNVHQ4EFgQUoE0qZHmDCDB+Bnm8GUa/evpfPwgwDgYDVR0PAQH/
+BAQDAgGGMAoGCCqGSM49BAMDA2kAMGYCMQCnil5MMwhY3qoXv0xvcKZGxGPaBV15
+0CCssCKn0oVtdJQfJQ3Jrf3RSaEyijXIJsoCMQC35iJi4cWoNX3N/qfgnHohW52O
+B5dg0DYMqy5cNZ40+UcAanRMyqNQ6P7fy3umGco=
+-----END CERTIFICATE-----
+-----BEGIN CERTIFICATE-----
+MIICtzCCAj2gAwIBAgIQPXnDTPegvJrI98qz8WxrMjAKBggqhkjOPQQDAzCBmzEL
+MAkGA1UEBhMCVVMxIjAgBgNVBAoMGUFtYXpvbiBXZWIgU2VydmljZXMsIEluYy4x
+EzARBgNVBAsMCkFtYXpvbiBSRFMxCzAJBgNVBAgMAldBMTQwMgYDVQQDDCtBbWF6
+b24gUkRTIEJldGEgdXMtZWFzdC0xIFJvb3QgQ0EgRUNDMzg0IEcxMRAwDgYDVQQH
+DAdTZWF0dGxlMCAXDTIxMDUxODIxNDAxMloYDzIxMjEwNTE4MjI0MDEyWjCBmzEL
+MAkGA1UEBhMCVVMxIjAgBgNVBAoMGUFtYXpvbiBXZWIgU2VydmljZXMsIEluYy4x
+EzARBgNVBAsMCkFtYXpvbiBSRFMxCzAJBgNVBAgMAldBMTQwMgYDVQQDDCtBbWF6
+b24gUkRTIEJldGEgdXMtZWFzdC0xIFJvb3QgQ0EgRUNDMzg0IEcxMRAwDgYDVQQH
+DAdTZWF0dGxlMHYwEAYHKoZIzj0CAQYFK4EEACIDYgAEI0sR7gwutK5AB46hM761
+gcLTGBIYlURSEoM1jcBwy56CL+3CJKZwLLyJ7qoOKfWbu5GsVLUTWS8MV6Nw33cx
+2KQD2svb694wi+Px2f4n9+XHkEFQw8BbiodDD7RZA70fo0IwQDAPBgNVHRMBAf8E
+BTADAQH/MB0GA1UdDgQWBBTQSioOvnVLEMXwNSDg+zgln/vAkjAOBgNVHQ8BAf8E
+BAMCAYYwCgYIKoZIzj0EAwMDaAAwZQIxAMwu1hqm5Bc98uE/E0B5iMYbBQ4kpMxO
+tP8FTfz5UR37HUn26nXE0puj6S/Ffj4oJgIwXI7s2c26tFQeqzq6u3lrNJHp5jC9
+Uxlo/hEJOLoDj5jnpxo8dMAtCNoQPaHdfL0P
+-----END CERTIFICATE-----
+-----BEGIN CERTIFICATE-----
+MIIF/jCCA+agAwIBAgIQEM1pS+bWfBJeu/6j1yIIFzANBgkqhkiG9w0BAQwFADCB
+lzELMAkGA1UEBhMCVVMxIjAgBgNVBAoMGUFtYXpvbiBXZWIgU2VydmljZXMsIElu
+Yy4xEzARBgNVBAsMCkFtYXpvbiBSRFMxCzAJBgNVBAgMAldBMTAwLgYDVQQDDCdB
+bWF6b24gUkRTIGNhLXdlc3QtMSBSb290IENBIFJTQTQwOTYgRzExEDAOBgNVBAcM
+B1NlYXR0bGUwIBcNMjMwOTE5MjIwMTM5WhgPMjEyMzA5MTkyMzAxMzlaMIGXMQsw
+CQYDVQQGEwJVUzEiMCAGA1UECgwZQW1hem9uIFdlYiBTZXJ2aWNlcywgSW5jLjET
+MBEGA1UECwwKQW1hem9uIFJEUzELMAkGA1UECAwCV0ExMDAuBgNVBAMMJ0FtYXpv
+biBSRFMgY2Etd2VzdC0xIFJvb3QgQ0EgUlNBNDA5NiBHMTEQMA4GA1UEBwwHU2Vh
+dHRsZTCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBAK2Pyp8p5z6HnlGB
+daOj78gZ3ABufxnBFiu5NdFiGoMrS+eY//xxr2iKbnynJAzjmn5A6VKMNxtbuYIZ
+WKAzDb/HrWlIYD2w7ZVBXpylfPhiz3jLNsl03WdPNnEruCcivhY2QMewEVtzjPU0
+ofdbZlO2KpF3biv1gjPuIuE7AUyQAbWnWTlrzETAVWLboJJRRqxASSkFUHNLXod7
+ow02FwlAhcnCp9gSe1SKRDrpvvEvYQBAFB7owfnoQzOGDdd87RGyYfyuW8aFI2Z0
+LHNvsA0dTafO4Rh986c72kDL7ijICQdr5OTgZR2OnuESLk1DSK4xYJ4fA6jb5dJ5
++xsI6tCPykWCW98aO/pha35OsrVNifL/5cH5pdv/ecgQGdffJB+Vdj6f/ZMwR6s/
+Rm37cQ9l3tU8eu/qpzsFjLq1ZUzDaVDWgMW9t49+q/zjhdmbPOabZDao7nHXrVRw
+rwPHWCmEY4OmH6ikEKQW3AChFjOdSg4me/J0Jr5l5jKggLPHWbNLRO8qTTK6N8qk
+ui3aJDi+XQfsTPARXIw4UFErArNImTsoZVyqfX7I4shp0qZbEhP6kRAbfPljw5kW
+Yat7ZlXqDanjsreqbLTaOU10P0rC0/4Ctv5cLSKCrzRLWtpXxhKa2wJTQ74G6fAZ
+1oUA79qg3F8nyM+ZzDsfNI854+PNAgMBAAGjQjBAMA8GA1UdEwEB/wQFMAMBAf8w
+HQYDVR0OBBYEFLRWiDabEQZNkzEPUCr1ZVJV6xpwMA4GA1UdDwEB/wQEAwIBhjAN
+BgkqhkiG9w0BAQwFAAOCAgEATkVVzkkGBjEtLGDtERi+fSpIV0MxwAsA4PAeBBmb
+myxo90jz6kWkKM1Wm4BkZM8/mq5VbxPef1kxHfb5CHksCL6SgG5KujfIvht+KT2a
+MRJB+III3CbcTy0HtwCX5AlPIbXWydhQFoJTW/OkpecUWoyFM6SqYeYZx1itJpxl
+sXshLjYOvw+QgvxRsDxqUfkcaC/N2yhu/30Zo2P8msJfAFry2UmA/TBrWOQKVQxl
+Ee/yWgp4U/bC/GZnjWnWDTwkRFGQtI4wjxbVuX6V4FTLCT7kIoHBhG+zOSduJRn3
+Axej7gkEXEVc/PAnwp/kSJ/b0/JONLWdjGUFkyiMn1yJlhJ2sg39vepBN5r6yVYU
+nJWoZAuupRpoIKfmC3/cZanXqYbYl4yxzX/PMB4kAACfdxGxLawjnnBjSzaWokXs
+YVh2TjWpUMwLOi0RB2mtPUjHdDLKtjOTZ1zHZnR/wVp9BmVI1BXYnz5PAqU5XqeD
+EmanyaAuFCeyol1EtbQhgtysThQ+vwYAXMm2iKzJxq0hik8wyG8X55FhnGEOGV3u
+xxq7odd3/8BXkc3dGdBPQtH+k5glaQyPnAsLVAIUvyzTmy58saL+nJnQY4mmRrwV
+1jJA7nnkaklI/L5fvfCg0W+TMinCOAGd+GQ4hK2SAsJLtcqiBgPf2wJHO8wiwUh9
+Luw=
+-----END CERTIFICATE-----
+-----BEGIN CERTIFICATE-----
+MIICrjCCAjWgAwIBAgIQGKVv+5VuzEZEBzJ+bVfx2zAKBggqhkjOPQQDAzCBlzEL
+MAkGA1UEBhMCVVMxIjAgBgNVBAoMGUFtYXpvbiBXZWIgU2VydmljZXMsIEluYy4x
+EzARBgNVBAsMCkFtYXpvbiBSRFMxCzAJBgNVBAgMAldBMTAwLgYDVQQDDCdBbWF6
+b24gUkRTIGFwLXNvdXRoLTEgUm9vdCBDQSBFQ0MzODQgRzExEDAOBgNVBAcMB1Nl
+YXR0bGUwIBcNMjEwNTE5MTc1MDU5WhgPMjEyMTA1MTkxODUwNTlaMIGXMQswCQYD
+VQQGEwJVUzEiMCAGA1UECgwZQW1hem9uIFdlYiBTZXJ2aWNlcywgSW5jLjETMBEG
+A1UECwwKQW1hem9uIFJEUzELMAkGA1UECAwCV0ExMDAuBgNVBAMMJ0FtYXpvbiBS
+RFMgYXAtc291dGgtMSBSb290IENBIEVDQzM4NCBHMTEQMA4GA1UEBwwHU2VhdHRs
+ZTB2MBAGByqGSM49AgEGBSuBBAAiA2IABMqdLJ0tZF/DGFZTKZDrGRJZID8ivC2I
+JRCYTWweZKCKSCAzoiuGGHzJhr5RlLHQf/QgmFcgXsdmO2n3CggzhA4tOD9Ip7Lk
+P05eHd2UPInyPCHRgmGjGb0Z+RdQ6zkitKNCMEAwDwYDVR0TAQH/BAUwAwEB/zAd
+BgNVHQ4EFgQUC1yhRgVqU5bR8cGzOUCIxRpl4EYwDgYDVR0PAQH/BAQDAgGGMAoG
+CCqGSM49BAMDA2cAMGQCMG0c/zLGECRPzGKJvYCkpFTCUvdP4J74YP0v/dPvKojL
+t/BrR1Tg4xlfhaib7hPc7wIwFvgqHes20CubQnZmswbTKLUrgSUW4/lcKFpouFd2
+t2/ewfi/0VhkeUW+IiHhOMdU
+-----END CERTIFICATE-----
+-----BEGIN CERTIFICATE-----
+MIIGCTCCA/GgAwIBAgIRAOXxJuyXVkbfhZCkS/dOpfEwDQYJKoZIhvcNAQEMBQAw
+gZwxCzAJBgNVBAYTAlVTMSIwIAYDVQQKDBlBbWF6b24gV2ViIFNlcnZpY2VzLCBJ
+bmMuMRMwEQYDVQQLDApBbWF6b24gUkRTMQswCQYDVQQIDAJXQTE1MDMGA1UEAwws
+QW1hem9uIFJEUyBhcC1ub3J0aGVhc3QtMSBSb290IENBIFJTQTQwOTYgRzExEDAO
+BgNVBAcMB1NlYXR0bGUwIBcNMjEwNTI1MjE1OTEwWhgPMjEyMTA1MjUyMjU5MTBa
+MIGcMQswCQYDVQQGEwJVUzEiMCAGA1UECgwZQW1hem9uIFdlYiBTZXJ2aWNlcywg
+SW5jLjETMBEGA1UECwwKQW1hem9uIFJEUzELMAkGA1UECAwCV0ExNTAzBgNVBAMM
+LEFtYXpvbiBSRFMgYXAtbm9ydGhlYXN0LTEgUm9vdCBDQSBSU0E0MDk2IEcxMRAw
+DgYDVQQHDAdTZWF0dGxlMIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEA
+xiP4RDYm4tIS12hGgn1csfO8onQDmK5SZDswUpl0HIKXOUVVWkHNlINkVxbdqpqH
+FhbyZmNN6F/EWopotMDKe1B+NLrjNQf4zefv2vyKvPHJXhxoKmfyuTd5Wk8k1F7I
+lNwLQzznB+ElhrLIDJl9Ro8t31YBBNFRGAGEnxyACFGcdkjlsa52UwfYrwreEg2l
+gW5AzqHgjFfj9QRLydeU/n4bHm0F1adMsV7P3rVwilcUlqsENDwXnWyPEyv3sw6F
+wNemLEs1129mB77fwvySb+lLNGsnzr8w4wdioZ74co+T9z2ca+eUiP+EQccVw1Is
+D4Fh57IjPa6Wuc4mwiUYKkKY63+38aCfEWb0Qoi+zW+mE9nek6MOQ914cN12u5LX
+dBoYopphRO5YmubSN4xcBy405nIdSdbrAVWwxXnVVyjqjknmNeqQsPZaxAhdoKhV
+AqxNr8AUAdOAO6Sz3MslmcLlDXFihrEEOeUbpg/m1mSUUHGbu966ajTG1FuEHHwS
+7WB52yxoJo/tHvt9nAWnh3uH5BHmS8zn6s6CGweWKbX5yICnZ1QFR1e4pogxX39v
+XD6YcNOO+Vn+HY4nXmjgSYVC7l+eeP8eduMg1xJujzjrbmrXU+d+cBObgdTOAlpa
+JFHaGwYw1osAwPCo9cZ2f04yitBfj9aPFia8ASKldakCAwEAAaNCMEAwDwYDVR0T
+AQH/BAUwAwEB/zAdBgNVHQ4EFgQUqKS+ltlior0SyZKYAkJ/efv55towDgYDVR0P
+AQH/BAQDAgGGMA0GCSqGSIb3DQEBDAUAA4ICAQAdElvp8bW4B+Cv+1WSN87dg6TN
+wGyIjJ14/QYURgyrZiYpUmZpj+/pJmprSWXu4KNyqHftmaidu7cdjL5nCAvAfnY5
+/6eDDbX4j8Gt9fb/6H9y0O0dn3mUPSEKG0crR+JRFAtPhn/2FNvst2P82yguWLv0
+pHjHVUVcq+HqDMtUIJsTPYjSh9Iy77Q6TOZKln9dyDOWJpCSkiUWQtMAKbCSlvzd
+zTs/ahqpT+zLfGR1SR+T3snZHgQnbnemmz/XtlKl52NxccARwfcEEKaCRQyGq/pR
+0PVZasyJS9JY4JfQs4YOdeOt4UMZ8BmW1+BQWGSkkb0QIRl8CszoKofucAlqdPcO
+IT/ZaMVhI580LFGWiQIizWFskX6lqbCyHqJB3LDl8gJISB5vNTHOHpvpMOMs5PYt
+cRl5Mrksx5MKMqG7y5R734nMlZxQIHjL5FOoOxTBp9KeWIL/Ib89T2QDaLw1SQ+w
+ihqWBJ4ZdrIMWYpP3WqM+MXWk7WAem+xsFJdR+MDgOOuobVQTy5dGBlPks/6gpjm
+rO9TjfQ36ppJ3b7LdKUPeRfnYmlR5RU4oyYJ//uLbClI443RZAgxaCXX/nyc12lr
+eVLUMNF2abLX4/VF63m2/Z9ACgMRfqGshPssn1NN33OonrotQoj4S3N9ZrjvzKt8
+iHcaqd60QKpfiH2A3A==
+-----END CERTIFICATE-----
+-----BEGIN CERTIFICATE-----
+MIICuDCCAj2gAwIBAgIQPaVGRuu86nh/ylZVCLB0MzAKBggqhkjOPQQDAzCBmzEL
+MAkGA1UEBhMCVVMxIjAgBgNVBAoMGUFtYXpvbiBXZWIgU2VydmljZXMsIEluYy4x
+EzARBgNVBAsMCkFtYXpvbiBSRFMxCzAJBgNVBAgMAldBMTQwMgYDVQQDDCtBbWF6
+b24gUkRTIGFwLW5vcnRoZWFzdC0xIFJvb3QgQ0EgRUNDMzg0IEcxMRAwDgYDVQQH
+DAdTZWF0dGxlMCAXDTIxMDUyNTIyMDMxNloYDzIxMjEwNTI1MjMwMzE2WjCBmzEL
+MAkGA1UEBhMCVVMxIjAgBgNVBAoMGUFtYXpvbiBXZWIgU2VydmljZXMsIEluYy4x
+EzARBgNVBAsMCkFtYXpvbiBSRFMxCzAJBgNVBAgMAldBMTQwMgYDVQQDDCtBbWF6
+b24gUkRTIGFwLW5vcnRoZWFzdC0xIFJvb3QgQ0EgRUNDMzg0IEcxMRAwDgYDVQQH
+DAdTZWF0dGxlMHYwEAYHKoZIzj0CAQYFK4EEACIDYgAEexNURoB9KE93MEtEAlJG
+obz4LS/pD2hc8Gczix1WhVvpJ8bN5zCDXaKdnDMCebetyRQsmQ2LYlfmCwpZwSDu
+0zowB11Pt3I5Avu2EEcuKTlKIDMBeZ1WWuOd3Tf7MEAMo0IwQDAPBgNVHRMBAf8E
+BTADAQH/MB0GA1UdDgQWBBSaYbZPBvFLikSAjpa8mRJvyArMxzAOBgNVHQ8BAf8E
+BAMCAYYwCgYIKoZIzj0EAwMDaQAwZgIxAOEJkuh3Zjb7Ih/zuNRd1RBqmIYcnyw0
+nwUZczKXry+9XebYj3VQxSRNadrarPWVqgIxAMg1dyGoDAYjY/L/9YElyMnvHltO
+PwpJShmqHvCLc/mXMgjjYb/akK7yGthvW6j/uQ==
+-----END CERTIFICATE-----
+-----BEGIN CERTIFICATE-----
+MIIGCDCCA/CgAwIBAgIQChu3v5W1Doil3v6pgRIcVzANBgkqhkiG9w0BAQwFADCB
+nDELMAkGA1UEBhMCVVMxIjAgBgNVBAoMGUFtYXpvbiBXZWIgU2VydmljZXMsIElu
+Yy4xEzARBgNVBAsMCkFtYXpvbiBSRFMxCzAJBgNVBAgMAldBMTUwMwYDVQQDDCxB
+bWF6b24gUkRTIEJldGEgdXMtZWFzdC0xIFJvb3QgQ0EgUlNBNDA5NiBHMTEQMA4G
+A1UEBwwHU2VhdHRsZTAgFw0yMTA1MTgyMTM0MTVaGA8yMTIxMDUxODIyMzQxNVow
+gZwxCzAJBgNVBAYTAlVTMSIwIAYDVQQKDBlBbWF6b24gV2ViIFNlcnZpY2VzLCBJ
+bmMuMRMwEQYDVQQLDApBbWF6b24gUkRTMQswCQYDVQQIDAJXQTE1MDMGA1UEAwws
+QW1hem9uIFJEUyBCZXRhIHVzLWVhc3QtMSBSb290IENBIFJTQTQwOTYgRzExEDAO
+BgNVBAcMB1NlYXR0bGUwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQC1
+FUGQ5tf3OwpDR6hGBxhUcrkwKZhaXP+1St1lSOQvjG8wXT3RkKzRGMvb7Ee0kzqI
+mzKKe4ASIhtV3UUWdlNmP0EA3XKnif6N79MismTeGkDj75Yzp5A6tSvqByCgxIjK
+JqpJrch3Dszoyn8+XhwDxMZtkUa5nQVdJgPzJ6ltsQ8E4SWLyLtTu0S63jJDkqYY
+S7cQblk7y7fel+Vn+LS5dGTdRRhMvSzEnb6mkVBaVzRyVX90FNUED06e8q+gU8Ob
+htvQlf9/kRzHwRAdls2YBhH40ZeyhpUC7vdtPwlmIyvW5CZ/QiG0yglixnL6xahL
+pbmTuTSA/Oqz4UGQZv2WzHe1lD2gRHhtFX2poQZeNQX8wO9IcUhrH5XurW/G9Xwl
+Sat9CMPERQn4KC3HSkat4ir2xaEUrjfg6c4XsGyh2Pk/LZ0gLKum0dyWYpWP4JmM
+RQNjrInXPbMhzQObozCyFT7jYegS/3cppdyy+K1K7434wzQGLU1gYXDKFnXwkX8R
+bRKgx2pHNbH5lUddjnNt75+e8m83ygSq/ZNBUz2Ur6W2s0pl6aBjwaDES4VfWYlI
+jokcmrGvJNDfQWygb1k00eF2bzNeNCHwgWsuo3HSxVgc/WGsbcGrTlDKfz+g3ich
+bXUeUidPhRiv5UQIVCLIHpHuin3bj9lQO/0t6p+tAQIDAQABo0IwQDAPBgNVHRMB
+Af8EBTADAQH/MB0GA1UdDgQWBBSFmMBgm5IsRv3hLrvDPIhcPweXYTAOBgNVHQ8B
+Af8EBAMCAYYwDQYJKoZIhvcNAQEMBQADggIBAAa2EuozymOsQDJlEi7TqnyA2OhT
+GXPfYqCyMJVkfrqNgcnsNpCAiNEiZbb+8sIPXnT8Ay8hrwJYEObJ5b7MHXpLuyft
+z0Pu1oFLKnQxKjNxrIsCvaB4CRRdYjm1q7EqGhMGv76se9stOxkOqO9it31w/LoU
+ENDk7GLsSqsV1OzYLhaH8t+MaNP6rZTSNuPrHwbV3CtBFl2TAZ7iKgKOhdFz1Hh9
+Pez0lG+oKi4mHZ7ajov6PD0W7njn5KqzCAkJR6OYmlNVPjir+c/vUtEs0j+owsMl
+g7KE5g4ZpTRShyh5BjCFRK2tv0tkqafzNtxrKC5XNpEkqqVTCnLcKG+OplIEadtr
+C7UWf4HyhCiR+xIyxFyR05p3uY/QQU/5uza7GlK0J+U1sBUytx7BZ+Fo8KQfPPqV
+CqDCaYUksoJcnJE/KeoksyqNQys7sDGJhkd0NeUGDrFLKHSLhIwAMbEWnqGxvhli
+E7sP2E5rI/I9Y9zTbLIiI8pfeZlFF8DBdoP/Hzg8pqsiE/yiXSFTKByDwKzGwNqz
+F0VoFdIZcIbLdDbzlQitgGpJtvEL7HseB0WH7B2PMMD8KPJlYvPveO3/6OLzCsav
++CAkvk47NQViKMsUTKOA0JDCW+u981YRozxa3K081snhSiSe83zIPBz1ikldXxO9
+6YYLNPRrj3mi9T/f
+-----END CERTIFICATE-----
+-----BEGIN CERTIFICATE-----
+MIICrjCCAjSgAwIBAgIRAMkvdFnVDb0mWWFiXqnKH68wCgYIKoZIzj0EAwMwgZYx
+CzAJBgNVBAYTAlVTMSIwIAYDVQQKDBlBbWF6b24gV2ViIFNlcnZpY2VzLCBJbmMu
+MRMwEQYDVQQLDApBbWF6b24gUkRTMQswCQYDVQQIDAJXQTEvMC0GA1UEAwwmQW1h
+em9uIFJEUyB1cy13ZXN0LTEgUm9vdCBDQSBFQ0MzODQgRzExEDAOBgNVBAcMB1Nl
+YXR0bGUwIBcNMjEwNTE5MTkxMzI0WhgPMjEyMTA1MTkyMDEzMjRaMIGWMQswCQYD
+VQQGEwJVUzEiMCAGA1UECgwZQW1hem9uIFdlYiBTZXJ2aWNlcywgSW5jLjETMBEG
+A1UECwwKQW1hem9uIFJEUzELMAkGA1UECAwCV0ExLzAtBgNVBAMMJkFtYXpvbiBS
+RFMgdXMtd2VzdC0xIFJvb3QgQ0EgRUNDMzg0IEcxMRAwDgYDVQQHDAdTZWF0dGxl
+MHYwEAYHKoZIzj0CAQYFK4EEACIDYgAEy86DB+9th/0A5VcWqMSWDxIUblWTt/R0
+ao6Z2l3vf2YDF2wt1A2NIOGpfQ5+WAOJO/IQmnV9LhYo+kacB8sOnXdQa6biZZkR
+IyouUfikVQAKWEJnh1Cuo5YMM4E2sUt5o0IwQDAPBgNVHRMBAf8EBTADAQH/MB0G
+A1UdDgQWBBQ8u3OnecANmG8OoT7KLWDuFzZwBTAOBgNVHQ8BAf8EBAMCAYYwCgYI
+KoZIzj0EAwMDaAAwZQIwQ817qkb7mWJFnieRAN+m9W3E0FLVKaV3zC5aYJUk2fcZ
+TaUx3oLp3jPLGvY5+wgeAjEA6wAicAki4ZiDfxvAIuYiIe1OS/7H5RA++R8BH6qG
+iRzUBM/FItFpnkus7u/eTkvo
+-----END CERTIFICATE-----
+-----BEGIN CERTIFICATE-----
+MIICrzCCAjWgAwIBAgIQS/+Ryfgb/IOVEa1pWoe8oTAKBggqhkjOPQQDAzCBlzEL
+MAkGA1UEBhMCVVMxIjAgBgNVBAoMGUFtYXpvbiBXZWIgU2VydmljZXMsIEluYy4x
+EzARBgNVBAsMCkFtYXpvbiBSRFMxCzAJBgNVBAgMAldBMTAwLgYDVQQDDCdBbWF6
+b24gUkRTIGFwLXNvdXRoLTIgUm9vdCBDQSBFQ0MzODQgRzExEDAOBgNVBAcMB1Nl
+YXR0bGUwIBcNMjIwNjA2MjE1NDQyWhgPMjEyMjA2MDYyMjU0NDJaMIGXMQswCQYD
+VQQGEwJVUzEiMCAGA1UECgwZQW1hem9uIFdlYiBTZXJ2aWNlcywgSW5jLjETMBEG
+A1UECwwKQW1hem9uIFJEUzELMAkGA1UECAwCV0ExMDAuBgNVBAMMJ0FtYXpvbiBS
+RFMgYXAtc291dGgtMiBSb290IENBIEVDQzM4NCBHMTEQMA4GA1UEBwwHU2VhdHRs
+ZTB2MBAGByqGSM49AgEGBSuBBAAiA2IABDsX6fhdUWBQpYTdseBD/P3s96Dtw2Iw
+OrXKNToCnmX5nMkUGdRn9qKNiz1pw3EPzaPxShbYwQ7LYP09ENK/JN4QQjxMihxC
+jLFxS85nhBQQQGRCWikDAe38mD8fSvREQKNCMEAwDwYDVR0TAQH/BAUwAwEB/zAd
+BgNVHQ4EFgQUIh1xZiseQYFjPYKJmGbruAgRH+AwDgYDVR0PAQH/BAQDAgGGMAoG
+CCqGSM49BAMDA2gAMGUCMFudS4zLy+UUGrtgNLtRMcu/DZ9BUzV4NdHxo0bkG44O
+thnjl4+wTKI6VbyAbj2rkgIxAOHps8NMITU5DpyiMnKTxV8ubb/WGHrLl0BjB8Lw
+ETVJk5DNuZvsIIcm7ykk6iL4Tw==
+-----END CERTIFICATE-----
+-----BEGIN CERTIFICATE-----
+MIIGBDCCA+ygAwIBAgIQDcEmNIAVrDpUw5cH5ynutDANBgkqhkiG9w0BAQwFADCB
+mjELMAkGA1UEBhMCVVMxIjAgBgNVBAoMGUFtYXpvbiBXZWIgU2VydmljZXMsIElu
+Yy4xEzARBgNVBAsMCkFtYXpvbiBSRFMxCzAJBgNVBAgMAldBMTMwMQYDVQQDDCpB
+bWF6b24gUkRTIG1lLWNlbnRyYWwtMSBSb290IENBIFJTQTQwOTYgRzExEDAOBgNV
+BAcMB1NlYXR0bGUwIBcNMjIwNTA3MDA0MDIzWhgPMjEyMjA1MDcwMTQwMjNaMIGa
+MQswCQYDVQQGEwJVUzEiMCAGA1UECgwZQW1hem9uIFdlYiBTZXJ2aWNlcywgSW5j
+LjETMBEGA1UECwwKQW1hem9uIFJEUzELMAkGA1UECAwCV0ExMzAxBgNVBAMMKkFt
+YXpvbiBSRFMgbWUtY2VudHJhbC0xIFJvb3QgQ0EgUlNBNDA5NiBHMTEQMA4GA1UE
+BwwHU2VhdHRsZTCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBAKvADk8t
+Fl9bFlU5sajLPPDSOUpPAkKs6iPlz+27o1GJC88THcOvf3x0nVAcu9WYe9Qaas+4
+j4a0vv51agqyODRD/SNi2HnqW7DbtLPAm6KBHe4twl28ItB/JD5g7u1oPAHFoXMS
+cH1CZEAs5RtlZGzJhcBXLFsHNv/7+SCLyZ7+2XFh9OrtgU4wMzkHoRNndhfwV5bu
+17bPTwuH+VxH37zXf1mQ/KjhuJos0C9dL0FpjYBAuyZTAWhZKs8dpSe4DI544z4w
+gkwUB4bC2nA1TBzsywEAHyNuZ/xRjNpWvx0ToWAA2iFJqC3VO3iKcnBplMvaUuMt
+jwzVSNBnKcoabXCZL2XDLt4YTZR8FSwz05IvsmwcPB7uNTBXq3T9sjejW8QQK3vT
+tzyfLq4jKmQE7PoS6cqYm+hEPm2hDaC/WP9bp3FdEJxZlPH26fq1b7BWYWhQ9pBA
+Nv9zTnzdR1xohTyOJBUFQ81ybEzabqXqVXUIANqIOaNcTB09/sLJ7+zuMhp3mwBu
+LtjfJv8PLuT1r63bU3seROhKA98b5KfzjvbvPSg3vws78JQyoYGbqNyDfyjVjg3U
+v//AdVuPie6PNtdrW3upZY4Qti5IjP9e3kimaJ+KAtTgMRG56W0WxD3SP7+YGGbG
+KhntDOkKsN39hLpn9UOafTIqFu7kIaueEy/NAgMBAAGjQjBAMA8GA1UdEwEB/wQF
+MAMBAf8wHQYDVR0OBBYEFHAems86dTwdZbLe8AaPy3kfIUVoMA4GA1UdDwEB/wQE
+AwIBhjANBgkqhkiG9w0BAQwFAAOCAgEAOBHpp0ICx81kmeoBcZTrMdJs2gnhcd85
+FoSCjXx9H5XE5rmN/lQcxxOgj8hr3uPuLdLHu+i6THAyzjrl2NA1FWiqpfeECGmy
+0jm7iZsYORgGQYp/VKnDrwnKNSqlZvOuRr0kfUexwFlr34Y4VmupvEOK/RdGsd3S
++3hiemcHse9ST/sJLHx962AWMkN86UHPscJEe4+eT3f2Wyzg6La8ARwdWZSNS+WH
+ZfybrncMmuiXuUdHv9XspPsqhKgtHhcYeXOGUtrwQPLe3+VJZ0LVxhlTWr9951GZ
+GfmWwTV/9VsyKVaCFIXeQ6L+gjcKyEzYF8wpMtQlSc7FFqwgC4bKxvMBSaRy88Nr
+lV2+tJD/fr8zGUeBK44Emon0HKDBWGX+/Hq1ZIv0Da0S+j6LbA4fusWxtGfuGha+
+luhHgVInCpALIOamiBEdGhILkoTtx7JrYppt3/Raqg9gUNCOOYlCvGhqX7DXeEfL
+DGabooiY2FNWot6h04JE9nqGj5QqT8D6t/TL1nzxhRPzbcSDIHUd/b5R+a0bAA+7
+YTU6JqzEVCWKEIEynYmqikgLMGB/OzWsgyEL6822QW6hJAQ78XpbNeCzrICF4+GC
+7KShLnwuWoWpAb26268lvOEvCTFM47VC6jNQl97md+2SA9Ma81C9wflid2M83Wle
+cuLMVcQZceE=
+-----END CERTIFICATE-----
+-----BEGIN CERTIFICATE-----
+MIIEADCCAuigAwIBAgIQAhAteLRCvizAElaWORFU2zANBgkqhkiG9w0BAQsFADCB
+mDELMAkGA1UEBhMCVVMxIjAgBgNVBAoMGUFtYXpvbiBXZWIgU2VydmljZXMsIElu
+Yy4xEzARBgNVBAsMCkFtYXpvbiBSRFMxCzAJBgNVBAgMAldBMTEwLwYDVQQDDChB
+bWF6b24gUkRTIG1lLXNvdXRoLTEgUm9vdCBDQSBSU0EyMDQ4IEcxMRAwDgYDVQQH
+DAdTZWF0dGxlMCAXDTIxMDUyMDE3MDkxNloYDzIwNjEwNTIwMTgwOTE2WjCBmDEL
+MAkGA1UEBhMCVVMxIjAgBgNVBAoMGUFtYXpvbiBXZWIgU2VydmljZXMsIEluYy4x
+EzARBgNVBAsMCkFtYXpvbiBSRFMxCzAJBgNVBAgMAldBMTEwLwYDVQQDDChBbWF6
+b24gUkRTIG1lLXNvdXRoLTEgUm9vdCBDQSBSU0EyMDQ4IEcxMRAwDgYDVQQHDAdT
+ZWF0dGxlMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA+qg7JAcOVKjh
+N83SACnBFZPyB63EusfDr/0V9ZdL8lKcmZX9sv/CqoBo3N0EvBqHQqUUX6JvFb7F
+XrMUZ740kr28gSRALfXTFgNODjXeDsCtEkKRTkac/UM8xXHn+hR7UFRPHS3e0GzI
+iLiwQWDkr0Op74W8aM0CfaVKvh2bp4BI1jJbdDnQ9OKXpOxNHGUf0ZGb7TkNPkgI
+b2CBAc8J5o3H9lfw4uiyvl6Fz5JoP+A+zPELAioYBXDrbE7wJeqQDJrETWqR9VEK
+BXURCkVnHeaJy123MpAX2ozf4pqk0V0LOEOZRS29I+USF5DcWr7QIXR/w2I8ws1Q
+7ys+qbE+kQIDAQABo0IwQDAPBgNVHRMBAf8EBTADAQH/MB0GA1UdDgQWBBQFJ16n
+1EcCMOIhoZs/F9sR+Jy++zAOBgNVHQ8BAf8EBAMCAYYwDQYJKoZIhvcNAQELBQAD
+ggEBAOc5nXbT3XTDEZsxX2iD15YrQvmL5m13B3ImZWpx/pqmObsgx3/dg75rF2nQ
+qS+Vl+f/HLh516pj2BPP/yWCq12TRYigGav8UH0qdT3CAClYy2o+zAzUJHm84oiB
+ud+6pFVGkbqpsY+QMpJUbZWu52KViBpJMYsUEy+9cnPSFRVuRAHjYynSiLk2ZEjb
+Wkdc4x0nOZR5tP0FgrX0Ve2KcjFwVQJVZLgOUqmFYQ/G0TIIGTNh9tcmR7yp+xJR
+A2tbPV2Z6m9Yxx4E8lLEPNuoeouJ/GR4CkMEmF8cLwM310t174o3lKKUXJ4Vs2HO
+Wj2uN6R9oI+jGLMSswTzCNV1vgc=
+-----END CERTIFICATE-----
+-----BEGIN CERTIFICATE-----
+MIICuDCCAj6gAwIBAgIRAOocLeZWjYkG/EbHmscuy8gwCgYIKoZIzj0EAwMwgZsx
+CzAJBgNVBAYTAlVTMSIwIAYDVQQKDBlBbWF6b24gV2ViIFNlcnZpY2VzLCBJbmMu
+MRMwEQYDVQQLDApBbWF6b24gUkRTMQswCQYDVQQIDAJXQTE0MDIGA1UEAwwrQW1h
+em9uIFJEUyBhcC1zb3V0aGVhc3QtMSBSb290IENBIEVDQzM4NCBHMTEQMA4GA1UE
+BwwHU2VhdHRsZTAgFw0yMTA1MjEyMTUwMDFaGA8yMTIxMDUyMTIyNTAwMVowgZsx
+CzAJBgNVBAYTAlVTMSIwIAYDVQQKDBlBbWF6b24gV2ViIFNlcnZpY2VzLCBJbmMu
+MRMwEQYDVQQLDApBbWF6b24gUkRTMQswCQYDVQQIDAJXQTE0MDIGA1UEAwwrQW1h
+em9uIFJEUyBhcC1zb3V0aGVhc3QtMSBSb290IENBIEVDQzM4NCBHMTEQMA4GA1UE
+BwwHU2VhdHRsZTB2MBAGByqGSM49AgEGBSuBBAAiA2IABCEr3jq1KtRncnZfK5cq
+btY0nW6ZG3FMbh7XwBIR6Ca0f8llGZ4vJEC1pXgiM/4Dh045B9ZIzNrR54rYOIfa
+2NcYZ7mk06DjIQML64hbAxbQzOAuNzLPx268MrlL2uW2XaNCMEAwDwYDVR0TAQH/
+BAUwAwEB/zAdBgNVHQ4EFgQUln75pChychwN4RfHl+tOinMrfVowDgYDVR0PAQH/
+BAQDAgGGMAoGCCqGSM49BAMDA2gAMGUCMGiyPINRU1mwZ4Crw01vpuPvxZxb2IOr
+yX3RNlOIu4We1H+5dQk5tIvH8KGYFbWEpAIxAO9NZ6/j9osMhLgZ0yj0WVjb+uZx
+YlZR9fyFisY/jNfX7QhSk+nrc3SFLRUNtpXrng==
+-----END CERTIFICATE-----
+-----BEGIN CERTIFICATE-----
+MIIEBTCCAu2gAwIBAgIRAKiaRZatN8eiz9p0s0lu0rQwDQYJKoZIhvcNAQELBQAw
+gZoxCzAJBgNVBAYTAlVTMSIwIAYDVQQKDBlBbWF6b24gV2ViIFNlcnZpY2VzLCBJ
+bmMuMRMwEQYDVQQLDApBbWF6b24gUkRTMQswCQYDVQQIDAJXQTEzMDEGA1UEAwwq
+QW1hem9uIFJEUyBjYS1jZW50cmFsLTEgUm9vdCBDQSBSU0EyMDQ4IEcxMRAwDgYD
+VQQHDAdTZWF0dGxlMCAXDTIxMDUyMTIyMDIzNVoYDzIwNjEwNTIxMjMwMjM1WjCB
+mjELMAkGA1UEBhMCVVMxIjAgBgNVBAoMGUFtYXpvbiBXZWIgU2VydmljZXMsIElu
+Yy4xEzARBgNVBAsMCkFtYXpvbiBSRFMxCzAJBgNVBAgMAldBMTMwMQYDVQQDDCpB
+bWF6b24gUkRTIGNhLWNlbnRyYWwtMSBSb290IENBIFJTQTIwNDggRzExEDAOBgNV
+BAcMB1NlYXR0bGUwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCygVMf
+qB865IR9qYRBRFHn4eAqGJOCFx+UbraQZmjr/mnRqSkY+nhbM7Pn/DWOrRnxoh+w
+q5F9ZxdZ5D5T1v6kljVwxyfFgHItyyyIL0YS7e2h7cRRscCM+75kMedAP7icb4YN
+LfWBqfKHbHIOqvvQK8T6+Emu/QlG2B5LvuErrop9K0KinhITekpVIO4HCN61cuOe
+CADBKF/5uUJHwS9pWw3uUbpGUwsLBuhJzCY/OpJlDqC8Y9aToi2Ivl5u3/Q/sKjr
+6AZb9lx4q3J2z7tJDrm5MHYwV74elGSXoeoG8nODUqjgklIWAPrt6lQ3WJpO2kug
+8RhCdSbWkcXHfX95AgMBAAGjQjBAMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYE
+FOIxhqTPkKVqKBZvMWtKewKWDvDBMA4GA1UdDwEB/wQEAwIBhjANBgkqhkiG9w0B
+AQsFAAOCAQEAqoItII89lOl4TKvg0I1EinxafZLXIheLcdGCxpjRxlZ9QMQUN3yb
+y/8uFKBL0otbQgJEoGhxm4h0tp54g28M6TN1U0332dwkjYxUNwvzrMaV5Na55I2Z
+1hq4GB3NMXW+PvdtsgVOZbEN+zOyOZ5MvJHEQVkT3YRnf6avsdntltcRzHJ16pJc
+Y8rR7yWwPXh1lPaPkxddrCtwayyGxNbNmRybjR48uHRhwu7v2WuAMdChL8H8bp89
+TQLMrMHgSbZfee9hKhO4Zebelf1/cslRSrhkG0ESq6G5MUINj6lMg2g6F0F7Xz2v
+ncD/vuRN5P+vT8th/oZ0Q2Gc68Pun0cn/g==
+-----END CERTIFICATE-----
+-----BEGIN CERTIFICATE-----
+MIID/zCCAuegAwIBAgIRAJYlnmkGRj4ju/2jBQsnXJYwDQYJKoZIhvcNAQELBQAw
+gZcxCzAJBgNVBAYTAlVTMSIwIAYDVQQKDBlBbWF6b24gV2ViIFNlcnZpY2VzLCBJ
+bmMuMRMwEQYDVQQLDApBbWF6b24gUkRTMQswCQYDVQQIDAJXQTEwMC4GA1UEAwwn
+QW1hem9uIFJEUyB1cy1lYXN0LTIgUm9vdCBDQSBSU0EyMDQ4IEcxMRAwDgYDVQQH
+DAdTZWF0dGxlMCAXDTIxMDUyMTIzMDQ0NFoYDzIwNjEwNTIyMDAwNDQ0WjCBlzEL
+MAkGA1UEBhMCVVMxIjAgBgNVBAoMGUFtYXpvbiBXZWIgU2VydmljZXMsIEluYy4x
+EzARBgNVBAsMCkFtYXpvbiBSRFMxCzAJBgNVBAgMAldBMTAwLgYDVQQDDCdBbWF6
+b24gUkRTIHVzLWVhc3QtMiBSb290IENBIFJTQTIwNDggRzExEDAOBgNVBAcMB1Nl
+YXR0bGUwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQC74V3eigv+pCj5
+nqDBqplY0Jp16pTeNB06IKbzb4MOTvNde6QjsZxrE1xUmprT8LxQqN9tI3aDYEYk
+b9v4F99WtQVgCv3Y34tYKX9NwWQgwS1vQwnIR8zOFBYqsAsHEkeJuSqAB12AYUSd
+Zv2RVFjiFmYJho2X30IrSLQfS/IE3KV7fCyMMm154+/K1Z2IJlcissydEAwgsUHw
+edrE6CxJVkkJ3EvIgG4ugK/suxd8eEMztaQYJwSdN8TdfT59LFuSPl7zmF3fIBdJ
+//WexcQmGabaJ7Xnx+6o2HTfkP8Zzzzaq8fvjAcvA7gyFH5EP26G2ZqMG+0y4pTx
+SPVTrQEXAgMBAAGjQjBAMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFIWWuNEF
+sUMOC82XlfJeqazzrkPDMA4GA1UdDwEB/wQEAwIBhjANBgkqhkiG9w0BAQsFAAOC
+AQEAgClmxcJaQTGpEZmjElL8G2Zc8lGc+ylGjiNlSIw8X25/bcLRptbDA90nuP+q
+zXAMhEf0ccbdpwxG/P5a8JipmHgqQLHfpkvaXx+0CuP++3k+chAJ3Gk5XtY587jX
++MJfrPgjFt7vmMaKmynndf+NaIJAYczjhJj6xjPWmGrjM3MlTa9XesmelMwP3jep
+bApIWAvCYVjGndbK9byyMq1nyj0TUzB8oJZQooaR3MMjHTmADuVBylWzkRMxbKPl
+4Nlsk4Ef1JvIWBCzsMt+X17nuKfEatRfp3c9tbpGlAE/DSP0W2/Lnayxr4RpE9ds
+ICF35uSis/7ZlsftODUe8wtpkQ==
+-----END CERTIFICATE-----
+-----BEGIN CERTIFICATE-----
+MIICrjCCAjOgAwIBAgIQS7vMpOTVq2Jw457NdZ2ffjAKBggqhkjOPQQDAzCBljEL
+MAkGA1UEBhMCVVMxIjAgBgNVBAoMGUFtYXpvbiBXZWIgU2VydmljZXMsIEluYy4x
+EzARBgNVBAsMCkFtYXpvbiBSRFMxCzAJBgNVBAgMAldBMS8wLQYDVQQDDCZBbWF6
+b24gUkRTIGNhLXdlc3QtMSBSb290IENBIEVDQzM4NCBHMTEQMA4GA1UEBwwHU2Vh
+dHRsZTAgFw0yMzA5MTkyMjExNDNaGA8yMTIzMDkxOTIzMTE0M1owgZYxCzAJBgNV
+BAYTAlVTMSIwIAYDVQQKDBlBbWF6b24gV2ViIFNlcnZpY2VzLCBJbmMuMRMwEQYD
+VQQLDApBbWF6b24gUkRTMQswCQYDVQQIDAJXQTEvMC0GA1UEAwwmQW1hem9uIFJE
+UyBjYS13ZXN0LTEgUm9vdCBDQSBFQ0MzODQgRzExEDAOBgNVBAcMB1NlYXR0bGUw
+djAQBgcqhkjOPQIBBgUrgQQAIgNiAARdgGSs/F2lpWKqS1ZpcmatFED1JurmNbXG
+Sqhv1A/geHrKCS15MPwjtnfZiujYKY4fNkCCUseoGDwkC4281nwkokvnfWR1/cXy
+LxfACoXNxsI4b+37CezSUBl48/5p1/OjQjBAMA8GA1UdEwEB/wQFMAMBAf8wHQYD
+VR0OBBYEFFhLokGBuJGwKJhZcYSYKyZIitJtMA4GA1UdDwEB/wQEAwIBhjAKBggq
+hkjOPQQDAwNpADBmAjEA8aQQlzJRHbqFsRY4O3u/cN0T8dzjcqnYn4NV1w+jvhzt
+QPJLB+ggGyQhoFR6G2UrAjEA0be8OP5MWXD8d01KKbo5Dpy6TwukF5qoJmkFJKS3
+bKfEMvFWxXoV06HNZFWdI80u
+-----END CERTIFICATE-----
+-----BEGIN CERTIFICATE-----
+MIIF/zCCA+egAwIBAgIRAPvvd+MCcp8E36lHziv0xhMwDQYJKoZIhvcNAQEMBQAw
+gZcxCzAJBgNVBAYTAlVTMSIwIAYDVQQKDBlBbWF6b24gV2ViIFNlcnZpY2VzLCBJ
+bmMuMRMwEQYDVQQLDApBbWF6b24gUkRTMQswCQYDVQQIDAJXQTEwMC4GA1UEAwwn
+QW1hem9uIFJEUyB1cy1lYXN0LTIgUm9vdCBDQSBSU0E0MDk2IEcxMRAwDgYDVQQH
+DAdTZWF0dGxlMCAXDTIxMDUyMTIzMTEwNloYDzIxMjEwNTIyMDAxMTA2WjCBlzEL
+MAkGA1UEBhMCVVMxIjAgBgNVBAoMGUFtYXpvbiBXZWIgU2VydmljZXMsIEluYy4x
+EzARBgNVBAsMCkFtYXpvbiBSRFMxCzAJBgNVBAgMAldBMTAwLgYDVQQDDCdBbWF6
+b24gUkRTIHVzLWVhc3QtMiBSb290IENBIFJTQTQwOTYgRzExEDAOBgNVBAcMB1Nl
+YXR0bGUwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQDbvwekKIKGcV/s
+lDU96a71ZdN2pTYkev1X2e2/ICb765fw/i1jP9MwCzs8/xHBEQBJSxdfO4hPeNx3
+ENi0zbM+TrMKliS1kFVe1trTTEaHYjF8BMK9yTY0VgSpWiGxGwg4tshezIA5lpu8
+sF6XMRxosCEVCxD/44CFqGZTzZaREIvvFPDTXKJ6yOYnuEkhH3OcoOajHN2GEMMQ
+ShuyRFDQvYkqOC/Q5icqFbKg7eGwfl4PmimdV7gOVsxSlw2s/0EeeIILXtHx22z3
+8QBhX25Lrq2rMuaGcD3IOMBeBo2d//YuEtd9J+LGXL9AeOXHAwpvInywJKAtXTMq
+Wsy3LjhuANFrzMlzjR2YdjkGVzeQVx3dKUzJ2//Qf7IXPSPaEGmcgbxuatxjnvfT
+H85oeKr3udKnXm0Kh7CLXeqJB5ITsvxI+Qq2iXtYCc+goHNR01QJwtGDSzuIMj3K
+f+YMrqBXZgYBwU2J/kCNTH31nfw96WTbOfNGwLwmVRDgguzFa+QzmQsJW4FTDMwc
+7cIjwdElQQVA+Gqa67uWmyDKAnoTkudmgAP+OTBkhnmc6NJuZDcy6f/iWUdl0X0u
+/tsfgXXR6ZovnHonM13ANiN7VmEVqFlEMa0VVmc09m+2FYjjlk8F9sC7Rc4wt214
+7u5YvCiCsFZwx44baP5viyRZgkJVpQIDAQABo0IwQDAPBgNVHRMBAf8EBTADAQH/
+MB0GA1UdDgQWBBQgCZCsc34nVTRbWsniXBPjnUTQ2DAOBgNVHQ8BAf8EBAMCAYYw
+DQYJKoZIhvcNAQEMBQADggIBAAQas3x1G6OpsIvQeMS9BbiHG3+kU9P/ba6Rrg+E
+lUz8TmL04Bcd+I+R0IyMBww4NznT+K60cFdk+1iSmT8Q55bpqRekyhcdWda1Qu0r
+JiTi7zz+3w2v66akofOnGevDpo/ilXGvCUJiLOBnHIF0izUqzvfczaMZGJT6xzKq
+PcEVRyAN1IHHf5KnGzUlVFv9SGy47xJ9I1vTk24JU0LWkSLzMMoxiUudVmHSqJtN
+u0h+n/x3Q6XguZi1/C1KOntH56ewRh8n5AF7c+9LJJSRM9wunb0Dzl7BEy21Xe9q
+03xRYjf5wn8eDELB8FZPa1PrNKXIOLYM9egdctbKEcpSsse060+tkyBrl507+SJT
+04lvJ4tcKjZFqxn+bUkDQvXYj0D3WK+iJ7a8kZJPRvz8BDHfIqancY8Tgw+69SUn
+WqIb+HNZqFuRs16WFSzlMksqzXv6wcDSyI7aZOmCGGEcYW9NHk8EuOnOQ+1UMT9C
+Qb1GJcipjRzry3M4KN/t5vN3hIetB+/PhmgTO4gKhBETTEyPC3HC1QbdVfRndB6e
+U/NF2U/t8U2GvD26TTFLK4pScW7gyw4FQyXWs8g8FS8f+R2yWajhtS9++VDJQKom
+fAUISoCH+PlPRJpu/nHd1Zrddeiiis53rBaLbXu2J1Q3VqjWOmtj0HjxJJxWnYmz
+Pqj2
+-----END CERTIFICATE-----
+-----BEGIN CERTIFICATE-----
+MIIGATCCA+mgAwIBAgIRAI/U4z6+GF8/znpHM8Dq8G0wDQYJKoZIhvcNAQEMBQAw
+gZgxCzAJBgNVBAYTAlVTMSIwIAYDVQQKDBlBbWF6b24gV2ViIFNlcnZpY2VzLCBJ
+bmMuMRMwEQYDVQQLDApBbWF6b24gUkRTMQswCQYDVQQIDAJXQTExMC8GA1UEAwwo
+QW1hem9uIFJEUyBhcC1zb3V0aC0yIFJvb3QgQ0EgUlNBNDA5NiBHMTEQMA4GA1UE
+BwwHU2VhdHRsZTAgFw0yMjA2MDYyMTQ4MThaGA8yMTIyMDYwNjIyNDgxOFowgZgx
+CzAJBgNVBAYTAlVTMSIwIAYDVQQKDBlBbWF6b24gV2ViIFNlcnZpY2VzLCBJbmMu
+MRMwEQYDVQQLDApBbWF6b24gUkRTMQswCQYDVQQIDAJXQTExMC8GA1UEAwwoQW1h
+em9uIFJEUyBhcC1zb3V0aC0yIFJvb3QgQ0EgUlNBNDA5NiBHMTEQMA4GA1UEBwwH
+U2VhdHRsZTCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBAK5WqMvyq888
+3uuOtEj1FcP6iZhqO5kJurdJF59Otp2WCg+zv6I+QwaAspEWHQsKD405XfFsTGKV
+SKTCwoMxwBniuChSmyhlagQGKSnRY9+znOWq0v7hgmJRwp6FqclTbubmr+K6lzPy
+hs86mEp68O5TcOTYWUlPZDqfKwfNTbtCl5YDRr8Gxb5buHmkp6gUSgDkRsXiZ5VV
+b3GBmXRqbnwo5ZRNAzQeM6ylXCn4jKs310lQGUrFbrJqlyxUdfxzqdlaIRn2X+HY
+xRSYbHox3LVNPpJxYSBRvpQVFSy9xbX8d1v6OM8+xluB31cbLBtm08KqPFuqx+cO
+I2H5F0CYqYzhyOSKJsiOEJT6/uH4ewryskZzncx9ae62SC+bB5n3aJLmOSTkKLFY
+YS5IsmDT2m3iMgzsJNUKVoCx2zihAzgBanFFBsG+Xmoq0aKseZUI6vd2qpd5tUST
+/wS1sNk0Ph7teWB2ACgbFE6etnJ6stwjHFZOj/iTYhlnR2zDRU8akunFdGb6CB4/
+hMxGJxaqXSJeGtHm7FpadlUTf+2ESbYcVW+ui/F8sdBJseQdKZf3VdZZMgM0bcaX
+NE47cauDTy72WdU9YJX/YXKYMLDE0iFHTnGpfVGsuWGPYhlwZ3dFIO07mWnCRM6X
+u5JXRB1oy5n5HRluMsmpSN/R92MeBxKFAgMBAAGjQjBAMA8GA1UdEwEB/wQFMAMB
+Af8wHQYDVR0OBBYEFNtH0F0xfijSLHEyIkRGD9gW6NazMA4GA1UdDwEB/wQEAwIB
+hjANBgkqhkiG9w0BAQwFAAOCAgEACo+5jFeY3ygxoDDzL3xpfe5M0U1WxdKk+az4
+/OfjZvkoma7WfChi3IIMtwtKLYC2/seKWA4KjlB3rlTsCVNPnK6D+gAnybcfTKk/
+IRSPk92zagwQkSUWtAk80HpVfWJzpkSU16ejiajhedzOBRtg6BwsbSqLCDXb8hXr
+eXWC1S9ZceGc+LcKRHewGWPu31JDhHE9bNcl9BFSAS0lYVZqxIRWxivZ+45j5uQv
+wPrC8ggqsdU3K8quV6dblUQzzA8gKbXJpCzXZihkPrYpQHTH0szvXvgebh+CNUAG
+rUxm8+yTS0NFI3U+RLbcLFVzSvjMOnEwCX0SPj5XZRYYXs5ajtQCoZhTUkkwpDV8
+RxXk8qGKiXwUxDO8GRvmvM82IOiXz5w2jy/h7b7soyIgdYiUydMq4Ja4ogB/xPZa
+gf4y0o+bremO15HFf1MkaU2UxPK5FFVUds05pKvpSIaQWbF5lw4LHHj4ZtVup7zF
+CLjPWs4Hs/oUkxLMqQDw0FBwlqa4uot8ItT8uq5BFpz196ZZ+4WXw5PVzfSxZibI
+C/nwcj0AS6qharXOs8yPnPFLPSZ7BbmWzFDgo3tpglRqo3LbSPsiZR+sLeivqydr
+0w4RK1btRda5Ws88uZMmW7+2aufposMKcbAdrApDEAVzHijbB/nolS5nsnFPHZoA
+KDPtFEk=
+-----END CERTIFICATE-----
+-----BEGIN CERTIFICATE-----
+MIICtzCCAj2gAwIBAgIQVZ5Y/KqjR4XLou8MCD5pOjAKBggqhkjOPQQDAzCBmzEL
+MAkGA1UEBhMCVVMxIjAgBgNVBAoMGUFtYXpvbiBXZWIgU2VydmljZXMsIEluYy4x
+EzARBgNVBAsMCkFtYXpvbiBSRFMxCzAJBgNVBAgMAldBMTQwMgYDVQQDDCtBbWF6
+b24gUkRTIGFwLXNvdXRoZWFzdC00IFJvb3QgQ0EgRUNDMzg0IEcxMRAwDgYDVQQH
+DAdTZWF0dGxlMCAXDTIyMDUyNTE2NTgzM1oYDzIxMjIwNTI1MTc1ODMzWjCBmzEL
+MAkGA1UEBhMCVVMxIjAgBgNVBAoMGUFtYXpvbiBXZWIgU2VydmljZXMsIEluYy4x
+EzARBgNVBAsMCkFtYXpvbiBSRFMxCzAJBgNVBAgMAldBMTQwMgYDVQQDDCtBbWF6
+b24gUkRTIGFwLXNvdXRoZWFzdC00IFJvb3QgQ0EgRUNDMzg0IEcxMRAwDgYDVQQH
+DAdTZWF0dGxlMHYwEAYHKoZIzj0CAQYFK4EEACIDYgAEbo473OmpD5vkckdJajXg
+brhmNFyoSa0WCY1njuZC2zMFp3zP6rX4I1r3imrYnJd9pFH/aSiV/r6L5ACE5RPx
+4qdg5SQ7JJUaZc3DWsTOiOed7BCZSzM+KTYK/2QzDMApo0IwQDAPBgNVHRMBAf8E
+BTADAQH/MB0GA1UdDgQWBBTmogc06+1knsej1ltKUOdWFvwgsjAOBgNVHQ8BAf8E
+BAMCAYYwCgYIKoZIzj0EAwMDaAAwZQIxAIs7TlLMbGTWNXpGiKf9DxaM07d/iDHe
+F/Vv/wyWSTGdobxBL6iArQNVXz0Gr4dvPAIwd0rsoa6R0x5mtvhdRPtM37FYrbHJ
+pbV+OMusQqcSLseunLBoCHenvJW0QOCQ8EDY
+-----END CERTIFICATE-----
+-----BEGIN CERTIFICATE-----
+MIIGBTCCA+2gAwIBAgIRAO9dVdiLTEGO8kjUFExJmgowDQYJKoZIhvcNAQEMBQAw
+gZoxCzAJBgNVBAYTAlVTMSIwIAYDVQQKDBlBbWF6b24gV2ViIFNlcnZpY2VzLCBJ
+bmMuMRMwEQYDVQQLDApBbWF6b24gUkRTMQswCQYDVQQIDAJXQTEzMDEGA1UEAwwq
+QW1hem9uIFJEUyBpbC1jZW50cmFsLTEgUm9vdCBDQSBSU0E0MDk2IEcxMRAwDgYD
+VQQHDAdTZWF0dGxlMCAXDTIyMTIwMjIwMjYwOFoYDzIxMjIxMjAyMjEyNjA4WjCB
+mjELMAkGA1UEBhMCVVMxIjAgBgNVBAoMGUFtYXpvbiBXZWIgU2VydmljZXMsIElu
+Yy4xEzARBgNVBAsMCkFtYXpvbiBSRFMxCzAJBgNVBAgMAldBMTMwMQYDVQQDDCpB
+bWF6b24gUkRTIGlsLWNlbnRyYWwtMSBSb290IENBIFJTQTQwOTYgRzExEDAOBgNV
+BAcMB1NlYXR0bGUwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQDkVHmJ
+bUc8CNDGBcgPmXHSHj5dS1PDnnpk3doCu6pahyYXW8tqAOmOqsDuNz48exY7YVy4
+u9I9OPBeTYB9ZUKwxq+1ZNLsr1cwVz5DdOyDREVFOjlU4rvw0eTgzhP5yw/d+Ai/
++WmPebZG0irwPKN2f60W/KJ45UNtR+30MT8ugfnPuSHWjjV+dqCOCp/mj8nOCckn
+k8GoREwjuTFJMKInpQUC0BaVVX6LiIdgtoLY4wdx00EqNBuROoRTAvrked0jvm7J
+UI39CSYxhNZJ9F6LdESZXjI4u2apfNQeSoy6WptxFHr+kh2yss1B2KT6lbwGjwWm
+l9HODk9kbBNSy2NeewAms36q+p8wSLPavL28IRfK0UaBAiN1hr2a/2RDGCwOJmw6
+5erRC5IIX5kCStyXPEGhVPp18EvMuBd37eLIxjZBBO8AIDf4Ue8QmxSeZH0cT204
+3/Bd6XR6+Up9iMTxkHr1URcL1AR8Zd62lg/lbEfxePNMK9mQGxKP8eTMG5AjtW9G
+TatEoRclgE0wZQalXHmKpBNshyYdGqQZhzL1MxCxWzfHNgZkTKIsdzxrjnP7RiBR
+jdRH0YhXn6Y906QfLwMCaufwfQ5J8+nj/tu7nG138kSxsu6VUkhnQJhUcUsxuHD/
+NnBx0KGVEldtZiZf7ccgtRVp1lA0OrVtq3ZLMQIDAQABo0IwQDAPBgNVHRMBAf8E
+BTADAQH/MB0GA1UdDgQWBBQ2WC3p8rWeE2N0S4Om01KsNLpk/jAOBgNVHQ8BAf8E
+BAMCAYYwDQYJKoZIhvcNAQEMBQADggIBAFFEVDt45Obr6Ax9E4RMgsKjj4QjMFB9
+wHev1jL7hezl/ULrHuWxjIusaIZEIcKfn+v2aWtqOq13P3ht7jV5KsV29CmFuCdQ
+q3PWiAXVs+hnMskTOmGMDnptqd6/UuSIha8mlOKKAvnmRQJvfX9hIfb/b/mVyKWD
+uvTTmcy3cOTJY5ZIWGyzuvmcqA0YNcb7rkJt/iaLq4RX3/ofq4y4w36hefbcvj++
+pXHOmXk3dAej3y6SMBOUcGMyCJcCluRPNYKDTLn+fitcPxPC3JG7fI5bxQ0D6Hpa
+qbyGBQu96sfahQyMc+//H8EYlo4b0vPeS5RFFXJS/VBf0AyNT4vVc7H17Q6KjeNp
+wEARqsIa7UalHx9MnxrQ/LSTTxiC8qmDkIFuQtw8iQMN0SoL5S0eCZNRD31awgaY
+y1PvY8JMN549ugIUjOXnown/OxharLW1evWUraU5rArq3JfeFpPXl4K/u10T5SCL
+iJRoxFilGPMFE3hvnmbi5rEy8wRUn7TpLb4I4s/CB/lT2qZTPqvQHwxKCnMm9BKF
+NHb4rLL5dCvUi5NJ6fQ/exOoGdOVSfT7jqFeq2TtNunERSz9vpriweliB6iIe1Al
+Thj8aEs1GqA764rLVGA+vUe18NhjJm9EemrdIzjSQFy/NdbN/DMaHqEzJogWloAI
+izQWYnCS19TJ
+-----END CERTIFICATE-----
+-----BEGIN CERTIFICATE-----
+MIICvTCCAkOgAwIBAgIQCIY7E/bFvFN2lK9Kckb0dTAKBggqhkjOPQQDAzCBnjEL
+MAkGA1UEBhMCVVMxIjAgBgNVBAoMGUFtYXpvbiBXZWIgU2VydmljZXMsIEluYy4x
+EzARBgNVBAsMCkFtYXpvbiBSRFMxCzAJBgNVBAgMAldBMTcwNQYDVQQDDC5BbWF6
+b24gUkRTIFByZXZpZXcgdXMtZWFzdC0yIFJvb3QgQ0EgRUNDMzg0IEcxMRAwDgYD
+VQQHDAdTZWF0dGxlMCAXDTIxMDUxODIxMDUxMFoYDzIxMjEwNTE4MjIwNTEwWjCB
+njELMAkGA1UEBhMCVVMxIjAgBgNVBAoMGUFtYXpvbiBXZWIgU2VydmljZXMsIElu
+Yy4xEzARBgNVBAsMCkFtYXpvbiBSRFMxCzAJBgNVBAgMAldBMTcwNQYDVQQDDC5B
+bWF6b24gUkRTIFByZXZpZXcgdXMtZWFzdC0yIFJvb3QgQ0EgRUNDMzg0IEcxMRAw
+DgYDVQQHDAdTZWF0dGxlMHYwEAYHKoZIzj0CAQYFK4EEACIDYgAEMI0hzf1JCEOI
+Eue4+DmcNnSs2i2UaJxHMrNGGfU7b42a7vwP53F7045ffHPBGP4jb9q02/bStZzd
+VHqfcgqkSRI7beBKjD2mfz82hF/wJSITTgCLs+NRpS6zKMFOFHUNo0IwQDAPBgNV
+HRMBAf8EBTADAQH/MB0GA1UdDgQWBBS8uF/6hk5mPLH4qaWv9NVZaMmyTjAOBgNV
+HQ8BAf8EBAMCAYYwCgYIKoZIzj0EAwMDaAAwZQIxAO7Pu9wzLyM0X7Q08uLIL+vL
+qaxe3UFuzFTWjM16MLJHbzLf1i9IDFKz+Q4hXCSiJwIwClMBsqT49BPUxVsJnjGr
+EbyEk6aOOVfY1p2yQL649zh3M4h8okLnwf+bYIb1YpeU
+-----END CERTIFICATE-----
+-----BEGIN CERTIFICATE-----
+MIIEADCCAuigAwIBAgIQY+JhwFEQTe36qyRlUlF8ozANBgkqhkiG9w0BAQsFADCB
+mDELMAkGA1UEBhMCVVMxIjAgBgNVBAoMGUFtYXpvbiBXZWIgU2VydmljZXMsIElu
+Yy4xEzARBgNVBAsMCkFtYXpvbiBSRFMxCzAJBgNVBAgMAldBMTEwLwYDVQQDDChB
+bWF6b24gUkRTIGFmLXNvdXRoLTEgUm9vdCBDQSBSU0EyMDQ4IEcxMRAwDgYDVQQH
+DAdTZWF0dGxlMCAXDTIxMDUxOTE5MjQxNloYDzIwNjEwNTE5MjAyNDE2WjCBmDEL
+MAkGA1UEBhMCVVMxIjAgBgNVBAoMGUFtYXpvbiBXZWIgU2VydmljZXMsIEluYy4x
+EzARBgNVBAsMCkFtYXpvbiBSRFMxCzAJBgNVBAgMAldBMTEwLwYDVQQDDChBbWF6
+b24gUkRTIGFmLXNvdXRoLTEgUm9vdCBDQSBSU0EyMDQ4IEcxMRAwDgYDVQQHDAdT
+ZWF0dGxlMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAnIye77j6ev40
+8wRPyN2OdKFSUfI9jB20Or2RLO+RDoL43+USXdrze0Wv4HMRLqaen9BcmCfaKMp0
+E4SFo47bXK/O17r6G8eyq1sqnHE+v288mWtYH9lAlSamNFRF6YwA7zncmE/iKL8J
+0vePHMHP/B6svw8LULZCk+nZk3tgxQn2+r0B4FOz+RmpkoVddfqqUPMbKUxhM2wf
+fO7F6bJaUXDNMBPhCn/3ayKCjYr49ErmnpYV2ZVs1i34S+LFq39J7kyv6zAgbHv9
++/MtRMoRB1CjpqW0jIOZkHBdYcd1o9p1zFn591Do1wPkmMsWdjIYj+6e7UXcHvOB
+2+ScIRAcnwIDAQABo0IwQDAPBgNVHRMBAf8EBTADAQH/MB0GA1UdDgQWBBQGtq2W
+YSyMMxpdQ3IZvcGE+nyZqTAOBgNVHQ8BAf8EBAMCAYYwDQYJKoZIhvcNAQELBQAD
+ggEBAEgoP3ixJsKSD5FN8dQ01RNHERl/IFbA7TRXfwC+L1yFocKnQh4Mp/msPRSV
++OeHIvemPW/wtZDJzLTOFJ6eTolGekHK1GRTQ6ZqsWiU2fmiOP8ks4oSpI+tQ9Lw
+VrfZqTiEcS5wEIqyfUAZZfKDo7W1xp+dQWzfczSBuZJZwI5iaha7+ILM0r8Ckden
+TVTapc5pLSoO15v0ziRuQ2bT3V3nwu/U0MRK44z+VWOJdSiKxdnOYDs8hFNnKhfe
+klbTZF7kW7WbiNYB43OaAQBJ6BALZsIskEaqfeZT8FD71uN928TcEQyBDXdZpRN+
+iGQZDGhht0r0URGMDSs9waJtTfA=
+-----END CERTIFICATE-----
+-----BEGIN CERTIFICATE-----
+MIIF/jCCA+agAwIBAgIQXY/dmS+72lZPranO2JM9jjANBgkqhkiG9w0BAQwFADCB
+lzELMAkGA1UEBhMCVVMxIjAgBgNVBAoMGUFtYXpvbiBXZWIgU2VydmljZXMsIElu
+Yy4xEzARBgNVBAsMCkFtYXpvbiBSRFMxCzAJBgNVBAgMAldBMTAwLgYDVQQDDCdB
+bWF6b24gUkRTIGFwLWVhc3QtMSBSb290IENBIFJTQTQwOTYgRzExEDAOBgNVBAcM
+B1NlYXR0bGUwIBcNMjEwNTI1MjEzNDUxWhgPMjEyMTA1MjUyMjM0NTFaMIGXMQsw
+CQYDVQQGEwJVUzEiMCAGA1UECgwZQW1hem9uIFdlYiBTZXJ2aWNlcywgSW5jLjET
+MBEGA1UECwwKQW1hem9uIFJEUzELMAkGA1UECAwCV0ExMDAuBgNVBAMMJ0FtYXpv
+biBSRFMgYXAtZWFzdC0xIFJvb3QgQ0EgUlNBNDA5NiBHMTEQMA4GA1UEBwwHU2Vh
+dHRsZTCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBAMyW9kBJjD/hx8e8
+b5E1sF42bp8TXsz1htSYE3Tl3T1Aq379DfEhB+xa/ASDZxt7/vwa81BkNo4M6HYq
+okYIXeE7cu5SnSgjWXqcERhgPevtAwgmhdE3yREe8oz2DyOi2qKKZqah+1gpPaIQ
+fK0uAqoeQlyHosye3KZZKkDHBatjBsQ5kf8lhuf7wVulEZVRHY2bP2X7N98PfbpL
+QdH7mWXzDtJJ0LiwFwds47BrkgK1pkHx2p1mTo+HMkfX0P6Fq1atkVC2RHHtbB/X
+iYyH7paaHBzviFrhr679zNqwXIOKlbf74w3mS11P76rFn9rS1BAH2Qm6eY5S/Fxe
+HEKXm4kjPN63Zy0p3yE5EjPt54yPkvumOnT+RqDGJ2HCI9k8Ehcbve0ogfdRKNqQ
+VHWYTy8V33ndQRHZlx/CuU1yN61TH4WSoMly1+q1ihTX9sApmlQ14B2pJi/9DnKW
+cwECrPy1jAowC2UJ45RtC8UC05CbP9yrIy/7Noj8gQDiDOepm+6w1g6aNlWoiuQS
+kyI6nzz1983GcnOHya73ga7otXo0Qfg9jPghlYiMomrgshlSLDHZG0Ib/3hb8cnR
+1OcN9FpzNmVK2Ll1SmTMLrIhuCkyNYX9O/bOknbcf706XeESxGduSkHEjIw/k1+2
+Atteoq5dT6cwjnJ9hyhiueVlVkiDAgMBAAGjQjBAMA8GA1UdEwEB/wQFMAMBAf8w
+HQYDVR0OBBYEFLUI+DD7RJs+0nRnjcwIVWzzYSsFMA4GA1UdDwEB/wQEAwIBhjAN
+BgkqhkiG9w0BAQwFAAOCAgEAb1mcCHv4qMQetLGTBH9IxsB2YUUhr5dda0D2BcHr
+UtDbfd0VQs4tux6h/6iKwHPx0Ew8fuuYj99WknG0ffgJfNc5/fMspxR/pc1jpdyU
+5zMQ+B9wi0lOZPO9uH7/pr+d2odcNEy8zAwqdv/ihsTwLmGP54is9fVbsgzNW1cm
+HKAVL2t/Ope+3QnRiRilKCN1lzhav4HHdLlN401TcWRWKbEuxF/FgxSO2Hmx86pj
+e726lweCTMmnq/cTsPOVY0WMjs0or3eHDVlyLgVeV5ldyN+ptg3Oit60T05SRa58
+AJPTaVKIcGQ/gKkKZConpu7GDofT67P/ox0YNY57LRbhsx9r5UY4ROgz7WMQ1yoS
+Y+19xizm+mBm2PyjMUbfwZUyCxsdKMwVdOq5/UmTmdms+TR8+m1uBHPOTQ2vKR0s
+Pd/THSzPuu+d3dbzRyDSLQbHFFneG760CUlD/ZmzFlQjJ89/HmAmz8IyENq+Sjhx
+Jgzy+FjVZb8aRUoYLlnffpUpej1n87Ynlr1GrvC4GsRpNpOHlwuf6WD4W0qUTsC/
+C9JO+fBzUj/aWlJzNcLEW6pte1SB+EdkR2sZvWH+F88TxemeDrV0jKJw5R89CDf8
+ZQNfkxJYjhns+YeV0moYjqQdc7tq4i04uggEQEtVzEhRLU5PE83nlh/K2NZZm8Kj
+dIA=
+-----END CERTIFICATE-----
+-----BEGIN CERTIFICATE-----
+MIID/zCCAuegAwIBAgIRAPVSMfFitmM5PhmbaOFoGfUwDQYJKoZIhvcNAQELBQAw
+gZcxCzAJBgNVBAYTAlVTMSIwIAYDVQQKDBlBbWF6b24gV2ViIFNlcnZpY2VzLCBJ
+bmMuMRMwEQYDVQQLDApBbWF6b24gUkRTMQswCQYDVQQIDAJXQTEwMC4GA1UEAwwn
+QW1hem9uIFJEUyB1cy1lYXN0LTEgUm9vdCBDQSBSU0EyMDQ4IEcxMRAwDgYDVQQH
+DAdTZWF0dGxlMCAXDTIxMDUyNTIyMzQ1N1oYDzIwNjEwNTI1MjMzNDU3WjCBlzEL
+MAkGA1UEBhMCVVMxIjAgBgNVBAoMGUFtYXpvbiBXZWIgU2VydmljZXMsIEluYy4x
+EzARBgNVBAsMCkFtYXpvbiBSRFMxCzAJBgNVBAgMAldBMTAwLgYDVQQDDCdBbWF6
+b24gUkRTIHVzLWVhc3QtMSBSb290IENBIFJTQTIwNDggRzExEDAOBgNVBAcMB1Nl
+YXR0bGUwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQDu9H7TBeGoDzMr
+dxN6H8COntJX4IR6dbyhnj5qMD4xl/IWvp50lt0VpmMd+z2PNZzx8RazeGC5IniV
+5nrLg0AKWRQ2A/lGGXbUrGXCSe09brMQCxWBSIYe1WZZ1iU1IJ/6Bp4D2YEHpXrW
+bPkOq5x3YPcsoitgm1Xh8ygz6vb7PsvJvPbvRMnkDg5IqEThapPjmKb8ZJWyEFEE
+QRrkCIRueB1EqQtJw0fvP4PKDlCJAKBEs/y049FoOqYpT3pRy0WKqPhWve+hScMd
+6obq8kxTFy1IHACjHc51nrGII5Bt76/MpTWhnJIJrCnq1/Uc3Qs8IVeb+sLaFC8K
+DI69Sw6bAgMBAAGjQjBAMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFE7PCopt
+lyOgtXX0Y1lObBUxuKaCMA4GA1UdDwEB/wQEAwIBhjANBgkqhkiG9w0BAQsFAAOC
+AQEAFj+bX8gLmMNefr5jRJfHjrL3iuZCjf7YEZgn89pS4z8408mjj9z6Q5D1H7yS
+jNETVV8QaJip1qyhh5gRzRaArgGAYvi2/r0zPsy+Tgf7v1KGL5Lh8NT8iCEGGXwF
+g3Ir+Nl3e+9XUp0eyyzBIjHtjLBm6yy8rGk9p6OtFDQnKF5OxwbAgip42CD75r/q
+p421maEDDvvRFR4D+99JZxgAYDBGqRRceUoe16qDzbMvlz0A9paCZFclxeftAxv6
+QlR5rItMz/XdzpBJUpYhdzM0gCzAzdQuVO5tjJxmXhkSMcDP+8Q+Uv6FA9k2VpUV
+E/O5jgpqUJJ2Hc/5rs9VkAPXeA==
+-----END CERTIFICATE-----
+-----BEGIN CERTIFICATE-----
+MIICrzCCAjWgAwIBAgIQW0yuFCle3uj4vWiGU0SaGzAKBggqhkjOPQQDAzCBlzEL
+MAkGA1UEBhMCVVMxIjAgBgNVBAoMGUFtYXpvbiBXZWIgU2VydmljZXMsIEluYy4x
+EzARBgNVBAsMCkFtYXpvbiBSRFMxCzAJBgNVBAgMAldBMTAwLgYDVQQDDCdBbWF6
+b24gUkRTIGFmLXNvdXRoLTEgUm9vdCBDQSBFQ0MzODQgRzExEDAOBgNVBAcMB1Nl
+YXR0bGUwIBcNMjEwNTE5MTkzNTE2WhgPMjEyMTA1MTkyMDM1MTZaMIGXMQswCQYD
+VQQGEwJVUzEiMCAGA1UECgwZQW1hem9uIFdlYiBTZXJ2aWNlcywgSW5jLjETMBEG
+A1UECwwKQW1hem9uIFJEUzELMAkGA1UECAwCV0ExMDAuBgNVBAMMJ0FtYXpvbiBS
+RFMgYWYtc291dGgtMSBSb290IENBIEVDQzM4NCBHMTEQMA4GA1UEBwwHU2VhdHRs
+ZTB2MBAGByqGSM49AgEGBSuBBAAiA2IABDPiKNZSaXs3Un/J/v+LTsFDANHpi7en
+oL2qh0u0DoqNzEBTbBjvO23bLN3k599zh6CY3HKW0r2k1yaIdbWqt4upMCRCcUFi
+I4iedAmubgzh56wJdoMZztjXZRwDthTkJKNCMEAwDwYDVR0TAQH/BAUwAwEB/zAd
+BgNVHQ4EFgQUWbYkcrvVSnAWPR5PJhIzppcAnZIwDgYDVR0PAQH/BAQDAgGGMAoG
+CCqGSM49BAMDA2gAMGUCMCESGqpat93CjrSEjE7z+Hbvz0psZTHwqaxuiH64GKUm
+mYynIiwpKHyBrzjKBmeDoQIxANGrjIo6/b8Jl6sdIZQI18V0pAyLfLiZjlHVOnhM
+MOTVgr82ZuPoEHTX78MxeMnYlw==
+-----END CERTIFICATE-----
+-----BEGIN CERTIFICATE-----
+MIIECTCCAvGgAwIBAgIRAIbsx8XOl0sgTNiCN4O+18QwDQYJKoZIhvcNAQELBQAw
+gZwxCzAJBgNVBAYTAlVTMSIwIAYDVQQKDBlBbWF6b24gV2ViIFNlcnZpY2VzLCBJ
+bmMuMRMwEQYDVQQLDApBbWF6b24gUkRTMQswCQYDVQQIDAJXQTE1MDMGA1UEAwws
+QW1hem9uIFJEUyBhcC1ub3J0aGVhc3QtMSBSb290IENBIFJTQTIwNDggRzExEDAO
+BgNVBAcMB1NlYXR0bGUwIBcNMjEwNTI1MjE1NDU4WhgPMjA2MTA1MjUyMjU0NTha
+MIGcMQswCQYDVQQGEwJVUzEiMCAGA1UECgwZQW1hem9uIFdlYiBTZXJ2aWNlcywg
+SW5jLjETMBEGA1UECwwKQW1hem9uIFJEUzELMAkGA1UECAwCV0ExNTAzBgNVBAMM
+LEFtYXpvbiBSRFMgYXAtbm9ydGhlYXN0LTEgUm9vdCBDQSBSU0EyMDQ4IEcxMRAw
+DgYDVQQHDAdTZWF0dGxlMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA
+tROxwXWCgn5R9gI/2Ivjzaxc0g95ysBjoJsnhPdJEHQb7w3y2kWrVWU3Y9fOitgb
+CEsnEC3PrhRnzNVW0fPsK6kbvOeCmjvY30rdbxbc8h+bjXfGmIOgAkmoULEr6Hc7
+G1Q/+tvv4lEwIs7bEaf+abSZxRJbZ0MBxhbHn7UHHDiMZYvzK+SV1MGCxx7JVhrm
+xWu3GC1zZCsGDhB9YqY9eR6PmjbqA5wy8vqbC57dZZa1QVtWIQn3JaRXn+faIzHx
+nLMN5CEWihsdmHBXhnRboXprE/OS4MFv1UrQF/XM/h5RBeCywpHePpC+Oe1T3LNC
+iP8KzRFrjC1MX/WXJnmOVQIDAQABo0IwQDAPBgNVHRMBAf8EBTADAQH/MB0GA1Ud
+DgQWBBS33XbXAUMs1znyZo4B0+B3D68WFTAOBgNVHQ8BAf8EBAMCAYYwDQYJKoZI
+hvcNAQELBQADggEBADuadd2EmlpueY2VlrIIPC30QkoA1EOSoCmZgN6124apkoY1
+HiV4r+QNPljN4WP8gmcARnNkS7ZeR4fvWi8xPh5AxQCpiaBMw4gcbTMCuKDV68Pw
+P2dZCTMspvR3CDfM35oXCufdtFnxyU6PAyINUqF/wyTHguO3owRFPz64+sk3r2pT
+WHmJjG9E7V+KOh0s6REgD17Gqn6C5ijLchSrPUHB0wOIkeLJZndHxN/76h7+zhMt
+fFeNxPWHY2MfpcaLjz4UREzZPSB2U9k+y3pW1omCIcl6MQU9itGx/LpQE+H3ZeX2
+M2bdYd5L+ow+bdbGtsVKOuN+R9Dm17YpswF+vyQ=
+-----END CERTIFICATE-----
+-----BEGIN CERTIFICATE-----
+MIIGATCCA+mgAwIBAgIRAKlQ+3JX9yHXyjP/Ja6kZhkwDQYJKoZIhvcNAQEMBQAw
+gZgxCzAJBgNVBAYTAlVTMSIwIAYDVQQKDBlBbWF6b24gV2ViIFNlcnZpY2VzLCBJ
+bmMuMRMwEQYDVQQLDApBbWF6b24gUkRTMQswCQYDVQQIDAJXQTExMC8GA1UEAwwo
+QW1hem9uIFJEUyBhcC1zb3V0aC0xIFJvb3QgQ0EgUlNBNDA5NiBHMTEQMA4GA1UE
+BwwHU2VhdHRsZTAgFw0yMTA1MTkxNzQ1MjBaGA8yMTIxMDUxOTE4NDUyMFowgZgx
+CzAJBgNVBAYTAlVTMSIwIAYDVQQKDBlBbWF6b24gV2ViIFNlcnZpY2VzLCBJbmMu
+MRMwEQYDVQQLDApBbWF6b24gUkRTMQswCQYDVQQIDAJXQTExMC8GA1UEAwwoQW1h
+em9uIFJEUyBhcC1zb3V0aC0xIFJvb3QgQ0EgUlNBNDA5NiBHMTEQMA4GA1UEBwwH
+U2VhdHRsZTCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBAKtahBrpUjQ6
+H2mni05BAKU6Z5USPZeSKmBBJN3YgD17rJ93ikJxSgzJ+CupGy5rvYQ0xznJyiV0
+91QeQN4P+G2MjGQR0RGeUuZcfcZitJro7iAg3UBvw8WIGkcDUg+MGVpRv/B7ry88
+7E4OxKb8CPNoa+a9j6ABjOaaxaI22Bb7j3OJ+JyMICs6CU2bgkJaj3VUV9FCNUOc
+h9PxD4jzT9yyGYm/sK9BAT1WOTPG8XQUkpcFqy/IerZDfiQkf1koiSd4s5VhBkUn
+aQHOdri/stldT7a+HJFVyz2AXDGPDj+UBMOuLq0K6GAT6ThpkXCb2RIf4mdTy7ox
+N5BaJ+ih+Ro3ZwPkok60egnt/RN98jgbm+WstgjJWuLqSNInnMUgkuqjyBWwePqX
+Kib+wdpyx/LOzhKPEFpeMIvHQ3A0sjlulIjnh+j+itezD+dp0UNxMERlW4Bn/IlS
+sYQVNfYutWkRPRLErXOZXtlxxkI98JWQtLjvGzQr+jywxTiw644FSLWdhKa6DtfU
+2JWBHqQPJicMElfZpmfaHZjtXuCZNdZQXWg7onZYohe281ZrdFPOqC4rUq7gYamL
+T+ZB+2P+YCPOLJ60bj/XSvcB7mesAdg8P0DNddPhHUFWx2dFqOs1HxIVB4FZVA9U
+Ppbv4a484yxjTgG7zFZNqXHKTqze6rBBAgMBAAGjQjBAMA8GA1UdEwEB/wQFMAMB
+Af8wHQYDVR0OBBYEFCEAqjighncv/UnWzBjqu1Ka2Yb4MA4GA1UdDwEB/wQEAwIB
+hjANBgkqhkiG9w0BAQwFAAOCAgEAYyvumblckIXlohzi3QiShkZhqFzZultbFIu9
+GhA5CDar1IFMhJ9vJpO9nUK/camKs1VQRs8ZsBbXa0GFUM2p8y2cgUfLwFULAiC/
+sWETyW5lcX/xc4Pyf6dONhqFJt/ovVBxNZtcmMEWv/1D6Tf0nLeEb0P2i/pnSRR4
+Oq99LVFjossXtyvtaq06OSiUUZ1zLPvV6AQINg8dWeBOWRcQYhYcEcC2wQ06KShZ
+0ahuu7ar5Gym3vuLK6nH+eQrkUievVomN/LpASrYhK32joQ5ypIJej3sICIgJUEP
+UoeswJ+Z16f3ECoL1OSnq4A0riiLj1ZGmVHNhM6m/gotKaHNMxsK9zsbqmuU6IT/
+P6cR0S+vdigQG8ZNFf5vEyVNXhl8KcaJn6lMD/gMB2rY0qpaeTg4gPfU5wcg8S4Y
+C9V//tw3hv0f2n+8kGNmqZrylOQDQWSSo8j8M2SRSXiwOHDoTASd1fyBEIqBAwzn
+LvXVg8wQd1WlmM3b0Vrsbzltyh6y4SuKSkmgufYYvC07NknQO5vqvZcNoYbLNea3
+76NkFaMHUekSbwVejZgG5HGwbaYBgNdJEdpbWlA3X4yGRVxknQSUyt4dZRnw/HrX
+k8x6/wvtw7wht0/DOqz1li7baSsMazqxx+jDdSr1h9xML416Q4loFCLgqQhil8Jq
+Em4Hy3A=
+-----END CERTIFICATE-----
+-----BEGIN CERTIFICATE-----
+MIIEBDCCAuygAwIBAgIQFn6AJ+uxaPDpNVx7174CpjANBgkqhkiG9w0BAQsFADCB
+mjELMAkGA1UEBhMCVVMxIjAgBgNVBAoMGUFtYXpvbiBXZWIgU2VydmljZXMsIElu
+Yy4xEzARBgNVBAsMCkFtYXpvbiBSRFMxCzAJBgNVBAgMAldBMTMwMQYDVQQDDCpB
+bWF6b24gUkRTIGlsLWNlbnRyYWwtMSBSb290IENBIFJTQTIwNDggRzExEDAOBgNV
+BAcMB1NlYXR0bGUwIBcNMjIxMjAyMjAxNDA4WhgPMjA2MjEyMDIyMTE0MDhaMIGa
+MQswCQYDVQQGEwJVUzEiMCAGA1UECgwZQW1hem9uIFdlYiBTZXJ2aWNlcywgSW5j
+LjETMBEGA1UECwwKQW1hem9uIFJEUzELMAkGA1UECAwCV0ExMzAxBgNVBAMMKkFt
+YXpvbiBSRFMgaWwtY2VudHJhbC0xIFJvb3QgQ0EgUlNBMjA0OCBHMTEQMA4GA1UE
+BwwHU2VhdHRsZTCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAL2xGTSJ
+fXorki/dkkTqdLyv4U1neeFYEyUCPN/HJ7ZloNwhj8RBrHYhZ4qtvUAvN+rs8fUm
+L0wmaL69ye61S+CSfDzNwBDGwOzUm/cc1NEJOHCm8XA0unBNBvpJTjsFk2LQ+rz8
+oU0lVV4mjnfGektrTDeADonO1adJvUTYmF6v1wMnykSkp8AnW9EG/6nwcAJuAJ7d
+BfaLThm6lfxPdsBNG81DLKi2me2TLQ4yl+vgRKJi2fJWwA77NaDqQuD5upRIcQwt
+5noJt2kFFmeiro98ZMMRaDTHAHhJfWkwkw5f2QNIww7T4r85IwbQCgJVRo4m4ZTC
+W/1eiEccU2407mECAwEAAaNCMEAwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQU
+DNhVvGHzKXv0Yh6asK0apP9jJlUwDgYDVR0PAQH/BAQDAgGGMA0GCSqGSIb3DQEB
+CwUAA4IBAQCoEVTUY/rF9Zrlpb1Y1hptEguw0i2pCLakcmv3YNj6thsubbGeGx8Z
+RjUA/gPKirpoae2HU1y64WEu7akwr6pdTRtXXjbe9NReT6OW/0xAwceSXCOiStqS
+cMsWWTGg6BA3uHqad5clqITjDZr1baQ8X8en4SXRBxXyhJXbOkB60HOQeFR9CNeh
+pJdrWLeNYXwU0Z59juqdVMGwvDAYdugWUhW2rhafVUXszfRA5c8Izc+E31kq90aY
+LmxFXUHUfG0eQOmxmg+Z/nG7yLUdHIFA3id8MRh22hye3KvRdQ7ZVGFni0hG2vQQ
+Q01AvD/rhzyjg0czzJKLK9U/RttwdMaV
+-----END CERTIFICATE-----
+-----BEGIN CERTIFICATE-----
+MIIGBTCCA+2gAwIBAgIRAJfKe4Zh4aWNt3bv6ZjQwogwDQYJKoZIhvcNAQEMBQAw
+gZoxCzAJBgNVBAYTAlVTMSIwIAYDVQQKDBlBbWF6b24gV2ViIFNlcnZpY2VzLCBJ
+bmMuMRMwEQYDVQQLDApBbWF6b24gUkRTMQswCQYDVQQIDAJXQTEzMDEGA1UEAwwq
+QW1hem9uIFJEUyBjYS1jZW50cmFsLTEgUm9vdCBDQSBSU0E0MDk2IEcxMRAwDgYD
+VQQHDAdTZWF0dGxlMCAXDTIxMDUyMTIyMDg1M1oYDzIxMjEwNTIxMjMwODUzWjCB
+mjELMAkGA1UEBhMCVVMxIjAgBgNVBAoMGUFtYXpvbiBXZWIgU2VydmljZXMsIElu
+Yy4xEzARBgNVBAsMCkFtYXpvbiBSRFMxCzAJBgNVBAgMAldBMTMwMQYDVQQDDCpB
+bWF6b24gUkRTIGNhLWNlbnRyYWwtMSBSb290IENBIFJTQTQwOTYgRzExEDAOBgNV
+BAcMB1NlYXR0bGUwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQCpgUH6
+Crzd8cOw9prAh2rkQqAOx2vtuI7xX4tmBG4I/um28eBjyVmgwQ1fpq0Zg2nCKS54
+Nn0pCmT7f3h6Bvopxn0J45AzXEtajFqXf92NQ3iPth95GVfAJSD7gk2LWMhpmID9
+JGQyoGuDPg+hYyr292X6d0madzEktVVGO4mKTF989qEg+tY8+oN0U2fRTrqa2tZp
+iYsmg350ynNopvntsJAfpCO/srwpsqHHLNFZ9jvhTU8uW90wgaKO9i31j/mHggCE
++CAOaJCM3g+L8DPl/2QKsb6UkBgaaIwKyRgKSj1IlgrK+OdCBCOgM9jjId4Tqo2j
+ZIrrPBGl6fbn1+etZX+2/tf6tegz+yV0HHQRAcKCpaH8AXF44bny9andslBoNjGx
+H6R/3ib4FhPrnBMElzZ5i4+eM/cuPC2huZMBXb/jKgRC/QN1Wm3/nah5FWq+yn+N
+tiAF10Ga0BYzVhHDEwZzN7gn38bcY5yi/CjDUNpY0OzEe2+dpaBKPlXTaFfn9Nba
+CBmXPRF0lLGGtPeTAgjcju+NEcVa82Ht1pqxyu2sDtbu3J5bxp4RKtj+ShwN8nut
+Tkf5Ea9rSmHEY13fzgibZlQhXaiFSKA2ASUwgJP19Putm0XKlBCNSGCoECemewxL
++7Y8FszS4Uu4eaIwvXVqUEE2yf+4ex0hqQ1acQIDAQABo0IwQDAPBgNVHRMBAf8E
+BTADAQH/MB0GA1UdDgQWBBSeUnXIRxNbYsZLtKomIz4Y1nOZEzAOBgNVHQ8BAf8E
+BAMCAYYwDQYJKoZIhvcNAQEMBQADggIBAIpRvxVS0dzoosBh/qw65ghPUGSbP2D4
+dm6oYCv5g/zJr4fR7NzEbHOXX5aOQnHbQL4M/7veuOCLNPOW1uXwywMg6gY+dbKe
+YtPVA1as8G9sUyadeXyGh2uXGsziMFXyaESwiAXZyiYyKChS3+g26/7jwECFo5vC
+XGhWpIO7Hp35Yglp8AnwnEAo/PnuXgyt2nvyTSrxlEYa0jus6GZEZd77pa82U1JH
+qFhIgmKPWWdvELA3+ra1nKnvpWM/xX0pnMznMej5B3RT3Y+k61+kWghJE81Ix78T
++tG4jSotgbaL53BhtQWBD1yzbbilqsGE1/DXPXzHVf9yD73fwh2tGWSaVInKYinr
+a4tcrB3KDN/PFq0/w5/21lpZjVFyu/eiPj6DmWDuHW73XnRwZpHo/2OFkei5R7cT
+rn/YdDD6c1dYtSw5YNnS6hdCQ3sOiB/xbPRN9VWJa6se79uZ9NLz6RMOr73DNnb2
+bhIR9Gf7XAA5lYKqQk+A+stoKbIT0F65RnkxrXi/6vSiXfCh/bV6B41cf7MY/6YW
+ehserSdjhQamv35rTFdM+foJwUKz1QN9n9KZhPxeRmwqPitAV79PloksOnX25ElN
+SlyxdndIoA1wia1HRd26EFm2pqfZ2vtD2EjU3wD42CXX4H8fKVDna30nNFSYF0yn
+jGKc3k6UNxpg
+-----END CERTIFICATE-----
+-----BEGIN CERTIFICATE-----
+MIIF/jCCA+agAwIBAgIQaRHaEqqacXN20e8zZJtmDDANBgkqhkiG9w0BAQwFADCB
+lzELMAkGA1UEBhMCVVMxIjAgBgNVBAoMGUFtYXpvbiBXZWIgU2VydmljZXMsIElu
+Yy4xEzARBgNVBAsMCkFtYXpvbiBSRFMxCzAJBgNVBAgMAldBMTAwLgYDVQQDDCdB
+bWF6b24gUkRTIHVzLWVhc3QtMSBSb290IENBIFJTQTQwOTYgRzExEDAOBgNVBAcM
+B1NlYXR0bGUwIBcNMjEwNTI1MjIzODM1WhgPMjEyMTA1MjUyMzM4MzVaMIGXMQsw
+CQYDVQQGEwJVUzEiMCAGA1UECgwZQW1hem9uIFdlYiBTZXJ2aWNlcywgSW5jLjET
+MBEGA1UECwwKQW1hem9uIFJEUzELMAkGA1UECAwCV0ExMDAuBgNVBAMMJ0FtYXpv
+biBSRFMgdXMtZWFzdC0xIFJvb3QgQ0EgUlNBNDA5NiBHMTEQMA4GA1UEBwwHU2Vh
+dHRsZTCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBAInfBCaHuvj6Rb5c
+L5Wmn1jv2PHtEGMHm+7Z8dYosdwouG8VG2A+BCYCZfij9lIGszrTXkY4O7vnXgru
+JUNdxh0Q3M83p4X+bg+gODUs3jf+Z3Oeq7nTOk/2UYvQLcxP4FEXILxDInbQFcIx
+yen1ESHggGrjEodgn6nbKQNRfIhjhW+TKYaewfsVWH7EF2pfj+cjbJ6njjgZ0/M9
+VZifJFBgat6XUTOf3jwHwkCBh7T6rDpgy19A61laImJCQhdTnHKvzTpxcxiLRh69
+ZObypR7W04OAUmFS88V7IotlPmCL8xf7kwxG+gQfvx31+A9IDMsiTqJ1Cc4fYEKg
+bL+Vo+2Ii4W2esCTGVYmHm73drznfeKwL+kmIC/Bq+DrZ+veTqKFYwSkpHRyJCEe
+U4Zym6POqQ/4LBSKwDUhWLJIlq99bjKX+hNTJykB+Lbcx0ScOP4IAZQoxmDxGWxN
+S+lQj+Cx2pwU3S/7+OxlRndZAX/FKgk7xSMkg88HykUZaZ/ozIiqJqSnGpgXCtED
+oQ4OJw5ozAr+/wudOawaMwUWQl5asD8fuy/hl5S1nv9XxIc842QJOtJFxhyeMIXt
+LVECVw/dPekhMjS3Zo3wwRgYbnKG7YXXT5WMxJEnHu8+cYpMiRClzq2BEP6/MtI2
+AZQQUFu2yFjRGL2OZA6IYjxnXYiRAgMBAAGjQjBAMA8GA1UdEwEB/wQFMAMBAf8w
+HQYDVR0OBBYEFADCcQCPX2HmkqQcmuHfiQ2jjqnrMA4GA1UdDwEB/wQEAwIBhjAN
+BgkqhkiG9w0BAQwFAAOCAgEASXkGQ2eUmudIKPeOIF7RBryCoPmMOsqP0+1qxF8l
+pGkwmrgNDGpmd9s0ArfIVBTc1jmpgB3oiRW9c6n2OmwBKL4UPuQ8O3KwSP0iD2sZ
+KMXoMEyphCEzW1I2GRvYDugL3Z9MWrnHkoaoH2l8YyTYvszTvdgxBPpM2x4pSkp+
+76d4/eRpJ5mVuQ93nC+YG0wXCxSq63hX4kyZgPxgCdAA+qgFfKIGyNqUIqWgeyTP
+n5OgKaboYk2141Rf2hGMD3/hsGm0rrJh7g3C0ZirPws3eeJfulvAOIy2IZzqHUSY
+jkFzraz6LEH3IlArT3jUPvWKqvh2lJWnnp56aqxBR7qHH5voD49UpJWY1K0BjGnS
+OHcurpp0Yt/BIs4VZeWdCZwI7JaSeDcPMaMDBvND3Ia5Fga0thgYQTG6dE+N5fgF
+z+hRaujXO2nb0LmddVyvE8prYlWRMuYFv+Co8hcMdJ0lEZlfVNu0jbm9/GmwAZ+l
+9umeYO9yz/uC7edC8XJBglMAKUmVK9wNtOckUWAcCfnPWYLbYa/PqtXBYcxrso5j
+iaS/A7iEW51uteHBGrViCy1afGG+hiUWwFlesli+Rq4dNstX3h6h2baWABaAxEVJ
+y1RnTQSz6mROT1VmZSgSVO37rgIyY0Hf0872ogcTS+FfvXgBxCxsNWEbiQ/XXva4
+0Ws=
+-----END CERTIFICATE-----
+-----BEGIN CERTIFICATE-----
+MIICtDCCAjqgAwIBAgIRAMyaTlVLN0ndGp4ffwKAfoMwCgYIKoZIzj0EAwMwgZkx
+CzAJBgNVBAYTAlVTMSIwIAYDVQQKDBlBbWF6b24gV2ViIFNlcnZpY2VzLCBJbmMu
+MRMwEQYDVQQLDApBbWF6b24gUkRTMQswCQYDVQQIDAJXQTEyMDAGA1UEAwwpQW1h
+em9uIFJEUyBtZS1jZW50cmFsLTEgUm9vdCBDQSBFQ0MzODQgRzExEDAOBgNVBAcM
+B1NlYXR0bGUwIBcNMjIwNTA3MDA0NDM3WhgPMjEyMjA1MDcwMTQ0MzdaMIGZMQsw
+CQYDVQQGEwJVUzEiMCAGA1UECgwZQW1hem9uIFdlYiBTZXJ2aWNlcywgSW5jLjET
+MBEGA1UECwwKQW1hem9uIFJEUzELMAkGA1UECAwCV0ExMjAwBgNVBAMMKUFtYXpv
+biBSRFMgbWUtY2VudHJhbC0xIFJvb3QgQ0EgRUNDMzg0IEcxMRAwDgYDVQQHDAdT
+ZWF0dGxlMHYwEAYHKoZIzj0CAQYFK4EEACIDYgAE19nCV1nsI6CohSor13+B25cr
+zg+IHdi9Y3L7ziQnHWI6yjBazvnKD+oC71aRRlR8b5YXsYGUQxWzPLHN7EGPcSGv
+bzA9SLG1KQYCJaQ0m9Eg/iGrwKWOgylbhVw0bCxoo0IwQDAPBgNVHRMBAf8EBTAD
+AQH/MB0GA1UdDgQWBBS4KsknsJXM9+QPEkBdZxUPaLr11zAOBgNVHQ8BAf8EBAMC
+AYYwCgYIKoZIzj0EAwMDaAAwZQIxAJaRgrYIEfXQMZQQDxMTYS0azpyWSseQooXo
+L3nYq4OHGBgYyQ9gVjvRYWU85PXbfgIwdi82DtANQFkCu+j+BU0JBY/uRKPEeYzo
+JG92igKIcXPqCoxIJ7lJbbzmuf73gQu5
+-----END CERTIFICATE-----
+-----BEGIN CERTIFICATE-----
+MIIGATCCA+mgAwIBAgIRAJwCobx0Os8F7ihbJngxrR8wDQYJKoZIhvcNAQEMBQAw
+gZgxCzAJBgNVBAYTAlVTMSIwIAYDVQQKDBlBbWF6b24gV2ViIFNlcnZpY2VzLCBJ
+bmMuMRMwEQYDVQQLDApBbWF6b24gUkRTMQswCQYDVQQIDAJXQTExMC8GA1UEAwwo
+QW1hem9uIFJEUyBtZS1zb3V0aC0xIFJvb3QgQ0EgUlNBNDA5NiBHMTEQMA4GA1UE
+BwwHU2VhdHRsZTAgFw0yMTA1MjAxNzE1MzNaGA8yMTIxMDUyMDE4MTUzM1owgZgx
+CzAJBgNVBAYTAlVTMSIwIAYDVQQKDBlBbWF6b24gV2ViIFNlcnZpY2VzLCBJbmMu
+MRMwEQYDVQQLDApBbWF6b24gUkRTMQswCQYDVQQIDAJXQTExMC8GA1UEAwwoQW1h
+em9uIFJEUyBtZS1zb3V0aC0xIFJvb3QgQ0EgUlNBNDA5NiBHMTEQMA4GA1UEBwwH
+U2VhdHRsZTCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBANukKwlm+ZaI
+Y5MkWGbEVLApEyLmlrHLEg8PfiiEa9ts7jssQcin3bzEPdTqGr5jo91ONoZ3ccWq
+xJgg1W3bLu5CAO2CqIOXTXHRyCO/u0Ch1FGgWB8xETPSi3UHt/Vn1ltdO6DYdbDU
+mYgwzYrvLBdRCwxsb9o+BuYQHVFzUYonqk/y9ujz3gotzFq7r55UwDTA1ita3vb4
+eDKjIb4b1M4Wr81M23WHonpje+9qkkrAkdQcHrkgvSCV046xsq/6NctzwCUUNsgF
+7Q1a8ut5qJEYpz5ta8vI1rqFqAMBqCbFjRYlmAoTTpFPOmzAVxV+YoqTrW5A16su
+/2SXlMYfJ/n/ad/QfBNPPAAQMpyOr2RCL/YiL/PFZPs7NxYjnZHNWxMLSPgFyI+/
+t2klnn5jR76KJK2qimmaXedB90EtFsMRUU1e4NxH9gDuyrihKPJ3aVnZ35mSipvR
+/1KB8t8gtFXp/VQaz2sg8+uxPMKB81O37fL4zz6Mg5K8+aq3ejBiyHucpFGnsnVB
+3kQWeD36ONkybngmgWoyPceuSWm1hQ0Z7VRAQX+KlxxSaHmSaIk1XxZu9h9riQHx
+fMuev6KXjRn/CjCoUTn+7eFrt0dT5GryQEIZP+nA0oq0LKxogigHNZlwAT4flrqb
+JUfZJrqgoce5HjZSXl10APbtPjJi0fW9AgMBAAGjQjBAMA8GA1UdEwEB/wQFMAMB
+Af8wHQYDVR0OBBYEFEfV+LztI29OVDRm0tqClP3NrmEWMA4GA1UdDwEB/wQEAwIB
+hjANBgkqhkiG9w0BAQwFAAOCAgEAvSNe+0wuk53KhWlRlRf2x/97H2Q76X3anzF0
+5fOSVm022ldALzXMzqOfdnoKIhAu2oVKiHHKs7mMas+T6TL+Mkphx0CYEVxFE3PG
+061q3CqJU+wMm9W9xsB79oB2XG47r1fIEywZZ3GaRsatAbjcNOT8uBaATPQAfJFN
+zjFe4XyN+rA4cFrYNvfHTeu5ftrYmvks7JlRaJgEGWsz+qXux7uvaEEVPqEumd2H
+uYeaRNOZ2V23R009X5lbgBFx9tq5VDTnKhQiTQ2SeT0rc1W3Dz5ik6SbQQNP3nSR
+0Ywy7r/sZ3fcDyfFiqnrVY4Ympfvb4YW2PZ6OsQJbzH6xjdnTG2HtzEU30ngxdp1
+WUEF4zt6rjJCp7QBUqXgdlHvJqYu6949qtWjEPiFN9uSsRV2i1YDjJqN52dLjAPn
+AipJKo8x1PHTwUzuITqnB9BdP+5TlTl8biJfkEf/+08eWDTLlDHr2VrZLOLompTh
+bS5OrhDmqA2Q+O+EWrTIhMflwwlCpR9QYM/Xwvlbad9H0FUHbJsCVNaru3wGOgWo
+tt3dNSK9Lqnv/Ej9K9v6CRr36in4ylJKivhJ5B9E7ABHg7EpBJ1xi7O5eNDkNoJG
++pFyphJq3AkBR2U4ni2tUaTAtSW2tks7IaiDV+UMtqZyGabT5ISQfWLLtLHSWn2F
+Tspdjbg=
+-----END CERTIFICATE-----
+-----BEGIN CERTIFICATE-----
+MIIECTCCAvGgAwIBAgIRAJZFh4s9aZGzKaTMLrSb4acwDQYJKoZIhvcNAQELBQAw
+gZwxCzAJBgNVBAYTAlVTMSIwIAYDVQQKDBlBbWF6b24gV2ViIFNlcnZpY2VzLCBJ
+bmMuMRMwEQYDVQQLDApBbWF6b24gUkRTMQswCQYDVQQIDAJXQTE1MDMGA1UEAwws
+QW1hem9uIFJEUyBCZXRhIHVzLWVhc3QtMSBSb290IENBIFJTQTIwNDggRzExEDAO
+BgNVBAcMB1NlYXR0bGUwIBcNMjEwNTE4MjEyODQxWhgPMjA2MTA1MTgyMjI4NDFa
+MIGcMQswCQYDVQQGEwJVUzEiMCAGA1UECgwZQW1hem9uIFdlYiBTZXJ2aWNlcywg
+SW5jLjETMBEGA1UECwwKQW1hem9uIFJEUzELMAkGA1UECAwCV0ExNTAzBgNVBAMM
+LEFtYXpvbiBSRFMgQmV0YSB1cy1lYXN0LTEgUm9vdCBDQSBSU0EyMDQ4IEcxMRAw
+DgYDVQQHDAdTZWF0dGxlMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA
+17i2yoU6diep+WrqxIn2CrDEO2NdJVwWTSckx4WMZlLpkQDoymSmkNHjq9ADIApD
+A31Cx+843apL7wub8QkFZD0Tk7/ThdHWJOzcAM3ov98QBPQfOC1W5zYIIRP2F+vQ
+TRETHQnLcW3rLv0NMk5oQvIKpJoC9ett6aeVrzu+4cU4DZVWYlJUoC/ljWzCluau
+8blfW0Vwin6OB7s0HCG5/wijQWJBU5SrP/KAIPeQi1GqG5efbqAXDr/ple0Ipwyo
+Xjjl73LenGUgqpANlC9EAT4i7FkJcllLPeK3NcOHjuUG0AccLv1lGsHAxZLgjk/x
+z9ZcnVV9UFWZiyJTKxeKPwIDAQABo0IwQDAPBgNVHRMBAf8EBTADAQH/MB0GA1Ud
+DgQWBBRWyMuZUo4gxCR3Luf9/bd2AqZ7CjAOBgNVHQ8BAf8EBAMCAYYwDQYJKoZI
+hvcNAQELBQADggEBAIqN2DlIKlvDFPO0QUZQVFbsi/tLdYM98/vvzBpttlTGVMyD
+gJuQeHVz+MnhGIwoCGOlGU3OOUoIlLAut0+WG74qYczn43oA2gbMd7HoD7oL/IGg
+njorBwJVcuuLv2G//SqM3nxGcLRtkRnQ+lvqPxMz9+0fKFUn6QcIDuF0QSfthLs2
+WSiGEPKO9c9RSXdRQ4pXA7c3hXng8P4A2ZmdciPne5Nu4I4qLDGZYRrRLRkNTrOi
+TyS6r2HNGUfgF7eOSeKt3NWL+mNChcYj71/Vycf5edeczpUgfnWy9WbPrK1svKyl
+aAs2xg+X6O8qB+Mnj2dNBzm+lZIS3sIlm+nO9sg=
+-----END CERTIFICATE-----
+-----BEGIN CERTIFICATE-----
+MIICrjCCAjSgAwIBAgIRAPAlEk8VJPmEzVRRaWvTh2AwCgYIKoZIzj0EAwMwgZYx
+CzAJBgNVBAYTAlVTMSIwIAYDVQQKDBlBbWF6b24gV2ViIFNlcnZpY2VzLCBJbmMu
+MRMwEQYDVQQLDApBbWF6b24gUkRTMQswCQYDVQQIDAJXQTEvMC0GA1UEAwwmQW1h
+em9uIFJEUyB1cy1lYXN0LTEgUm9vdCBDQSBFQ0MzODQgRzExEDAOBgNVBAcMB1Nl
+YXR0bGUwIBcNMjEwNTI1MjI0MTU1WhgPMjEyMTA1MjUyMzQxNTVaMIGWMQswCQYD
+VQQGEwJVUzEiMCAGA1UECgwZQW1hem9uIFdlYiBTZXJ2aWNlcywgSW5jLjETMBEG
+A1UECwwKQW1hem9uIFJEUzELMAkGA1UECAwCV0ExLzAtBgNVBAMMJkFtYXpvbiBS
+RFMgdXMtZWFzdC0xIFJvb3QgQ0EgRUNDMzg0IEcxMRAwDgYDVQQHDAdTZWF0dGxl
+MHYwEAYHKoZIzj0CAQYFK4EEACIDYgAEx5xjrup8II4HOJw15NTnS3H5yMrQGlbj
+EDA5MMGnE9DmHp5dACIxmPXPMe/99nO7wNdl7G71OYPCgEvWm0FhdvVUeTb3LVnV
+BnaXt32Ek7/oxGk1T+Df03C+W0vmuJ+wo0IwQDAPBgNVHRMBAf8EBTADAQH/MB0G
+A1UdDgQWBBTGXmqBWN/1tkSea4pNw0oHrjk2UDAOBgNVHQ8BAf8EBAMCAYYwCgYI
+KoZIzj0EAwMDaAAwZQIxAIqqZWCSrIkZ7zsv/FygtAusW6yvlL935YAWYPVXU30m
+jkMFLM+/RJ9GMvnO8jHfCgIwB+whlkcItzE9CRQ6CsMo/d5cEHDUu/QW6jSIh9BR
+OGh9pTYPVkUbBiKPA7lVVhre
+-----END CERTIFICATE-----
+-----BEGIN CERTIFICATE-----
+MIIF/zCCA+egAwIBAgIRAJGY9kZITwfSRaAS/bSBOw8wDQYJKoZIhvcNAQEMBQAw
+gZcxCzAJBgNVBAYTAlVTMSIwIAYDVQQKDBlBbWF6b24gV2ViIFNlcnZpY2VzLCBJ
+bmMuMRMwEQYDVQQLDApBbWF6b24gUkRTMQswCQYDVQQIDAJXQTEwMC4GA1UEAwwn
+QW1hem9uIFJEUyBzYS1lYXN0LTEgUm9vdCBDQSBSU0E0MDk2IEcxMRAwDgYDVQQH
+DAdTZWF0dGxlMCAXDTIxMDUxOTE4MTEyMFoYDzIxMjEwNTE5MTkxMTIwWjCBlzEL
+MAkGA1UEBhMCVVMxIjAgBgNVBAoMGUFtYXpvbiBXZWIgU2VydmljZXMsIEluYy4x
+EzARBgNVBAsMCkFtYXpvbiBSRFMxCzAJBgNVBAgMAldBMTAwLgYDVQQDDCdBbWF6
+b24gUkRTIHNhLWVhc3QtMSBSb290IENBIFJTQTQwOTYgRzExEDAOBgNVBAcMB1Nl
+YXR0bGUwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQDe2vlDp6Eo4WQi
+Wi32YJOgdXHhxTFrLjB9SRy22DYoMaWfginJIwJcSR8yse8ZDQuoNhERB9LRggAE
+eng23mhrfvtL1yQkMlZfBu4vG1nOb22XiPFzk7X2wqz/WigdYNBCqa1kK3jrLqPx
+YUy7jk2oZle4GLVRTNGuMfcid6S2hs3UCdXfkJuM2z2wc3WUlvHoVNk37v2/jzR/
+hSCHZv5YHAtzL/kLb/e64QkqxKll5QmKhyI6d7vt6Lr1C0zb+DmwxUoJhseAS0hI
+dRk5DklMb4Aqpj6KN0ss0HAYqYERGRIQM7KKA4+hxDMUkJmt8KqWKZkAlCZgflzl
+m8NZ31o2cvBzf6g+VFHx+6iVrSkohVQydkCxx7NJ743iPKsh8BytSM4qU7xx4OnD
+H2yNXcypu+D5bZnVZr4Pywq0w0WqbTM2bpYthG9IC4JeVUvZ2mDc01lqOlbMeyfT
+og5BRPLDXdZK8lapo7se2teh64cIfXtCmM2lDSwm1wnH2iSK+AWZVIM3iE45WSGc
+vZ+drHfVgjJJ5u1YrMCWNL5C2utFbyF9Obw9ZAwm61MSbPQL9JwznhNlCh7F2ANW
+ZHWQPNcOAJqzE4uVcJB1ZeVl28ORYY1668lx+s9yYeMXk3QQdj4xmdnvoBFggqRB
+ZR6Z0D7ZohADXe024RzEo1TukrQgKQIDAQABo0IwQDAPBgNVHRMBAf8EBTADAQH/
+MB0GA1UdDgQWBBT7Vs4Y5uG/9aXnYGNMEs6ycPUT3jAOBgNVHQ8BAf8EBAMCAYYw
+DQYJKoZIhvcNAQEMBQADggIBACN4Htp2PvGcQA0/sAS+qUVWWJoAXSsu8Pgc6Gar
+7tKVlNJ/4W/a6pUV2Xo/Tz3msg4yiE8sMESp2k+USosD5n9Alai5s5qpWDQjrqrh
+76AGyF2nzve4kIN19GArYhm4Mz/EKEG1QHYvBDGgXi3kNvL/a2Zbybp+3LevG+q7
+xtx4Sz9yIyMzuT/6Y7ijtiMZ9XbuxGf5wab8UtwT3Xq1UradJy0KCkzRJAz/Wy/X
+HbTkEvKSaYKExH6sLo0jqdIjV/d2Io31gt4e0Ly1ER2wPyFa+pc/swu7HCzrN+iz
+A2ZM4+KX9nBvFyfkHLix4rALg+WTYJa/dIsObXkdZ3z8qPf5A9PXlULiaa1mcP4+
+rokw74IyLEYooQ8iSOjxumXhnkTS69MAdGzXYE5gnHokABtGD+BB5qLhtLt4fqAp
+8AyHpQWMyV42M9SJLzQ+iOz7kAgJOBOaVtJI3FV/iAg/eqWVm3yLuUTWDxSHrKuL
+N19+pSjF6TNvUSFXwEa2LJkfDqIOCE32iOuy85QY//3NsgrSQF6UkSPa95eJrSGI
+3hTRYYh3Up2GhBGl1KUy7/o0k3KRZTk4s38fylY8bZ3TakUOH5iIGoHyFVVcp361
+Pyy25SzFSmNalWoQd9wZVc/Cps2ldxhcttM+WLkFNzprd0VJa8qTz8vYtHP0ouDN
+nWS0
+-----END CERTIFICATE-----
+-----BEGIN CERTIFICATE-----
+MIICtDCCAjmgAwIBAgIQKKqVZvk6NsLET+uYv5myCzAKBggqhkjOPQQDAzCBmTEL
+MAkGA1UEBhMCVVMxIjAgBgNVBAoMGUFtYXpvbiBXZWIgU2VydmljZXMsIEluYy4x
+EzARBgNVBAsMCkFtYXpvbiBSRFMxCzAJBgNVBAgMAldBMTIwMAYDVQQDDClBbWF6
+b24gUkRTIGlsLWNlbnRyYWwtMSBSb290IENBIEVDQzM4NCBHMTEQMA4GA1UEBwwH
+U2VhdHRsZTAgFw0yMjEyMDIyMDMyMjBaGA8yMTIyMTIwMjIxMzIyMFowgZkxCzAJ
+BgNVBAYTAlVTMSIwIAYDVQQKDBlBbWF6b24gV2ViIFNlcnZpY2VzLCBJbmMuMRMw
+EQYDVQQLDApBbWF6b24gUkRTMQswCQYDVQQIDAJXQTEyMDAGA1UEAwwpQW1hem9u
+IFJEUyBpbC1jZW50cmFsLTEgUm9vdCBDQSBFQ0MzODQgRzExEDAOBgNVBAcMB1Nl
+YXR0bGUwdjAQBgcqhkjOPQIBBgUrgQQAIgNiAASYwfvj8BmvLAP6UkNQ4X4dXBB/
+webBO7swW+8HnFN2DAu+Cn/lpcDpu+dys1JmkVX435lrCH3oZjol0kCDIM1lF4Cv
++78yoY1Jr/YMat22E4iz4AZd9q0NToS7+ZA0r2yjQjBAMA8GA1UdEwEB/wQFMAMB
+Af8wHQYDVR0OBBYEFO/8Py16qPr7J2GWpvxlTMB+op7XMA4GA1UdDwEB/wQEAwIB
+hjAKBggqhkjOPQQDAwNpADBmAjEAwk+rg788+u8JL6sdix7l57WTo8E/M+o3TO5x
+uRuPdShrBFm4ArGR2PPs4zCQuKgqAjEAi0TA3PVqAxKpoz+Ps8/054p9WTgDfBFZ
+i/lm2yTaPs0xjY6FNWoy7fsVw5oEKxOn
+-----END CERTIFICATE-----
+-----BEGIN CERTIFICATE-----
+MIIGCTCCA/GgAwIBAgIRAOY7gfcBZgR2tqfBzMbFQCUwDQYJKoZIhvcNAQEMBQAw
+gZwxCzAJBgNVBAYTAlVTMSIwIAYDVQQKDBlBbWF6b24gV2ViIFNlcnZpY2VzLCBJ
+bmMuMRMwEQYDVQQLDApBbWF6b24gUkRTMQswCQYDVQQIDAJXQTE1MDMGA1UEAwws
+QW1hem9uIFJEUyBhcC1zb3V0aGVhc3QtNCBSb290IENBIFJTQTQwOTYgRzExEDAO
+BgNVBAcMB1NlYXR0bGUwIBcNMjIwNTI1MTY1NDU5WhgPMjEyMjA1MjUxNzU0NTla
+MIGcMQswCQYDVQQGEwJVUzEiMCAGA1UECgwZQW1hem9uIFdlYiBTZXJ2aWNlcywg
+SW5jLjETMBEGA1UECwwKQW1hem9uIFJEUzELMAkGA1UECAwCV0ExNTAzBgNVBAMM
+LEFtYXpvbiBSRFMgYXAtc291dGhlYXN0LTQgUm9vdCBDQSBSU0E0MDk2IEcxMRAw
+DgYDVQQHDAdTZWF0dGxlMIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEA
+lfxER43FuLRdL08bddF0YhbCP+XXKj1A/TFMXmd2My8XDei8rPXFYyyjMig9+xZw
+uAsIxLwz8uiA26CKA8bCZKg5VG2kTeOJAfvBJaLv1CZefs3Z4Uf1Sjvm6MF2yqEj
+GoORfyfL9HiZFTDuF/hcjWoKYCfMuG6M/wO8IbdICrX3n+BiYQJu/pFO660Mg3h/
+8YBBWYDbHoCiH/vkqqJugQ5BM3OI5nsElW51P1icEEqti4AZ7JmtSv9t7fIFBVyR
+oaEyOgpp0sm193F/cDJQdssvjoOnaubsSYm1ep3awZAUyGN/X8MBrPY95d0hLhfH
+Ehc5Icyg+hsosBljlAyksmt4hFQ9iBnWIz/ZTfGMck+6p3HVL9RDgvluez+rWv59
+8q7omUGsiPApy5PDdwI/Wt/KtC34/2sjslIJfvgifdAtkRPkhff1WEwER00ADrN9
+eGGInaCpJfb1Rq8cV2n00jxg7DcEd65VR3dmIRb0bL+jWK62ni/WdEyomAOMfmGj
+aWf78S/4rasHllWJ+QwnaUYY3u6N8Cgio0/ep4i34FxMXqMV3V0/qXdfhyabi/LM
+wCxNo1Dwt+s6OtPJbwO92JL+829QAxydfmaMTeHBsgMPkG7RwAekeuatKGHNsc2Z
+x2Q4C2wVvOGAhcHwxfM8JfZs3nDSZJndtVVnFlUY0UECAwEAAaNCMEAwDwYDVR0T
+AQH/BAUwAwEB/zAdBgNVHQ4EFgQUpnG7mWazy6k97/tb5iduRB3RXgQwDgYDVR0P
+AQH/BAQDAgGGMA0GCSqGSIb3DQEBDAUAA4ICAQCDLqq1Wwa9Tkuv7vxBnIeVvvFF
+ecTn+P+wJxl9Qa2ortzqTHZsBDyJO62d04AgBwiDXkJ9a+bthgG0H1J7Xee8xqv1
+xyX2yKj24ygHjspLotKP4eDMdDi5TYq+gdkbPmm9Q69B1+W6e049JVGXvWG8/7kU
+igxeuCYwtCCdUPRLf6D8y+1XMGgVv3/DSOHWvTg3MJ1wJ3n3+eve3rjGdRYWZeJu
+k21HLSZYzVrCtUsh2YAeLnUbSxVuT2Xr4JehYe9zW5HEQ8Je/OUfnCy9vzoN/ITw
+osAH+EBJQey7RxEDqMwCaRefH0yeHFcnOll0OXg/urnQmwbEYzQ1uutJaBPsjU0J
+Qf06sMxI7GiB5nPE+CnI2sM6A9AW9kvwexGXpNJiLxF8dvPQthpOKGcYu6BFvRmt
+6ctfXd9b7JJoVqMWuf5cCY6ihpk1e9JTlAqu4Eb/7JNyGiGCR40iSLvV28un9wiE
+plrdYxwcNYq851BEu3r3AyYWw/UW1AKJ5tM+/Gtok+AphMC9ywT66o/Kfu44mOWm
+L3nSLSWEcgfUVgrikpnyGbUnGtgCmHiMlUtNVexcE7OtCIZoVAlCGKNu7tyuJf10
+Qlk8oIIzfSIlcbHpOYoN79FkLoDNc2er4Gd+7w1oPQmdAB0jBJnA6t0OUBPKdDdE
+Ufff2jrbfbzECn1ELg==
+-----END CERTIFICATE-----
+-----BEGIN CERTIFICATE-----
+MIIGCDCCA/CgAwIBAgIQIuO1A8LOnmc7zZ/vMm3TrDANBgkqhkiG9w0BAQwFADCB
+nDELMAkGA1UEBhMCVVMxIjAgBgNVBAoMGUFtYXpvbiBXZWIgU2VydmljZXMsIElu
+Yy4xEzARBgNVBAsMCkFtYXpvbiBSRFMxCzAJBgNVBAgMAldBMTUwMwYDVQQDDCxB
+bWF6b24gUkRTIGFwLXNvdXRoZWFzdC0yIFJvb3QgQ0EgUlNBNDA5NiBHMTEQMA4G
+A1UEBwwHU2VhdHRsZTAgFw0yMTA1MjQyMDQ2MThaGA8yMTIxMDUyNDIxNDYxOFow
+gZwxCzAJBgNVBAYTAlVTMSIwIAYDVQQKDBlBbWF6b24gV2ViIFNlcnZpY2VzLCBJ
+bmMuMRMwEQYDVQQLDApBbWF6b24gUkRTMQswCQYDVQQIDAJXQTE1MDMGA1UEAwws
+QW1hem9uIFJEUyBhcC1zb3V0aGVhc3QtMiBSb290IENBIFJTQTQwOTYgRzExEDAO
+BgNVBAcMB1NlYXR0bGUwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQDq
+qRHKbG8ZK6/GkGm2cenznEF06yHwI1gD5sdsHjTgekDZ2Dl9RwtDmUH2zFuIQwGj
+SeC7E2iKwrJRA5wYzL9/Vk8NOILEKQOP8OIKUHbc7q8rEtjs401KcU6pFBBEdO9G
+CTiRhogq+8mhC13AM/UriZJbKhwgM2UaDOzAneGMhQAGjH8z83NsNcPxpYVE7tqM
+sch5yLtIJLkJRusrmQQTeHUev16YNqyUa+LuFclFL0FzFCimkcxUhXlbfEKXbssS
+yPzjiv8wokGyo7+gA0SueceMO2UjfGfute3HlXZDcNvBbkSY+ver41jPydyRD6Qq
+oEkh0tyIbPoa3oU74kwipJtz6KBEA3u3iq61OUR0ENhR2NeP7CSKrC24SnQJZ/92
+qxusrbyV/0w+U4m62ug/o4hWNK1lUcc2AqiBOvCSJ7qpdteTFxcEIzDwYfERDx6a
+d9+3IPvzMb0ZCxBIIUFMxLTF7yAxI9s6KZBBXSZ6tDcCCYIgEysEPRWMRAcG+ye/
+fZVn9Vnzsj4/2wchC2eQrYpb1QvG4eMXA4M5tFHKi+/8cOPiUzJRgwS222J8YuDj
+yEBval874OzXk8H8Mj0JXJ/jH66WuxcBbh5K7Rp5oJn7yju9yqX6qubY8gVeMZ1i
+u4oXCopefDqa35JplQNUXbWwSebi0qJ4EK0V8F9Q+QIDAQABo0IwQDAPBgNVHRMB
+Af8EBTADAQH/MB0GA1UdDgQWBBT4ysqCxaPe7y+g1KUIAenqu8PAgzAOBgNVHQ8B
+Af8EBAMCAYYwDQYJKoZIhvcNAQEMBQADggIBALU8WN35KAjPZEX65tobtCDQFkIO
+uJjv0alD7qLB0i9eY80C+kD87HKqdMDJv50a5fZdqOta8BrHutgFtDm+xo5F/1M3
+u5/Vva5lV4xy5DqPajcF4Mw52czYBmeiLRTnyPJsU93EQIC2Bp4Egvb6LI4cMOgm
+4pY2hL8DojOC5PXt4B1/7c1DNcJX3CMzHDm4SMwiv2MAxSuC/cbHXcWMk+qXdrVx
++ayLUSh8acaAOy3KLs1MVExJ6j9iFIGsDVsO4vr4ZNsYQiyHjp+L8ops6YVBO5AT
+k/pI+axHIVsO5qiD4cFWvkGqmZ0gsVtgGUchZaacboyFsVmo6QPrl28l6LwxkIEv
+GGJYvIBW8sfqtGRspjfX5TlNy5IgW/VOwGBdHHsvg/xpRo31PR3HOFw7uPBi7cAr
+FiZRLJut7af98EB2UvovZnOh7uIEGPeecQWeOTQfJeWet2FqTzFYd0NUMgqPuJx1
+vLKferP+ajAZLJvVnW1J7Vccx/pm0rMiUJEf0LRb/6XFxx7T2RGjJTi0EzXODTYI
+gnLfBBjnolQqw+emf4pJ4pAtly0Gq1KoxTG2QN+wTd4lsCMjnelklFDjejwnl7Uy
+vtxzRBAu/hi/AqDkDFf94m6j+edIrjbi9/JDFtQ9EDlyeqPgw0qwi2fwtJyMD45V
+fejbXelUSJSzDIdY
+-----END CERTIFICATE-----
+-----BEGIN CERTIFICATE-----
+MIIGCTCCA/GgAwIBAgIRAN7Y9G9i4I+ZaslPobE7VL4wDQYJKoZIhvcNAQEMBQAw
+gZwxCzAJBgNVBAYTAlVTMSIwIAYDVQQKDBlBbWF6b24gV2ViIFNlcnZpY2VzLCBJ
+bmMuMRMwEQYDVQQLDApBbWF6b24gUkRTMQswCQYDVQQIDAJXQTE1MDMGA1UEAwws
+QW1hem9uIFJEUyBhcC1ub3J0aGVhc3QtMiBSb290IENBIFJTQTQwOTYgRzExEDAO
+BgNVBAcMB1NlYXR0bGUwIBcNMjEwNTIwMTYzMzIzWhgPMjEyMTA1MjAxNzMzMjNa
+MIGcMQswCQYDVQQGEwJVUzEiMCAGA1UECgwZQW1hem9uIFdlYiBTZXJ2aWNlcywg
+SW5jLjETMBEGA1UECwwKQW1hem9uIFJEUzELMAkGA1UECAwCV0ExNTAzBgNVBAMM
+LEFtYXpvbiBSRFMgYXAtbm9ydGhlYXN0LTIgUm9vdCBDQSBSU0E0MDk2IEcxMRAw
+DgYDVQQHDAdTZWF0dGxlMIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEA
+4BEPCiIfiK66Q/qa8k+eqf1Q3qsa6Xuu/fPkpuStXVBShhtXd3eqrM0iT4Xxs420
+Va0vSB3oZ7l86P9zYfa60n6PzRxdYFckYX330aI7L/oFIdaodB/C9szvROI0oLG+
+6RwmIF2zcprH0cTby8MiM7G3v9ykpq27g4WhDC1if2j8giOQL3oHpUaByekZNIHF
+dIllsI3RkXmR3xmmxoOxJM1B9MZi7e1CvuVtTGOnSGpNCQiqofehTGwxCN2wFSK8
+xysaWlw48G0VzZs7cbxoXMH9QbMpb4tpk0d+T8JfAPu6uWO9UwCLWWydf0CkmA/+
+D50/xd1t33X9P4FEaPSg5lYbHXzSLWn7oLbrN2UqMLaQrkoEBg/VGvzmfN0mbflw
++T87bJ/VEOVNlG+gepyCTf89qIQVWOjuYMox4sK0PjzZGsYEuYiq1+OUT3vk/e5K
+ag1fCcq2Isy4/iwB2xcXrsQ6ljwdk1fc+EmOnjGKrhuOHJY3S+RFv4ToQBsVyYhC
+XGaC3EkqIX0xaCpDimxYhFjWhpDXAjG/zJ+hRLDAMCMhl/LPGRk/D1kzSbPmdjpl
+lEMK5695PeBvEBTQdBQdOiYgOU3vWU6tzwwHfiM2/wgvess/q0FDAHfJhppbgbb9
+3vgsIUcsvoC5o29JvMsUxsDRvsAfEmMSDGkJoA/X6GECAwEAAaNCMEAwDwYDVR0T
+AQH/BAUwAwEB/zAdBgNVHQ4EFgQUgEWm1mZCbGD6ytbwk2UU1aLaOUUwDgYDVR0P
+AQH/BAQDAgGGMA0GCSqGSIb3DQEBDAUAA4ICAQBb4+ABTGBGwxK1U/q4g8JDqTQM
+1Wh8Oz8yAk4XtPJMAmCctxbd81cRnSnePWw/hxViLVtkZ/GsemvXfqAQyOn1coN7
+QeYSw+ZOlu0j2jEJVynmgsR7nIRqE7QkCyZAU+d2FTJUfmee+IiBiGyFGgxz9n7A
+JhBZ/eahBbiuoOik/APW2JWLh0xp0W0GznfJ8lAlaQTyDa8iDXmVtbJg9P9qzkvl
+FgPXQttzEOyooF8Pb2LCZO4kUz+1sbU7tHdr2YE+SXxt6D3SBv+Yf0FlvyWLiqVk
+GDEOlPPTDSjAWgKnqST8UJ0RDcZK/v1ixs7ayqQJU0GUQm1I7LGTErWXHMnCuHKe
+UKYuiSZwmTcJ06NgdhcCnGZgPq13ryMDqxPeltQc3n5eO7f1cL9ERYLDLOzm6A9P
+oQ3MfcVOsbHgGHZWaPSeNrQRN9xefqBXH0ZPasgcH9WJdsLlEjVUXoultaHOKx3b
+UCCb+d3EfqF6pRT488ippOL6bk7zNubwhRa/+y4wjZtwe3kAX78ACJVcjPobH9jZ
+ErySads5zdQeaoee5wRKdp3TOfvuCe4bwLRdhOLCHWzEcXzY3g/6+ppLvNom8o+h
+Bh5X26G6KSfr9tqhQ3O9IcbARjnuPbvtJnoPY0gz3EHHGPhy0RNW8i2gl3nUp0ah
+PtjwbKW0hYAhIttT0Q==
+-----END CERTIFICATE-----
+-----BEGIN CERTIFICATE-----
+MIICtzCCAj2gAwIBAgIQQRBQTs6Y3H1DDbpHGta3lzAKBggqhkjOPQQDAzCBmzEL
+MAkGA1UEBhMCVVMxIjAgBgNVBAoMGUFtYXpvbiBXZWIgU2VydmljZXMsIEluYy4x
+EzARBgNVBAsMCkFtYXpvbiBSRFMxCzAJBgNVBAgMAldBMTQwMgYDVQQDDCtBbWF6
+b24gUkRTIGFwLXNvdXRoZWFzdC0zIFJvb3QgQ0EgRUNDMzg0IEcxMRAwDgYDVQQH
+DAdTZWF0dGxlMCAXDTIxMDYxMTAwMTI0M1oYDzIxMjEwNjExMDExMjQzWjCBmzEL
+MAkGA1UEBhMCVVMxIjAgBgNVBAoMGUFtYXpvbiBXZWIgU2VydmljZXMsIEluYy4x
+EzARBgNVBAsMCkFtYXpvbiBSRFMxCzAJBgNVBAgMAldBMTQwMgYDVQQDDCtBbWF6
+b24gUkRTIGFwLXNvdXRoZWFzdC0zIFJvb3QgQ0EgRUNDMzg0IEcxMRAwDgYDVQQH
+DAdTZWF0dGxlMHYwEAYHKoZIzj0CAQYFK4EEACIDYgAEs0942Xj4m/gKA+WA6F5h
+AHYuek9eGpzTRoLJddM4rEV1T3eSueytMVKOSlS3Ub9IhyQrH2D8EHsLYk9ktnGR
+pATk0kCYTqFbB7onNo070lmMJmGT/Q7NgwC8cySChFxbo0IwQDAPBgNVHRMBAf8E
+BTADAQH/MB0GA1UdDgQWBBQ20iKBKiNkcbIZRu0y1uoF1yJTEzAOBgNVHQ8BAf8E
+BAMCAYYwCgYIKoZIzj0EAwMDaAAwZQIwYv0wTSrpQTaPaarfLN8Xcqrqu3hzl07n
+FrESIoRw6Cx77ZscFi2/MV6AFyjCV/TlAjEAhpwJ3tpzPXpThRML8DMJYZ3YgMh3
+CMuLqhPpla3cL0PhybrD27hJWl29C4el6aMO
+-----END CERTIFICATE-----
+-----BEGIN CERTIFICATE-----
+MIICrDCCAjOgAwIBAgIQGcztRyV40pyMKbNeSN+vXTAKBggqhkjOPQQDAzCBljEL
+MAkGA1UEBhMCVVMxIjAgBgNVBAoMGUFtYXpvbiBXZWIgU2VydmljZXMsIEluYy4x
+EzARBgNVBAsMCkFtYXpvbiBSRFMxCzAJBgNVBAgMAldBMS8wLQYDVQQDDCZBbWF6
+b24gUkRTIHVzLWVhc3QtMiBSb290IENBIEVDQzM4NCBHMTEQMA4GA1UEBwwHU2Vh
+dHRsZTAgFw0yMTA1MjEyMzE1NTZaGA8yMTIxMDUyMjAwMTU1NlowgZYxCzAJBgNV
+BAYTAlVTMSIwIAYDVQQKDBlBbWF6b24gV2ViIFNlcnZpY2VzLCBJbmMuMRMwEQYD
+VQQLDApBbWF6b24gUkRTMQswCQYDVQQIDAJXQTEvMC0GA1UEAwwmQW1hem9uIFJE
+UyB1cy1lYXN0LTIgUm9vdCBDQSBFQ0MzODQgRzExEDAOBgNVBAcMB1NlYXR0bGUw
+djAQBgcqhkjOPQIBBgUrgQQAIgNiAAQfDcv+GGRESD9wT+I5YIPRsD3L+/jsiIis
+Tr7t9RSbFl+gYpO7ZbDXvNbV5UGOC5lMJo/SnqFRTC6vL06NF7qOHfig3XO8QnQz
+6T5uhhrhnX2RSY3/10d2kTyHq3ZZg3+jQjBAMA8GA1UdEwEB/wQFMAMBAf8wHQYD
+VR0OBBYEFLDyD3PRyNXpvKHPYYxjHXWOgfPnMA4GA1UdDwEB/wQEAwIBhjAKBggq
+hkjOPQQDAwNnADBkAjB20HQp6YL7CqYD82KaLGzgw305aUKw2aMrdkBR29J183jY
+6Ocj9+Wcif9xnRMS+7oCMAvrt03rbh4SU9BohpRUcQ2Pjkh7RoY0jDR4Xq4qzjNr
+5UFr3BXpFvACxXF51BksGQ==
+-----END CERTIFICATE-----
+-----BEGIN CERTIFICATE-----
+MIICrjCCAjWgAwIBAgIQeKbS5zvtqDvRtwr5H48cAjAKBggqhkjOPQQDAzCBlzEL
+MAkGA1UEBhMCVVMxIjAgBgNVBAoMGUFtYXpvbiBXZWIgU2VydmljZXMsIEluYy4x
+EzARBgNVBAsMCkFtYXpvbiBSRFMxCzAJBgNVBAgMAldBMTAwLgYDVQQDDCdBbWF6
+b24gUkRTIG1lLXNvdXRoLTEgUm9vdCBDQSBFQ0MzODQgRzExEDAOBgNVBAcMB1Nl
+YXR0bGUwIBcNMjEwNTIwMTcxOTU1WhgPMjEyMTA1MjAxODE5NTVaMIGXMQswCQYD
+VQQGEwJVUzEiMCAGA1UECgwZQW1hem9uIFdlYiBTZXJ2aWNlcywgSW5jLjETMBEG
+A1UECwwKQW1hem9uIFJEUzELMAkGA1UECAwCV0ExMDAuBgNVBAMMJ0FtYXpvbiBS
+RFMgbWUtc291dGgtMSBSb290IENBIEVDQzM4NCBHMTEQMA4GA1UEBwwHU2VhdHRs
+ZTB2MBAGByqGSM49AgEGBSuBBAAiA2IABEKjgUaAPmUlRMEQdBC7BScAGosJ1zRV
+LDd38qTBjzgmwBfQJ5ZfGIvyEK5unB09MB4e/3qqK5I/L6Qn5Px/n5g4dq0c7MQZ
+u7G9GBYm90U3WRJBf7lQrPStXaRnS4A/O6NCMEAwDwYDVR0TAQH/BAUwAwEB/zAd
+BgNVHQ4EFgQUNKcAbGEIn03/vkwd8g6jNyiRdD4wDgYDVR0PAQH/BAQDAgGGMAoG
+CCqGSM49BAMDA2cAMGQCMHIeTrjenCSYuGC6txuBt/0ZwnM/ciO9kHGWVCoK8QLs
+jGghb5/YSFGZbmQ6qpGlSAIwVOQgdFfTpEfe5i+Vs9frLJ4QKAfc27cTNYzRIM0I
+E+AJgK4C4+DiyyMzOpiCfmvq
+-----END CERTIFICATE-----
+-----BEGIN CERTIFICATE-----
+MIIGCDCCA/CgAwIBAgIQSFkEUzu9FYgC5dW+5lnTgjANBgkqhkiG9w0BAQwFADCB
+nDELMAkGA1UEBhMCVVMxIjAgBgNVBAoMGUFtYXpvbiBXZWIgU2VydmljZXMsIElu
+Yy4xEzARBgNVBAsMCkFtYXpvbiBSRFMxCzAJBgNVBAgMAldBMTUwMwYDVQQDDCxB
+bWF6b24gUkRTIGFwLXNvdXRoZWFzdC0zIFJvb3QgQ0EgUlNBNDA5NiBHMTEQMA4G
+A1UEBwwHU2VhdHRsZTAgFw0yMTA2MTEwMDA4MzZaGA8yMTIxMDYxMTAxMDgzNlow
+gZwxCzAJBgNVBAYTAlVTMSIwIAYDVQQKDBlBbWF6b24gV2ViIFNlcnZpY2VzLCBJ
+bmMuMRMwEQYDVQQLDApBbWF6b24gUkRTMQswCQYDVQQIDAJXQTE1MDMGA1UEAwws
+QW1hem9uIFJEUyBhcC1zb3V0aGVhc3QtMyBSb290IENBIFJTQTQwOTYgRzExEDAO
+BgNVBAcMB1NlYXR0bGUwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQDx
+my5Qmd8zdwaI/KOKV9Xar9oNbhJP5ED0JCiigkuvCkg5qM36klszE8JhsUj40xpp
+vQw9wkYW4y+C8twBpzKGBvakqMnoaVUV7lOCKx0RofrnNwkZCboTBB4X/GCZ3fIl
+YTybS7Ehi1UuiaZspIT5A2jidoA8HiBPk+mTg1UUkoWS9h+MEAPa8L4DY6fGf4pO
+J1Gk2cdePuNzzIrpm2yPto+I8MRROwZ3ha7ooyymOXKtz2c7jEHHJ314boCXAv9G
+cdo27WiebewZkHHH7Zx9iTIVuuk2abyVSzvLVeGv7Nuy4lmSqa5clWYqWsGXxvZ2
+0fZC5Gd+BDUMW1eSpW7QDTk3top6x/coNoWuLSfXiC5ZrJkIKimSp9iguULgpK7G
+abMMN4PR+O+vhcB8E879hcwmS2yd3IwcPTl3QXxufqeSV58/h2ibkqb/W4Bvggf6
+5JMHQPlPHOqMCVFIHP1IffIo+Of7clb30g9FD2j3F4qgV3OLwEDNg/zuO1DiAvH1
+L+OnmGHkfbtYz+AVApkAZrxMWwoYrwpauyBusvSzwRE24vLTd2i80ZDH422QBLXG
+rN7Zas8rwIiBKacJLYtBYETw8mfsNt8gb72aIQX6cZOsphqp6hUtKaiMTVgGazl7
+tBXqbB+sIv3S9X6bM4cZJKkMJOXbnyCCLZFYv8TurwIDAQABo0IwQDAPBgNVHRMB
+Af8EBTADAQH/MB0GA1UdDgQWBBTOVtaS1b/lz6yJDvNk65vEastbQTAOBgNVHQ8B
+Af8EBAMCAYYwDQYJKoZIhvcNAQEMBQADggIBABEONg+TmMZM/PrYGNAfB4S41zp1
+3CVjslZswh/pC4kgXSf8cPJiUOzMwUevuFQj7tCqxQtJEygJM2IFg4ViInIah2kh
+xlRakEGGw2dEVlxZAmmLWxlL1s1lN1565t5kgVwM0GVfwYM2xEvUaby6KDVJIkD3
+aM6sFDBshvVA70qOggM6kU6mwTbivOROzfoIQDnVaT+LQjHqY/T+ok6IN0YXXCWl
+Favai8RDjzLDFwXSRvgIK+1c49vlFFY4W9Efp7Z9tPSZU1TvWUcKdAtV8P2fPHAS
+vAZ+g9JuNfeawhEibjXkwg6Z/yFUueQCQOs9TRXYogzp5CMMkfdNJF8byKYqHscs
+UosIcETnHwqwban99u35sWcoDZPr6aBIrz7LGKTJrL8Nis8qHqnqQBXu/fsQEN8u
+zJ2LBi8sievnzd0qI0kaWmg8GzZmYH1JCt1GXSqOFkI8FMy2bahP7TUQR1LBUKQ3
+hrOSqldkhN+cSAOnvbQcFzLr+iEYEk34+NhcMIFVE+51KJ1n6+zISOinr6mI3ckX
+6p2tmiCD4Shk2Xx/VTY/KGvQWKFcQApWezBSvDNlGe0yV71LtLf3dr1pr4ofo7cE
+rYucCJ40bfxEU/fmzYdBF32xP7AOD9U0FbOR3Mcthc6Z6w20WFC+zru8FGY08gPf
+WT1QcNdw7ntUJP/w
+-----END CERTIFICATE-----
+-----BEGIN CERTIFICATE-----
+MIICrzCCAjWgAwIBAgIQARky6+5PNFRkFVOp3Ob1CTAKBggqhkjOPQQDAzCBlzEL
+MAkGA1UEBhMCVVMxIjAgBgNVBAoMGUFtYXpvbiBXZWIgU2VydmljZXMsIEluYy4x
+EzARBgNVBAsMCkFtYXpvbiBSRFMxCzAJBgNVBAgMAldBMTAwLgYDVQQDDCdBbWF6
+b24gUkRTIGV1LXNvdXRoLTIgUm9vdCBDQSBFQ0MzODQgRzExEDAOBgNVBAcMB1Nl
+YXR0bGUwIBcNMjIwNTIzMTg0MTI4WhgPMjEyMjA1MjMxOTQxMjdaMIGXMQswCQYD
+VQQGEwJVUzEiMCAGA1UECgwZQW1hem9uIFdlYiBTZXJ2aWNlcywgSW5jLjETMBEG
+A1UECwwKQW1hem9uIFJEUzELMAkGA1UECAwCV0ExMDAuBgNVBAMMJ0FtYXpvbiBS
+RFMgZXUtc291dGgtMiBSb290IENBIEVDQzM4NCBHMTEQMA4GA1UEBwwHU2VhdHRs
+ZTB2MBAGByqGSM49AgEGBSuBBAAiA2IABNVGL5oF7cfIBxKyWd2PVK/S5yQfaJY3
+QFHWvEdt6951n9JhiiPrHzfVHsxZp1CBjILRMzjgRbYWmc8qRoLkgGE7htGdwudJ
+Fa/WuKzO574Prv4iZXUnVGTboC7JdvKbh6NCMEAwDwYDVR0TAQH/BAUwAwEB/zAd
+BgNVHQ4EFgQUgDeIIEKynwUbNXApdIPnmRWieZwwDgYDVR0PAQH/BAQDAgGGMAoG
+CCqGSM49BAMDA2gAMGUCMEOOJfucrST+FxuqJkMZyCM3gWGZaB+/w6+XUAJC6hFM
+uSTY0F44/bERkA4XhH+YGAIxAIpJQBakCA1/mXjsTnQ+0El9ty+LODp8ibkn031c
+8DKDS7pR9UK7ZYdR6zFg3ZCjQw==
+-----END CERTIFICATE-----
+-----BEGIN CERTIFICATE-----
+MIICrjCCAjOgAwIBAgIQJvkWUcYLbnxtuwnyjMmntDAKBggqhkjOPQQDAzCBljEL
+MAkGA1UEBhMCVVMxIjAgBgNVBAoMGUFtYXpvbiBXZWIgU2VydmljZXMsIEluYy4x
+EzARBgNVBAsMCkFtYXpvbiBSRFMxCzAJBgNVBAgMAldBMS8wLQYDVQQDDCZBbWF6
+b24gUkRTIGV1LXdlc3QtMyBSb290IENBIEVDQzM4NCBHMTEQMA4GA1UEBwwHU2Vh
+dHRsZTAgFw0yMTA1MjUyMjI2MTJaGA8yMTIxMDUyNTIzMjYxMlowgZYxCzAJBgNV
+BAYTAlVTMSIwIAYDVQQKDBlBbWF6b24gV2ViIFNlcnZpY2VzLCBJbmMuMRMwEQYD
+VQQLDApBbWF6b24gUkRTMQswCQYDVQQIDAJXQTEvMC0GA1UEAwwmQW1hem9uIFJE
+UyBldS13ZXN0LTMgUm9vdCBDQSBFQ0MzODQgRzExEDAOBgNVBAcMB1NlYXR0bGUw
+djAQBgcqhkjOPQIBBgUrgQQAIgNiAARENn8uHCyjn1dFax4OeXxvbV861qsXFD9G
+DshumTmFzWWHN/69WN/AOsxy9XN5S7Cgad4gQgeYYYgZ5taw+tFo/jQvCLY//uR5
+uihcLuLJ78opvRPvD9kbWZ6oXfBtFkWjQjBAMA8GA1UdEwEB/wQFMAMBAf8wHQYD
+VR0OBBYEFKiK3LpoF+gDnqPldGSwChBPCYciMA4GA1UdDwEB/wQEAwIBhjAKBggq
+hkjOPQQDAwNpADBmAjEA+7qfvRlnvF1Aosyp9HzxxCbN7VKu+QXXPhLEBWa5oeWW
+UOcifunf/IVLC4/FGCsLAjEAte1AYp+iJyOHDB8UYkhBE/1sxnFaTiEPbvQBU0wZ
+SuwWVLhu2wWDuSW+K7tTuL8p
+-----END CERTIFICATE-----
+-----BEGIN CERTIFICATE-----
+MIID/zCCAuegAwIBAgIRAKeDpqX5WFCGNo94M4v69sUwDQYJKoZIhvcNAQELBQAw
+gZcxCzAJBgNVBAYTAlVTMSIwIAYDVQQKDBlBbWF6b24gV2ViIFNlcnZpY2VzLCBJ
+bmMuMRMwEQYDVQQLDApBbWF6b24gUkRTMQswCQYDVQQIDAJXQTEwMC4GA1UEAwwn
+QW1hem9uIFJEUyBldS13ZXN0LTMgUm9vdCBDQSBSU0EyMDQ4IEcxMRAwDgYDVQQH
+DAdTZWF0dGxlMCAXDTIxMDUyNTIyMTgzM1oYDzIwNjEwNTI1MjMxODMzWjCBlzEL
+MAkGA1UEBhMCVVMxIjAgBgNVBAoMGUFtYXpvbiBXZWIgU2VydmljZXMsIEluYy4x
+EzARBgNVBAsMCkFtYXpvbiBSRFMxCzAJBgNVBAgMAldBMTAwLgYDVQQDDCdBbWF6
+b24gUkRTIGV1LXdlc3QtMyBSb290IENBIFJTQTIwNDggRzExEDAOBgNVBAcMB1Nl
+YXR0bGUwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCcKOTEMTfzvs4H
+WtJR8gI7GXN6xesulWtZPv21oT+fLGwJ+9Bv8ADCGDDrDxfeH/HxJmzG9hgVAzVn
+4g97Bn7q07tGZM5pVi96/aNp11velZT7spOJKfJDZTlGns6DPdHmx48whpdO+dOb
+6+eR0VwCIv+Vl1fWXgoACXYCoKjhxJs+R+fwY//0JJ1YG8yjZ+ghLCJmvlkOJmE1
+TCPUyIENaEONd6T+FHGLVYRRxC2cPO65Jc4yQjsXvvQypoGgx7FwD5voNJnFMdyY
+754JGPOOe/SZdepN7Tz7UEq8kn7NQSbhmCsgA/Hkjkchz96qN/YJ+H/okiQUTNB0
+eG9ogiVFAgMBAAGjQjBAMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFFjayw9Y
+MjbxfF14XAhMM2VPl0PfMA4GA1UdDwEB/wQEAwIBhjANBgkqhkiG9w0BAQsFAAOC
+AQEAAtmx6d9+9CWlMoU0JCirtp4dSS41bBfb9Oor6GQ8WIr2LdfZLL6uES/ubJPE
+1Sh5Vu/Zon5/MbqLMVrfniv3UpQIof37jKXsjZJFE1JVD/qQfRzG8AlBkYgHNEiS
+VtD4lFxERmaCkY1tjKB4Dbd5hfhdrDy29618ZjbSP7NwAfnwb96jobCmMKgxVGiH
+UqsLSiEBZ33b2hI7PJ6iTJnYBWGuiDnsWzKRmheA4nxwbmcQSfjbrNwa93w3caL2
+v/4u54Kcasvcu3yFsUwJygt8z43jsGAemNZsS7GWESxVVlW93MJRn6M+MMakkl9L
+tWaXdHZ+KUV7LhfYLb0ajvb40w==
+-----END CERTIFICATE-----
+-----BEGIN CERTIFICATE-----
+MIIEBDCCAuygAwIBAgIQJ5oxPEjefCsaESSwrxk68DANBgkqhkiG9w0BAQsFADCB
+mjELMAkGA1UEBhMCVVMxIjAgBgNVBAoMGUFtYXpvbiBXZWIgU2VydmljZXMsIElu
+Yy4xEzARBgNVBAsMCkFtYXpvbiBSRFMxCzAJBgNVBAgMAldBMTMwMQYDVQQDDCpB
+bWF6b24gUkRTIGV1LWNlbnRyYWwtMiBSb290IENBIFJTQTIwNDggRzExEDAOBgNV
+BAcMB1NlYXR0bGUwIBcNMjIwNjA2MjExNzA1WhgPMjA2MjA2MDYyMjE3MDVaMIGa
+MQswCQYDVQQGEwJVUzEiMCAGA1UECgwZQW1hem9uIFdlYiBTZXJ2aWNlcywgSW5j
+LjETMBEGA1UECwwKQW1hem9uIFJEUzELMAkGA1UECAwCV0ExMzAxBgNVBAMMKkFt
+YXpvbiBSRFMgZXUtY2VudHJhbC0yIFJvb3QgQ0EgUlNBMjA0OCBHMTEQMA4GA1UE
+BwwHU2VhdHRsZTCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBALTQt5eX
+g+VP3BjO9VBkWJhE0GfLrU/QIk32I6WvrnejayTrlup9H1z4QWlXF7GNJrqScRMY
+KhJHlcP05aPsx1lYco6pdFOf42ybXyWHHJdShj4A5glU81GTT+VrXGzHSarLmtua
+eozkQgPpDsSlPt0RefyTyel7r3Cq+5K/4vyjCTcIqbfgaGwTU36ffjM1LaPCuE4O
+nINMeD6YuImt2hU/mFl20FZ+IZQUIFZZU7pxGLqTRz/PWcH8tDDxnkYg7tNuXOeN
+JbTpXrw7St50/E9ZQ0llGS+MxJD8jGRAa/oL4G/cwnV8P2OEPVVkgN9xDDQeieo0
+3xkzolkDkmeKOnUCAwEAAaNCMEAwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQU
+bwu8635iQGQMRanekesORM8Hkm4wDgYDVR0PAQH/BAQDAgGGMA0GCSqGSIb3DQEB
+CwUAA4IBAQAgN6LE9mUgjsj6xGCX1afYE69fnmCjjb0rC6eEe1mb/QZNcyw4XBIW
+6+zTXo4mjZ4ffoxb//R0/+vdTE7IvaLgfAZgFsLKJCtYDDstXZj8ujQnGR9Pig3R
+W+LpNacvOOSJSawNQq0Xrlcu55AU4buyD5VjcICnfF1dqBMnGTnh27m/scd/ZMx/
+kapHZ/fMoK2mAgSX/NvUKF3UkhT85vSSM2BTtET33DzCPDQTZQYxFBa4rFRmFi4c
+BLlmIReiCGyh3eJhuUUuYAbK6wLaRyPsyEcIOLMQmZe1+gAFm1+1/q5Ke9ugBmjf
+PbTWjsi/lfZ5CdVAhc5lmZj/l5aKqwaS
+-----END CERTIFICATE-----
+-----BEGIN CERTIFICATE-----
+MIICrjCCAjSgAwIBAgIRAKKPTYKln9L4NTx9dpZGUjowCgYIKoZIzj0EAwMwgZYx
+CzAJBgNVBAYTAlVTMSIwIAYDVQQKDBlBbWF6b24gV2ViIFNlcnZpY2VzLCBJbmMu
+MRMwEQYDVQQLDApBbWF6b24gUkRTMQswCQYDVQQIDAJXQTEvMC0GA1UEAwwmQW1h
+em9uIFJEUyBldS13ZXN0LTIgUm9vdCBDQSBFQ0MzODQgRzExEDAOBgNVBAcMB1Nl
+YXR0bGUwIBcNMjEwNTIxMjI1NTIxWhgPMjEyMTA1MjEyMzU1MjFaMIGWMQswCQYD
+VQQGEwJVUzEiMCAGA1UECgwZQW1hem9uIFdlYiBTZXJ2aWNlcywgSW5jLjETMBEG
+A1UECwwKQW1hem9uIFJEUzELMAkGA1UECAwCV0ExLzAtBgNVBAMMJkFtYXpvbiBS
+RFMgZXUtd2VzdC0yIFJvb3QgQ0EgRUNDMzg0IEcxMRAwDgYDVQQHDAdTZWF0dGxl
+MHYwEAYHKoZIzj0CAQYFK4EEACIDYgAE/owTReDvaRqdmbtTzXbyRmEpKCETNj6O
+hZMKH0F8oU9Tmn8RU7kQQj6xUKEyjLPrFBN7c+26TvrVO1KmJAvbc8bVliiJZMbc
+C0yV5PtJTalvlMZA1NnciZuhxaxrzlK1o0IwQDAPBgNVHRMBAf8EBTADAQH/MB0G
+A1UdDgQWBBT4i5HaoHtrs7Mi8auLhMbKM1XevDAOBgNVHQ8BAf8EBAMCAYYwCgYI
+KoZIzj0EAwMDaAAwZQIxAK9A+8/lFdX4XJKgfP+ZLy5ySXC2E0Spoy12Gv2GdUEZ
+p1G7c1KbWVlyb1d6subzkQIwKyH0Naf/3usWfftkmq8SzagicKz5cGcEUaULq4tO
+GzA/AMpr63IDBAqkZbMDTCmH
+-----END CERTIFICATE-----
+-----BEGIN CERTIFICATE-----
+MIICrzCCAjWgAwIBAgIQTgIvwTDuNWQo0Oe1sOPQEzAKBggqhkjOPQQDAzCBlzEL
+MAkGA1UEBhMCVVMxIjAgBgNVBAoMGUFtYXpvbiBXZWIgU2VydmljZXMsIEluYy4x
+EzARBgNVBAsMCkFtYXpvbiBSRFMxCzAJBgNVBAgMAldBMTAwLgYDVQQDDCdBbWF6
+b24gUkRTIGV1LW5vcnRoLTEgUm9vdCBDQSBFQ0MzODQgRzExEDAOBgNVBAcMB1Nl
+YXR0bGUwIBcNMjEwNTI0MjEwNjM4WhgPMjEyMTA1MjQyMjA2MzhaMIGXMQswCQYD
+VQQGEwJVUzEiMCAGA1UECgwZQW1hem9uIFdlYiBTZXJ2aWNlcywgSW5jLjETMBEG
+A1UECwwKQW1hem9uIFJEUzELMAkGA1UECAwCV0ExMDAuBgNVBAMMJ0FtYXpvbiBS
+RFMgZXUtbm9ydGgtMSBSb290IENBIEVDQzM4NCBHMTEQMA4GA1UEBwwHU2VhdHRs
+ZTB2MBAGByqGSM49AgEGBSuBBAAiA2IABJuzXLU8q6WwSKXBvx8BbdIi3mPhb7Xo
+rNJBfuMW1XRj5BcKH1ZoGaDGw+BIIwyBJg8qNmCK8kqIb4cH8/Hbo3Y+xBJyoXq/
+cuk8aPrxiNoRsKWwiDHCsVxaK9L7GhHHAqNCMEAwDwYDVR0TAQH/BAUwAwEB/zAd
+BgNVHQ4EFgQUYgcsdU4fm5xtuqLNppkfTHM2QMYwDgYDVR0PAQH/BAQDAgGGMAoG
+CCqGSM49BAMDA2gAMGUCMQDz/Rm89+QJOWJecYAmYcBWCcETASyoK1kbr4vw7Hsg
+7Ew3LpLeq4IRmTyuiTMl0gMCMAa0QSjfAnxBKGhAnYxcNJSntUyyMpaXzur43ec0
+3D8npJghwC4DuICtKEkQiI5cSg==
+-----END CERTIFICATE-----
+-----BEGIN CERTIFICATE-----
+MIIGATCCA+mgAwIBAgIRAORIGqQXLTcbbYT2upIsSnQwDQYJKoZIhvcNAQEMBQAw
+gZgxCzAJBgNVBAYTAlVTMSIwIAYDVQQKDBlBbWF6b24gV2ViIFNlcnZpY2VzLCBJ
+bmMuMRMwEQYDVQQLDApBbWF6b24gUkRTMQswCQYDVQQIDAJXQTExMC8GA1UEAwwo
+QW1hem9uIFJEUyBldS1zb3V0aC0yIFJvb3QgQ0EgUlNBNDA5NiBHMTEQMA4GA1UE
+BwwHU2VhdHRsZTAgFw0yMjA1MjMxODM0MjJaGA8yMTIyMDUyMzE5MzQyMlowgZgx
+CzAJBgNVBAYTAlVTMSIwIAYDVQQKDBlBbWF6b24gV2ViIFNlcnZpY2VzLCBJbmMu
+MRMwEQYDVQQLDApBbWF6b24gUkRTMQswCQYDVQQIDAJXQTExMC8GA1UEAwwoQW1h
+em9uIFJEUyBldS1zb3V0aC0yIFJvb3QgQ0EgUlNBNDA5NiBHMTEQMA4GA1UEBwwH
+U2VhdHRsZTCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBAPKukwsW2s/h
+1k+Hf65pOP0knVBnOnMQyT1mopp2XHGdXznj9xS49S30jYoUnWccyXgD983A1bzu
+w4fuJRHg4MFdz/NWTgXvy+zy0Roe83OPIJjUmXnnzwUHQcBa9vl6XUO65iQ3pbSi
+fQfNDFXD8cvuXbkezeADoy+iFAlzhXTzV9MD44GTuo9Z3qAXNGHQCrgRSCL7uRYt
+t1nfwboCbsVRnElopn2cTigyVXE62HzBUmAw1GTbAZeFAqCn5giBWYAfHwTUldRL
+6eEa6atfsS2oPNus4ZENa1iQxXq7ft+pMdNt0qKXTCZiiCZjmLkY0V9kWwHTRRF8
+r+75oSL//3di43QnuSCgjwMRIeWNtMud5jf3eQzSBci+9njb6DrrSUbx7blP0srg
+94/C/fYOp/0/EHH34w99Th14VVuGWgDgKahT9/COychLOubXUT6vD1As47S9KxTv
+yYleVKwJnF9cVjepODN72fNlEf74BwzgSIhUmhksmZSeJBabrjSUj3pdyo/iRZN/
+CiYz9YPQ29eXHPQjBZVIUqWbOVfdwsx0/Xu5T1e7yyXByQ3/oDulahtcoKPAFQ3J
+ee6NJK655MdS7pM9hJnU2Rzu3qZ/GkM6YK7xTlMXVouPUZov/VbiaCKbqYDs8Dg+
+UKdeNXAT6+BMleGQzly1X7vjhgeA8ugVAgMBAAGjQjBAMA8GA1UdEwEB/wQFMAMB
+Af8wHQYDVR0OBBYEFJdaPwpCf78UolFTEn6GO85/QwUIMA4GA1UdDwEB/wQEAwIB
+hjANBgkqhkiG9w0BAQwFAAOCAgEAWkxHIT3mers5YnZRSVjmpxCLivGj1jMB9VYC
+iKqTAeIvD0940L0YaZgivQll5pue8UUcQ6M2uCdVVAsNJdmQ5XHIYiGOknYPtxzO
+aO+bnZp7VIZw/vJ49hvH6RreA2bbxYMZO/ossYdcWsWbOKHFrRmAw0AhtK/my51g
+obV7eQg+WmlE5Iqc75ycUsoZdc3NimkjBi7LQoNP1HMvlLHlF71UZhQDdq+/WdV7
+0zmg+epkki1LjgMmuPyb+xWuYkFKT1/faX+Xs62hIm5BY+aI4if4RuQ+J//0pOSs
+UajrjTo+jLGB8A96jAe8HaFQenbwMjlaHRDAF0wvbkYrMr5a6EbneAB37V05QD0Y
+Rh4L4RrSs9DX2hbSmS6iLDuPEjanHKzglF5ePEvnItbRvGGkynqDVlwF+Bqfnw8l
+0i8Hr1f1/LP1c075UjkvsHlUnGgPbLqA0rDdcxF8Fdlv1BunUjX0pVlz10Ha5M6P
+AdyWUOneOfaA5G7jjv7i9qg3r99JNs1/Lmyg/tV++gnWTAsSPFSSEte81kmPhlK3
+2UtAO47nOdTtk+q4VIRAwY1MaOR7wTFZPfer1mWs4RhKNu/odp8urEY87iIzbMWT
+QYO/4I6BGj9rEWNGncvR5XTowwIthMCj2KWKM3Z/JxvjVFylSf+s+FFfO1bNIm6h
+u3UBpZI=
+-----END CERTIFICATE-----
+-----BEGIN CERTIFICATE-----
+MIICtDCCAjmgAwIBAgIQenQbcP/Zbj9JxvZ+jXbRnTAKBggqhkjOPQQDAzCBmTEL
+MAkGA1UEBhMCVVMxIjAgBgNVBAoMGUFtYXpvbiBXZWIgU2VydmljZXMsIEluYy4x
+EzARBgNVBAsMCkFtYXpvbiBSRFMxCzAJBgNVBAgMAldBMTIwMAYDVQQDDClBbWF6
+b24gUkRTIGV1LWNlbnRyYWwtMSBSb290IENBIEVDQzM4NCBHMTEQMA4GA1UEBwwH
+U2VhdHRsZTAgFw0yMTA1MjEyMjMzMjRaGA8yMTIxMDUyMTIzMzMyNFowgZkxCzAJ
+BgNVBAYTAlVTMSIwIAYDVQQKDBlBbWF6b24gV2ViIFNlcnZpY2VzLCBJbmMuMRMw
+EQYDVQQLDApBbWF6b24gUkRTMQswCQYDVQQIDAJXQTEyMDAGA1UEAwwpQW1hem9u
+IFJEUyBldS1jZW50cmFsLTEgUm9vdCBDQSBFQ0MzODQgRzExEDAOBgNVBAcMB1Nl
+YXR0bGUwdjAQBgcqhkjOPQIBBgUrgQQAIgNiAATlBHiEM9LoEb1Hdnd5j2VpCDOU
+5nGuFoBD8ROUCkFLFh5mHrHfPXwBc63heW9WrP3qnDEm+UZEUvW7ROvtWCTPZdLz
+Z4XaqgAlSqeE2VfUyZOZzBSgUUJk7OlznXfkCMOjQjBAMA8GA1UdEwEB/wQFMAMB
+Af8wHQYDVR0OBBYEFDT/ThjQZl42Nv/4Z/7JYaPNMly2MA4GA1UdDwEB/wQEAwIB
+hjAKBggqhkjOPQQDAwNpADBmAjEAnZWmSgpEbmq+oiCa13l5aGmxSlfp9h12Orvw
+Dq/W5cENJz891QD0ufOsic5oGq1JAjEAp5kSJj0MxJBTHQze1Aa9gG4sjHBxXn98
+4MP1VGsQuhfndNHQb4V0Au7OWnOeiobq
+-----END CERTIFICATE-----
+-----BEGIN CERTIFICATE-----
+MIID/zCCAuegAwIBAgIRAMgnyikWz46xY6yRgiYwZ3swDQYJKoZIhvcNAQELBQAw
+gZcxCzAJBgNVBAYTAlVTMSIwIAYDVQQKDBlBbWF6b24gV2ViIFNlcnZpY2VzLCBJ
+bmMuMRMwEQYDVQQLDApBbWF6b24gUkRTMQswCQYDVQQIDAJXQTEwMC4GA1UEAwwn
+QW1hem9uIFJEUyBldS13ZXN0LTEgUm9vdCBDQSBSU0EyMDQ4IEcxMRAwDgYDVQQH
+DAdTZWF0dGxlMCAXDTIxMDUyMDE2NDkxMloYDzIwNjEwNTIwMTc0OTEyWjCBlzEL
+MAkGA1UEBhMCVVMxIjAgBgNVBAoMGUFtYXpvbiBXZWIgU2VydmljZXMsIEluYy4x
+EzARBgNVBAsMCkFtYXpvbiBSRFMxCzAJBgNVBAgMAldBMTAwLgYDVQQDDCdBbWF6
+b24gUkRTIGV1LXdlc3QtMSBSb290IENBIFJTQTIwNDggRzExEDAOBgNVBAcMB1Nl
+YXR0bGUwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCi8JYOc9cYSgZH
+gYPxLk6Xcc7HqzamvsnjYU98Dcb98y6iDqS46Ra2Ne02MITtU5MDL+qjxb8WGDZV
+RUA9ZS69tkTO3gldW8QdiSh3J6hVNJQW81F0M7ZWgV0gB3n76WCmfT4IWos0AXHM
+5v7M/M4tqVmCPViQnZb2kdVlM3/Xc9GInfSMCgNfwHPTXl+PXX+xCdNBePaP/A5C
+5S0oK3HiXaKGQAy3K7VnaQaYdiv32XUatlM4K2WS4AMKt+2cw3hTCjlmqKRHvYFQ
+veWCXAuc+U5PQDJ9SuxB1buFJZhT4VP3JagOuZbh5NWpIbOTxlAJOb5pGEDuJTKi
+1gQQQVEFAgMBAAGjQjBAMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFNXm+N87
+OFxK9Af/bjSxDCiulGUzMA4GA1UdDwEB/wQEAwIBhjANBgkqhkiG9w0BAQsFAAOC
+AQEAkqIbkgZ45spvrgRQ6n9VKzDLvNg+WciLtmVrqyohwwJbj4pYvWwnKQCkVc7c
+hUOSBmlSBa5REAPbH5o8bdt00FPRrD6BdXLXhaECKgjsHe1WW08nsequRKD8xVmc
+8bEX6sw/utBeBV3mB+3Zv7ejYAbDFM4vnRsWtO+XqgReOgrl+cwdA6SNQT9oW3e5
+rSQ+VaXgJtl9NhkiIysq9BeYigxqS/A13pHQp0COMwS8nz+kBPHhJTsajHCDc8F4
+HfLi6cgs9G0gaRhT8FCH66OdGSqn196sE7Y3bPFFFs/3U+vxvmQgoZC6jegQXAg5
+Prxd+VNXtNI/azitTysQPumH7A==
+-----END CERTIFICATE-----
+-----BEGIN CERTIFICATE-----
+MIIEBTCCAu2gAwIBAgIRAO8bekN7rUReuNPG8pSTKtEwDQYJKoZIhvcNAQELBQAw
+gZoxCzAJBgNVBAYTAlVTMSIwIAYDVQQKDBlBbWF6b24gV2ViIFNlcnZpY2VzLCBJ
+bmMuMRMwEQYDVQQLDApBbWF6b24gUkRTMQswCQYDVQQIDAJXQTEzMDEGA1UEAwwq
+QW1hem9uIFJEUyBldS1jZW50cmFsLTEgUm9vdCBDQSBSU0EyMDQ4IEcxMRAwDgYD
+VQQHDAdTZWF0dGxlMCAXDTIxMDUyMTIyMjM0N1oYDzIwNjEwNTIxMjMyMzQ3WjCB
+mjELMAkGA1UEBhMCVVMxIjAgBgNVBAoMGUFtYXpvbiBXZWIgU2VydmljZXMsIElu
+Yy4xEzARBgNVBAsMCkFtYXpvbiBSRFMxCzAJBgNVBAgMAldBMTMwMQYDVQQDDCpB
+bWF6b24gUkRTIGV1LWNlbnRyYWwtMSBSb290IENBIFJTQTIwNDggRzExEDAOBgNV
+BAcMB1NlYXR0bGUwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCTTYds
+Tray+Q9VA5j5jTh5TunHKFQzn68ZbOzdqaoi/Rq4ohfC0xdLrxCpfqn2TGDHN6Zi
+2qGK1tWJZEd1H0trhzd9d1CtGK+3cjabUmz/TjSW/qBar7e9MA67/iJ74Gc+Ww43
+A0xPNIWcL4aLrHaLm7sHgAO2UCKsrBUpxErOAACERScVYwPAfu79xeFcX7DmcX+e
+lIqY16pQAvK2RIzrekSYfLFxwFq2hnlgKHaVgZ3keKP+nmXcXmRSHQYUUr72oYNZ
+HcNYl2+gxCc9ccPEHM7xncVEKmb5cWEWvVoaysgQ+osi5f5aQdzgC2X2g2daKbyA
+XL/z5FM9GHpS5BJjAgMBAAGjQjBAMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYE
+FBDAiJ7Py9/A9etNa/ebOnx5l5MGMA4GA1UdDwEB/wQEAwIBhjANBgkqhkiG9w0B
+AQsFAAOCAQEALMh/+81fFPdJV/RrJUeoUvFCGMp8iaANu97NpeJyKitNOv7RoeVP
+WjivS0KcCqZaDBs+p6IZ0sLI5ZH098LDzzytcfZg0PsGqUAb8a0MiU/LfgDCI9Ee
+jsOiwaFB8k0tfUJK32NPcIoQYApTMT2e26lPzYORSkfuntme2PTHUnuC7ikiQrZk
+P+SZjWgRuMcp09JfRXyAYWIuix4Gy0eZ4rpRuaTK6mjAb1/LYoNK/iZ/gTeIqrNt
+l70OWRsWW8jEmSyNTIubGK/gGGyfuZGSyqoRX6OKHESkP6SSulbIZHyJ5VZkgtXo
+2XvyRyJ7w5pFyoofrL3Wv0UF8yt/GDszmg==
+-----END CERTIFICATE-----
+-----BEGIN CERTIFICATE-----
+MIIF/zCCA+egAwIBAgIRAMDk/F+rrhdn42SfE+ghPC8wDQYJKoZIhvcNAQEMBQAw
+gZcxCzAJBgNVBAYTAlVTMSIwIAYDVQQKDBlBbWF6b24gV2ViIFNlcnZpY2VzLCBJ
+bmMuMRMwEQYDVQQLDApBbWF6b24gUkRTMQswCQYDVQQIDAJXQTEwMC4GA1UEAwwn
+QW1hem9uIFJEUyBldS13ZXN0LTIgUm9vdCBDQSBSU0E0MDk2IEcxMRAwDgYDVQQH
+DAdTZWF0dGxlMCAXDTIxMDUyMTIyNTEyMloYDzIxMjEwNTIxMjM1MTIyWjCBlzEL
+MAkGA1UEBhMCVVMxIjAgBgNVBAoMGUFtYXpvbiBXZWIgU2VydmljZXMsIEluYy4x
+EzARBgNVBAsMCkFtYXpvbiBSRFMxCzAJBgNVBAgMAldBMTAwLgYDVQQDDCdBbWF6
+b24gUkRTIGV1LXdlc3QtMiBSb290IENBIFJTQTQwOTYgRzExEDAOBgNVBAcMB1Nl
+YXR0bGUwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQC2twMALVg9vRVu
+VNqsr6N8thmp3Dy8jEGTsm3GCQ+C5P2YcGlD/T/5icfWW84uF7Sx3ezcGlvsqFMf
+Ukj9sQyqtz7qfFFugyy7pa/eH9f48kWFHLbQYm9GEgbYBIrWMp1cy3vyxuMCwQN4
+DCncqU+yNpy0CprQJEha3PzY+3yJOjDQtc3zr99lyECCFJTDUucxHzyQvX89eL74
+uh8la0lKH3v9wPpnEoftbrwmm5jHNFdzj7uXUHUJ41N7af7z7QUfghIRhlBDiKtx
+5lYZemPCXajTc3ryDKUZC/b+B6ViXZmAeMdmQoPE0jwyEp/uaUcdp+FlUQwCfsBk
+ayPFEApTWgPiku2isjdeTVmEgL8bJTDUZ6FYFR7ZHcYAsDzcwHgIu3GGEMVRS3Uf
+ILmioiyly9vcK4Sa01ondARmsi/I0s7pWpKflaekyv5boJKD/xqwz9lGejmJHelf
+8Od2TyqJScMpB7Q8c2ROxBwqwB72jMCEvYigB+Wnbb8RipliqNflIGx938FRCzKL
+UQUBmNAznR/yRRL0wHf9UAE/8v9a09uZABeiznzOFAl/frHpgdAbC00LkFlnwwgX
+g8YfEFlkp4fLx5B7LtoO6uVNFVimLxtwirpyKoj3G4M/kvSTux8bTw0heBCmWmKR
+57MS6k7ODzbv+Kpeht2hqVZCNFMxoQIDAQABo0IwQDAPBgNVHRMBAf8EBTADAQH/
+MB0GA1UdDgQWBBRuMnDhJjoj7DcKALj+HbxEqj3r6jAOBgNVHQ8BAf8EBAMCAYYw
+DQYJKoZIhvcNAQEMBQADggIBALSnXfx72C3ldhBP5kY4Mo2DDaGQ8FGpTOOiD95d
+0rf7I9LrsBGVqu/Nir+kqqP80PB70+Jy9fHFFigXwcPBX3MpKGxK8Cel7kVf8t1B
+4YD6A6bqlzP+OUL0uGWfZpdpDxwMDI2Flt4NEldHgXWPjvN1VblEKs0+kPnKowyg
+jhRMgBbD/y+8yg0fIcjXUDTAw/+INcp21gWaMukKQr/8HswqC1yoqW9in2ijQkpK
+2RB9vcQ0/gXR0oJUbZQx0jn0OH8Agt7yfMAnJAdnHO4M3gjvlJLzIC5/4aGrRXZl
+JoZKfJ2fZRnrFMi0nhAYDeInoS+Rwx+QzaBk6fX5VPyCj8foZ0nmqvuYoydzD8W5
+mMlycgxFqS+DUmO+liWllQC4/MnVBlHGB1Cu3wTj5kgOvNs/k+FW3GXGzD3+rpv0
+QTLuwSbMr+MbEThxrSZRSXTCQzKfehyC+WZejgLb+8ylLJUA10e62o7H9PvCrwj+
+ZDVmN7qj6amzvndCP98sZfX7CFZPLfcBd4wVIjHsFjSNEwWHOiFyLPPG7cdolGKA
+lOFvonvo4A1uRc13/zFeP0Xi5n5OZ2go8aOOeGYdI2vB2sgH9R2IASH/jHmr0gvY
+0dfBCcfXNgrS0toq0LX/y+5KkKOxh52vEYsJLdhqrveuZhQnsFEm/mFwjRXkyO7c
+2jpC
+-----END CERTIFICATE-----
+-----BEGIN CERTIFICATE-----
+MIIGADCCA+igAwIBAgIQYe0HgSuFFP9ivYM2vONTrTANBgkqhkiG9w0BAQwFADCB
+mDELMAkGA1UEBhMCVVMxIjAgBgNVBAoMGUFtYXpvbiBXZWIgU2VydmljZXMsIElu
+Yy4xEzARBgNVBAsMCkFtYXpvbiBSRFMxCzAJBgNVBAgMAldBMTEwLwYDVQQDDChB
+bWF6b24gUkRTIGV1LXNvdXRoLTEgUm9vdCBDQSBSU0E0MDk2IEcxMRAwDgYDVQQH
+DAdTZWF0dGxlMCAXDTIxMDUxOTE4MzMyMVoYDzIxMjEwNTE5MTkzMzIxWjCBmDEL
+MAkGA1UEBhMCVVMxIjAgBgNVBAoMGUFtYXpvbiBXZWIgU2VydmljZXMsIEluYy4x
+EzARBgNVBAsMCkFtYXpvbiBSRFMxCzAJBgNVBAgMAldBMTEwLwYDVQQDDChBbWF6
+b24gUkRTIGV1LXNvdXRoLTEgUm9vdCBDQSBSU0E0MDk2IEcxMRAwDgYDVQQHDAdT
+ZWF0dGxlMIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEAuO7QPKfPMTo2
+POQWvzDLwi5f++X98hGjORI1zkN9kotCYH5pAzSBwBPoMNaIfedgmsIxGHj2fq5G
+4oXagNhNuGP79Zl6uKW5H7S74W7aWM8C0s8zuxMOI4GZy5h2IfQk3m/3AzZEX5w8
+UtNPkzo2feDVOkerHT+j+vjXgAxZ4wHnuMDcRT+K4r9EXlAH6X9b/RO0JlfEwmNz
+xlqqGxocq9qRC66N6W0HF2fNEAKP84n8H80xcZBOBthQORRi8HSmKcPdmrvwCuPz
+M+L+j18q6RAVaA0ABbD0jMWcTf0UvjUfBStn5mvu/wGlLjmmRkZsppUTRukfwqXK
+yltUsTq0tOIgCIpne5zA4v+MebbR5JBnsvd4gdh5BI01QH470yB7BkUefZ9bobOm
+OseAAVXcYFJKe4DAA6uLDrqOfFSxV+CzVvEp3IhLRaik4G5MwI/h2c/jEYDqkg2J
+HMflxc2gcSMdk7E5ByLz5f6QrFfSDFk02ZJTs4ssbbUEYohht9znPMQEaWVqATWE
+3n0VspqZyoBNkH/agE5GiGZ/k/QyeqzMNj+c9kr43Upu8DpLrz8v2uAp5xNj3YVg
+ihaeD6GW8+PQoEjZ3mrCmH7uGLmHxh7Am59LfEyNrDn+8Rq95WvkmbyHSVxZnBmo
+h/6O3Jk+0/QhIXZ2hryMflPcYWeRGH0CAwEAAaNCMEAwDwYDVR0TAQH/BAUwAwEB
+/zAdBgNVHQ4EFgQU2eFK7+R3x/me8roIBNxBrplkM6EwDgYDVR0PAQH/BAQDAgGG
+MA0GCSqGSIb3DQEBDAUAA4ICAQB5gWFe5s7ObQFj1fTO9L6gYgtFhnwdmxU0q8Ke
+HWCrdFmyXdC39qdAFOwM5/7fa9zKmiMrZvy9HNvCXEp4Z7z9mHhBmuqPZQx0qPgU
+uLdP8wGRuWryzp3g2oqkX9t31Z0JnkbIdp7kfRT6ME4I4VQsaY5Y3mh+hIHOUvcy
+p+98i3UuEIcwJnVAV9wTTzrWusZl9iaQ1nSYbmkX9bBssJ2GmtW+T+VS/1hJ/Q4f
+AlE3dOQkLFoPPb3YRWBHr2n1LPIqMVwDNAuWavRA2dSfaLl+kzbn/dua7HTQU5D4
+b2Fu2vLhGirwRJe+V7zdef+tI7sngXqjgObyOeG5O2BY3s+um6D4fS0Th3QchMO7
+0+GwcIgSgcjIjlrt6/xJwJLE8cRkUUieYKq1C4McpZWTF30WnzOPUzRzLHkcNzNA
+0A7sKMK6QoYWo5Rmo8zewUxUqzc9oQSrYADP7PEwGncLtFe+dlRFx+PA1a+lcIgo
+1ZGfXigYtQ3VKkcknyYlJ+hN4eCMBHtD81xDy9iP2MLE41JhLnoB2rVEtewO5diF
+7o95Mwl84VMkLhhHPeGKSKzEbBtYYBifHNct+Bst8dru8UumTltgfX6urH3DN+/8
+JF+5h3U8oR2LL5y76cyeb+GWDXXy9zoQe2QvTyTy88LwZq1JzujYi2k8QiLLhFIf
+FEv9Bg==
+-----END CERTIFICATE-----
+-----BEGIN CERTIFICATE-----
+MIICsDCCAjagAwIBAgIRAMgApnfGYPpK/fD0dbN2U4YwCgYIKoZIzj0EAwMwgZcx
+CzAJBgNVBAYTAlVTMSIwIAYDVQQKDBlBbWF6b24gV2ViIFNlcnZpY2VzLCBJbmMu
+MRMwEQYDVQQLDApBbWF6b24gUkRTMQswCQYDVQQIDAJXQTEwMC4GA1UEAwwnQW1h
+em9uIFJEUyBldS1zb3V0aC0xIFJvb3QgQ0EgRUNDMzg0IEcxMRAwDgYDVQQHDAdT
+ZWF0dGxlMCAXDTIxMDUxOTE4MzgxMVoYDzIxMjEwNTE5MTkzODExWjCBlzELMAkG
+A1UEBhMCVVMxIjAgBgNVBAoMGUFtYXpvbiBXZWIgU2VydmljZXMsIEluYy4xEzAR
+BgNVBAsMCkFtYXpvbiBSRFMxCzAJBgNVBAgMAldBMTAwLgYDVQQDDCdBbWF6b24g
+UkRTIGV1LXNvdXRoLTEgUm9vdCBDQSBFQ0MzODQgRzExEDAOBgNVBAcMB1NlYXR0
+bGUwdjAQBgcqhkjOPQIBBgUrgQQAIgNiAAQfEWl6d4qSuIoECdZPp+39LaKsfsX7
+THs3/RrtT0+h/jl3bjZ7Qc68k16x+HGcHbaayHfqD0LPdzH/kKtNSfQKqemdxDQh
+Z4pwkixJu8T1VpXZ5zzCvBXCl75UqgEFS92jQjBAMA8GA1UdEwEB/wQFMAMBAf8w
+HQYDVR0OBBYEFFPrSNtWS5JU+Tvi6ABV231XbjbEMA4GA1UdDwEB/wQEAwIBhjAK
+BggqhkjOPQQDAwNoADBlAjEA+a7hF1IrNkBd2N/l7IQYAQw8chnRZDzh4wiGsZsC
+6A83maaKFWUKIb3qZYXFSi02AjAbp3wxH3myAmF8WekDHhKcC2zDvyOiKLkg9Y6v
+ZVmyMR043dscQbcsVoacOYv198c=
+-----END CERTIFICATE-----
+-----BEGIN CERTIFICATE-----
+MIICtDCCAjqgAwIBAgIRAPhVkIsQ51JFhD2kjFK5uAkwCgYIKoZIzj0EAwMwgZkx
+CzAJBgNVBAYTAlVTMSIwIAYDVQQKDBlBbWF6b24gV2ViIFNlcnZpY2VzLCBJbmMu
+MRMwEQYDVQQLDApBbWF6b24gUkRTMQswCQYDVQQIDAJXQTEyMDAGA1UEAwwpQW1h
+em9uIFJEUyBldS1jZW50cmFsLTIgUm9vdCBDQSBFQ0MzODQgRzExEDAOBgNVBAcM
+B1NlYXR0bGUwIBcNMjIwNjA2MjEyOTE3WhgPMjEyMjA2MDYyMjI5MTdaMIGZMQsw
+CQYDVQQGEwJVUzEiMCAGA1UECgwZQW1hem9uIFdlYiBTZXJ2aWNlcywgSW5jLjET
+MBEGA1UECwwKQW1hem9uIFJEUzELMAkGA1UECAwCV0ExMjAwBgNVBAMMKUFtYXpv
+biBSRFMgZXUtY2VudHJhbC0yIFJvb3QgQ0EgRUNDMzg0IEcxMRAwDgYDVQQHDAdT
+ZWF0dGxlMHYwEAYHKoZIzj0CAQYFK4EEACIDYgAEA5xnIEBtG5b2nmbj49UEwQza
+yX0844fXjccYzZ8xCDUe9dS2XOUi0aZlGblgSe/3lwjg8fMcKXLObGGQfgIx1+5h
+AIBjORis/dlyN5q/yH4U5sjS8tcR0GDGVHrsRUZCo0IwQDAPBgNVHRMBAf8EBTAD
+AQH/MB0GA1UdDgQWBBRK+lSGutXf4DkTjR3WNfv4+KeNFTAOBgNVHQ8BAf8EBAMC
+AYYwCgYIKoZIzj0EAwMDaAAwZQIxAJ4NxQ1Gerqr70ZrnUqc62Vl8NNqTzInamCG
+Kce3FTsMWbS9qkgrjZkO9QqOcGIw/gIwSLrwUT+PKr9+H9eHyGvpq9/3AIYSnFkb
+Cf3dyWPiLKoAtLFwjzB/CkJlsAS1c8dS
+-----END CERTIFICATE-----
+-----BEGIN CERTIFICATE-----
+MIIF/jCCA+agAwIBAgIQGZH12Q7x41qIh9vDu9ikTjANBgkqhkiG9w0BAQwFADCB
+lzELMAkGA1UEBhMCVVMxIjAgBgNVBAoMGUFtYXpvbiBXZWIgU2VydmljZXMsIElu
+Yy4xEzARBgNVBAsMCkFtYXpvbiBSRFMxCzAJBgNVBAgMAldBMTAwLgYDVQQDDCdB
+bWF6b24gUkRTIGV1LXdlc3QtMyBSb290IENBIFJTQTQwOTYgRzExEDAOBgNVBAcM
+B1NlYXR0bGUwIBcNMjEwNTI1MjIyMjMzWhgPMjEyMTA1MjUyMzIyMzNaMIGXMQsw
+CQYDVQQGEwJVUzEiMCAGA1UECgwZQW1hem9uIFdlYiBTZXJ2aWNlcywgSW5jLjET
+MBEGA1UECwwKQW1hem9uIFJEUzELMAkGA1UECAwCV0ExMDAuBgNVBAMMJ0FtYXpv
+biBSRFMgZXUtd2VzdC0zIFJvb3QgQ0EgUlNBNDA5NiBHMTEQMA4GA1UEBwwHU2Vh
+dHRsZTCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBAMqE47sHXWzdpuqj
+JHb+6jM9tDbQLDFnYjDWpq4VpLPZhb7xPNh9gnYYTPKG4avG421EblAHqzy9D2pN
+1z90yKbIfUb/Sy2MhQbmZomsObhONEra06fJ0Dydyjswf1iYRp2kwpx5AgkVoNo7
+3dlws73zFjD7ImKvUx2C7B75bhnw2pJWkFnGcswl8fZt9B5Yt95sFOKEz2MSJE91
+kZlHtya19OUxZ/cSGci4MlOySzqzbGwUqGxEIDlY8I39VMwXaYQ8uXUN4G780VcL
+u46FeyRGxZGz2n3hMc805WAA1V5uir87vuirTvoSVREET97HVRGVVNJJ/FM6GXr1
+VKtptybbo81nefYJg9KBysxAa2Ao2x2ry/2ZxwhS6VZ6v1+90bpZA1BIYFEDXXn/
+dW07HSCFnYSlgPtSc+Muh15mdr94LspYeDqNIierK9i4tB6ep7llJAnq0BU91fM2
+JPeqyoTtc3m06QhLf68ccSxO4l8Hmq9kLSHO7UXgtdjfRVaffngopTNk8qK7bIb7
+LrgkqhiQw/PRCZjUdyXL153/fUcsj9nFNe25gM4vcFYwH6c5trd2tUl31NTi1MfG
+Mgp3d2dqxQBIYANkEjtBDMy3SqQLIo9EymqmVP8xx2A/gCBgaxvMAsI6FSWRoC7+
+hqJ8XH4mFnXSHKtYMe6WPY+/XZgtAgMBAAGjQjBAMA8GA1UdEwEB/wQFMAMBAf8w
+HQYDVR0OBBYEFIkXqTnllT/VJnI2NqipA4XV8rh1MA4GA1UdDwEB/wQEAwIBhjAN
+BgkqhkiG9w0BAQwFAAOCAgEAKjSle8eenGeHgT8pltWCw/HzWyQruVKhfYIBfKJd
+MhV4EnH5BK7LxBIvpXGsFUrb0ThzSw0fn0zoA9jBs3i/Sj6KyeZ9qUF6b8ycDXd+
+wHonmJiQ7nk7UuMefaYAfs06vosgl1rI7eBHC0itexIQmKh0aX+821l4GEgEoSMf
+loMFTLXv2w36fPHHCsZ67ODldgcZbKNnpCTX0YrCwEYO3Pz/L398btiRcWGrewrK
+jdxAAyietra8DRno1Zl87685tfqc6HsL9v8rVw58clAo9XAQvT+fmSOFw/PogRZ7
+OMHUat3gu/uQ1M5S64nkLLFsKu7jzudBuoNmcJysPlzIbqJ7vYc82OUGe9ucF3wi
+3tbKQ983hdJiTExVRBLX/fYjPsGbG3JtPTv89eg2tjWHlPhCDMMxyRKl6isu2RTq
+6VT489Z2zQrC33MYF8ZqO1NKjtyMAMIZwxVu4cGLkVsqFmEV2ScDHa5RadDyD3Ok
+m+mqybhvEVm5tPgY6p0ILPMN3yvJsMSPSvuBXhO/X5ppNnpw9gnxpwbjQKNhkFaG
+M5pkADZ14uRguOLM4VthSwUSEAr5VQYCFZhEwK+UOyJAGiB/nJz6IxL5XBNUXmRM
+Hl8Xvz4riq48LMQbjcVQj0XvH941yPh+P8xOi00SGaQRaWp55Vyr4YKGbV0mEDz1
+r1o=
+-----END CERTIFICATE-----
+-----BEGIN CERTIFICATE-----
+MIIF/zCCA+egAwIBAgIRAKwYju1QWxUZpn6D1gOtwgQwDQYJKoZIhvcNAQEMBQAw
+gZcxCzAJBgNVBAYTAlVTMSIwIAYDVQQKDBlBbWF6b24gV2ViIFNlcnZpY2VzLCBJ
+bmMuMRMwEQYDVQQLDApBbWF6b24gUkRTMQswCQYDVQQIDAJXQTEwMC4GA1UEAwwn
+QW1hem9uIFJEUyBldS13ZXN0LTEgUm9vdCBDQSBSU0E0MDk2IEcxMRAwDgYDVQQH
+DAdTZWF0dGxlMCAXDTIxMDUyMDE2NTM1NFoYDzIxMjEwNTIwMTc1MzU0WjCBlzEL
+MAkGA1UEBhMCVVMxIjAgBgNVBAoMGUFtYXpvbiBXZWIgU2VydmljZXMsIEluYy4x
+EzARBgNVBAsMCkFtYXpvbiBSRFMxCzAJBgNVBAgMAldBMTAwLgYDVQQDDCdBbWF6
+b24gUkRTIGV1LXdlc3QtMSBSb290IENBIFJTQTQwOTYgRzExEDAOBgNVBAcMB1Nl
+YXR0bGUwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQCKdBP1U4lqWWkc
+Cb25/BKRTsvNVnISiKocva8GAzJyKfcGRa85gmgu41U+Hz6+39K+XkRfM0YS4BvQ
+F1XxWT0bNyypuvwCvmYShSTjN1TY0ltncDddahTajE/4MdSOZb/c98u0yt03cH+G
+hVwRyT50h0v/UEol50VfwcVAEZEgcQQYhf1IFUFlIvKpmDOqLuFakOnc7c9akK+i
+ivST+JO1tgowbnNkn2iLlSSgUWgb1gjaOsNfysagv1RXdlyPw3EyfwkFifAQvF2P
+Q0ayYZfYS640cccv7efM1MSVyFHR9PrrDsF/zr2S2sGPbeHr7R/HwLl+S5J/l9N9
+y0rk6IHAWV4dEkOvgpnuJKURwA48iu1Hhi9e4moNS6eqoK2KmY3VFpuiyWcA73nH
+GSmyaH+YuMrF7Fnuu7GEHZL/o6+F5cL3mj2SJJhL7sz0ryf5Cs5R4yN9BIEj/f49
+wh84pM6nexoI0Q4wiSFCxWiBpjSmOK6h7z6+2utaB5p20XDZHhxAlmlx4vMuWtjh
+XckgRFxc+ZpVMU3cAHUpVEoO49e/+qKEpPzp8Xg4cToKw2+AfTk3cmyyXQfGwXMQ
+ZUHNZ3w9ILMWihGCM2aGUsLcGDRennvNmnmin/SENsOQ8Ku0/a3teEzwV9cmmdYz
+5iYs1YtgPvKFobY6+T2RXXh+A5kprwIDAQABo0IwQDAPBgNVHRMBAf8EBTADAQH/
+MB0GA1UdDgQWBBSyUrsQVnKmA8z6/2Ech0rCvqpNmTAOBgNVHQ8BAf8EBAMCAYYw
+DQYJKoZIhvcNAQEMBQADggIBAFlj3IFmgiFz5lvTzFTRizhVofhTJsGr14Yfkuc7
+UrXPuXOwJomd4uot2d/VIeGJpfnuS84qGdmQyGewGTJ9inatHsGZgHl9NHNWRwKZ
+lTKTbBiq7aqgtUSFa06v202wpzU+1kadxJJePrbABxiXVfOmIW/a1a4hPNcT3syH
+FIEg1+CGsp71UNjBuwg3JTKWna0sLSKcxLOSOvX1fzxK5djzVpEsvQMB4PSAzXca
+vENgg2ErTwgTA+4s6rRtiBF9pAusN1QVuBahYP3ftrY6f3ycS4K65GnqscyfvKt5
+YgjtEKO3ZeeX8NpubMbzC+0Z6tVKfPFk/9TXuJtwvVeqow0YMrLLyRiYvK7EzJ97
+rrkxoKnHYQSZ+rH2tZ5SE392/rfk1PJL0cdHnkpDkUDO+8cKsFjjYKAQSNC52sKX
+74AVh6wMwxYwVZZJf2/2XxkjMWWhKNejsZhUkTISSmiLs+qPe3L67IM7GyKm9/m6
+R3r8x6NGjhTsKH64iYJg7AeKeax4b2e4hBb6GXFftyOs7unpEOIVkJJgM6gh3mwn
+R7v4gwFbLKADKt1vHuerSZMiTuNTGhSfCeDM53XI/mjZl2HeuCKP1mCDLlaO+gZR
+Q/G+E0sBKgEX4xTkAc3kgkuQGfExdGtnN2U2ehF80lBHB8+2y2E+xWWXih/ZyIcW
+wOx+
+-----END CERTIFICATE-----
+-----BEGIN CERTIFICATE-----
+MIIGBDCCA+ygAwIBAgIQM4C8g5iFRucSWdC8EdqHeDANBgkqhkiG9w0BAQwFADCB
+mjELMAkGA1UEBhMCVVMxIjAgBgNVBAoMGUFtYXpvbiBXZWIgU2VydmljZXMsIElu
+Yy4xEzARBgNVBAsMCkFtYXpvbiBSRFMxCzAJBgNVBAgMAldBMTMwMQYDVQQDDCpB
+bWF6b24gUkRTIGV1LWNlbnRyYWwtMSBSb290IENBIFJTQTQwOTYgRzExEDAOBgNV
+BAcMB1NlYXR0bGUwIBcNMjEwNTIxMjIyODI2WhgPMjEyMTA1MjEyMzI4MjZaMIGa
+MQswCQYDVQQGEwJVUzEiMCAGA1UECgwZQW1hem9uIFdlYiBTZXJ2aWNlcywgSW5j
+LjETMBEGA1UECwwKQW1hem9uIFJEUzELMAkGA1UECAwCV0ExMzAxBgNVBAMMKkFt
+YXpvbiBSRFMgZXUtY2VudHJhbC0xIFJvb3QgQ0EgUlNBNDA5NiBHMTEQMA4GA1UE
+BwwHU2VhdHRsZTCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBANeTsD/u
+6saPiY4Sg0GlJlMXMBltnrcGAEkwq34OKQ0bCXqcoNJ2rcAMmuFC5x9Ho1Y3YzB7
+NO2GpIh6bZaO76GzSv4cnimcv9n/sQSYXsGbPD+bAtnN/RvNW1avt4C0q0/ghgF1
+VFS8JihIrgPYIArAmDtGNEdl5PUrdi9y6QGggbRfidMDdxlRdZBe1C18ZdgERSEv
+UgSTPRlVczONG5qcQkUGCH83MMqL5MKQiby/Br5ZyPq6rxQMwRnQ7tROuElzyYzL
+7d6kke+PNzG1mYy4cbYdjebwANCtZ2qYRSUHAQsOgybRcSoarv2xqcjO9cEsDiRU
+l97ToadGYa4VVERuTaNZxQwrld4mvzpyKuirqZltOqg0eoy8VUsaRPL3dc5aChR0
+dSrBgRYmSAClcR2/2ZCWpXemikwgt031Dsc0A/+TmVurrsqszwbr0e5xqMow9LzO
+MI/JtLd0VFtoOkL/7GG2tN8a+7gnLFxpv+AQ0DH5n4k/BY/IyS+H1erqSJhOTQ11
+vDOFTM5YplB9hWV9fp5PRs54ILlHTlZLpWGs3I2BrJwzRtg/rOlvsosqcge9ryai
+AKm2j+JBg5wJ19R8oxRy8cfrNTftZePpISaLTyV2B16w/GsSjqixjTQe9LRN2DHk
+cC+HPqYyzW2a3pUVyTGHhW6a7YsPBs9yzt6hAgMBAAGjQjBAMA8GA1UdEwEB/wQF
+MAMBAf8wHQYDVR0OBBYEFIqA8QkOs2cSirOpCuKuOh9VDfJfMA4GA1UdDwEB/wQE
+AwIBhjANBgkqhkiG9w0BAQwFAAOCAgEAOUI90mEIsa+vNJku0iUwdBMnHiO4gm7E
+5JloP7JG0xUr7d0hypDorMM3zVDAL+aZRHsq8n934Cywj7qEp1304UF6538ByGdz
+tkfacJsUSYfdlNJE9KbA4T+U+7SNhj9jvePpVjdQbhgzxITE9f8CxY/eM40yluJJ
+PhbaWvOiRagzo74wttlcDerzLT6Y/JrVpWhnB7IY8HvzK+BwAdaCsBUPC3HF+kth
+CIqLq7J3YArTToejWZAp5OOI6DLPM1MEudyoejL02w0jq0CChmZ5i55ElEMnapRX
+7GQTARHmjgAOqa95FjbHEZzRPqZ72AtZAWKFcYFNk+grXSeWiDgPFOsq6mDg8DDB
+0kfbYwKLFFCC9YFmYzR2YrWw2NxAScccUc2chOWAoSNHiqBbHR8ofrlJSWrtmKqd
+YRCXzn8wqXnTS3NNHNccqJ6dN+iMr9NGnytw8zwwSchiev53Fpc1mGrJ7BKTWH0t
+ZrA6m32wzpMymtKozlOPYoE5mtZEzrzHEXfa44Rns7XIHxVQSXVWyBHLtIsZOrvW
+U5F41rQaFEpEeUQ7sQvqUoISfTUVRNDn6GK6YaccEhCji14APLFIvhRQUDyYMIiM
+4vll0F/xgVRHTgDVQ8b8sxdhSYlqB4Wc2Ym41YRz+X2yPqk3typEZBpc4P5Tt1/N
+89cEIGdbjsA=
+-----END CERTIFICATE-----
+-----BEGIN CERTIFICATE-----
+MIIEADCCAuigAwIBAgIQYjbPSg4+RNRD3zNxO1fuKDANBgkqhkiG9w0BAQsFADCB
+mDELMAkGA1UEBhMCVVMxIjAgBgNVBAoMGUFtYXpvbiBXZWIgU2VydmljZXMsIElu
+Yy4xEzARBgNVBAsMCkFtYXpvbiBSRFMxCzAJBgNVBAgMAldBMTEwLwYDVQQDDChB
+bWF6b24gUkRTIGV1LW5vcnRoLTEgUm9vdCBDQSBSU0EyMDQ4IEcxMRAwDgYDVQQH
+DAdTZWF0dGxlMCAXDTIxMDUyNDIwNTkyMVoYDzIwNjEwNTI0MjE1OTIxWjCBmDEL
+MAkGA1UEBhMCVVMxIjAgBgNVBAoMGUFtYXpvbiBXZWIgU2VydmljZXMsIEluYy4x
+EzARBgNVBAsMCkFtYXpvbiBSRFMxCzAJBgNVBAgMAldBMTEwLwYDVQQDDChBbWF6
+b24gUkRTIGV1LW5vcnRoLTEgUm9vdCBDQSBSU0EyMDQ4IEcxMRAwDgYDVQQHDAdT
+ZWF0dGxlMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA179eQHxcV0YL
+XMkqEmhSBazHhnRVd8yICbMq82PitE3BZcnv1Z5Zs/oOgNmMkOKae4tCXO/41JCX
+wAgbs/eWWi+nnCfpQ/FqbLPg0h3dqzAgeszQyNl9IzTzX4Nd7JFRBVJXPIIKzlRf
++GmFsAhi3rYgDgO27pz3ciahVSN+CuACIRYnA0K0s9lhYdddmrW/SYeWyoB7jPa2
+LmWpAs7bDOgS4LlP2H3eFepBPgNufRytSQUVA8f58lsE5w25vNiUSnrdlvDrIU5n
+Qwzc7NIZCx4qJpRbSKWrUtbyJriWfAkGU7i0IoainHLn0eHp9bWkwb9D+C/tMk1X
+ERZw2PDGkwIDAQABo0IwQDAPBgNVHRMBAf8EBTADAQH/MB0GA1UdDgQWBBSFmR7s
+dAblusFN+xhf1ae0KUqhWTAOBgNVHQ8BAf8EBAMCAYYwDQYJKoZIhvcNAQELBQAD
+ggEBAHsXOpjPMyH9lDhPM61zYdja1ebcMVgfUvsDvt+w0xKMKPhBzYDMs/cFOi1N
+Q8LV79VNNfI2NuvFmGygcvTIR+4h0pqqZ+wjWl3Kk5jVxCrbHg3RBX02QLumKd/i
+kwGcEtTUvTssn3SM8bgM0/1BDXgImZPC567ciLvWDo0s/Fe9dJJC3E0G7d/4s09n
+OMdextcxFuWBZrBm/KK3QF0ByA8MG3//VXaGO9OIeeOJCpWn1G1PjT1UklYhkg61
+EbsTiZVA2DLd1BGzfU4o4M5mo68l0msse/ndR1nEY6IywwpgIFue7+rEleDh6b9d
+PYkG1rHVw2I0XDG4o17aOn5E94I=
+-----END CERTIFICATE-----
+-----BEGIN CERTIFICATE-----
+MIIEADCCAuigAwIBAgIQC6W4HFghUkkgyQw14a6JljANBgkqhkiG9w0BAQsFADCB
+mDELMAkGA1UEBhMCVVMxIjAgBgNVBAoMGUFtYXpvbiBXZWIgU2VydmljZXMsIElu
+Yy4xEzARBgNVBAsMCkFtYXpvbiBSRFMxCzAJBgNVBAgMAldBMTEwLwYDVQQDDChB
+bWF6b24gUkRTIGV1LXNvdXRoLTIgUm9vdCBDQSBSU0EyMDQ4IEcxMRAwDgYDVQQH
+DAdTZWF0dGxlMCAXDTIyMDUyMzE4MTYzMloYDzIwNjIwNTIzMTkxNjMyWjCBmDEL
+MAkGA1UEBhMCVVMxIjAgBgNVBAoMGUFtYXpvbiBXZWIgU2VydmljZXMsIEluYy4x
+EzARBgNVBAsMCkFtYXpvbiBSRFMxCzAJBgNVBAgMAldBMTEwLwYDVQQDDChBbWF6
+b24gUkRTIGV1LXNvdXRoLTIgUm9vdCBDQSBSU0EyMDQ4IEcxMRAwDgYDVQQHDAdT
+ZWF0dGxlMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAiM/t4FV2R9Nx
+UQG203UY83jInTa/6TMq0SPyg617FqYZxvz2kkx09x3dmxepUg9ttGMlPgjsRZM5
+LCFEi1FWk+hxHzt7vAdhHES5tdjwds3aIkgNEillmRDVrUsbrDwufLaa+MMDO2E1
+wQ/JYFXw16WBCCi2g1EtyQ2Xp+tZDX5IWOTnvhZpW8vVDptZ2AcJ5rMhfOYO3OsK
+5EF0GGA5ldzuezP+BkrBYGJ4wVKGxeaq9+5AT8iVZrypjwRkD7Y5CurywK3+aBwm
+s9Q5Nd8t45JCOUzYp92rFKsCriD86n/JnEvgDfdP6Hvtm0/DkwXK40Wz2q0Zrd0k
+mjP054NRPwIDAQABo0IwQDAPBgNVHRMBAf8EBTADAQH/MB0GA1UdDgQWBBRR7yqd
+SfKcX2Q8GzhcVucReIpewTAOBgNVHQ8BAf8EBAMCAYYwDQYJKoZIhvcNAQELBQAD
+ggEBAEszBRDwXcZyNm07VcFwI1Im94oKwKccuKYeJEsizTBsVon8VpEiMwDs+yGu
+3p8kBhvkLwWybkD/vv6McH7T5b9jDX2DoOudqYnnaYeypsPH/00Vh3LvKagqzQza
+orWLx+0tLo8xW4BtU+Wrn3JId8LvAhxyYXTn9bm+EwPcStp8xGLwu53OPD1RXYuy
+uu+3ps/2piP7GVfou7H6PRaqbFHNfiGg6Y+WA0HGHiJzn8uLmrRJ5YRdIOOG9/xi
+qTmAZloUNM7VNuurcMM2hWF494tQpsQ6ysg2qPjbBqzlGoOt3GfBTOZmqmwmqtam
+K7juWM/mdMQAJ3SMlE5wI8nVdx4=
+-----END CERTIFICATE-----
+-----BEGIN CERTIFICATE-----
+MIICrjCCAjSgAwIBAgIRAL9SdzVPcpq7GOpvdGoM80IwCgYIKoZIzj0EAwMwgZYx
+CzAJBgNVBAYTAlVTMSIwIAYDVQQKDBlBbWF6b24gV2ViIFNlcnZpY2VzLCBJbmMu
+MRMwEQYDVQQLDApBbWF6b24gUkRTMQswCQYDVQQIDAJXQTEvMC0GA1UEAwwmQW1h
+em9uIFJEUyBldS13ZXN0LTEgUm9vdCBDQSBFQ0MzODQgRzExEDAOBgNVBAcMB1Nl
+YXR0bGUwIBcNMjEwNTIwMTY1ODA3WhgPMjEyMTA1MjAxNzU4MDdaMIGWMQswCQYD
+VQQGEwJVUzEiMCAGA1UECgwZQW1hem9uIFdlYiBTZXJ2aWNlcywgSW5jLjETMBEG
+A1UECwwKQW1hem9uIFJEUzELMAkGA1UECAwCV0ExLzAtBgNVBAMMJkFtYXpvbiBS
+RFMgZXUtd2VzdC0xIFJvb3QgQ0EgRUNDMzg0IEcxMRAwDgYDVQQHDAdTZWF0dGxl
+MHYwEAYHKoZIzj0CAQYFK4EEACIDYgAEJWDgXebvwjR+Ce+hxKOLbnsfN5W5dOlP
+Zn8kwWnD+SLkU81Eac/BDJsXGrMk6jFD1vg16PEkoSevsuYWlC8xR6FmT6F6pmeh
+fsMGOyJpfK4fyoEPhKeQoT23lFIc5Orjo0IwQDAPBgNVHRMBAf8EBTADAQH/MB0G
+A1UdDgQWBBSVNAN1CHAz0eZ77qz2adeqjm31TzAOBgNVHQ8BAf8EBAMCAYYwCgYI
+KoZIzj0EAwMDaAAwZQIxAMlQeHbcjor49jqmcJ9gRLWdEWpXG8thIf6zfYQ/OEAg
+d7GDh4fR/OUk0VfjsBUN/gIwZB0bGdXvK38s6AAE/9IT051cz/wMe9GIrX1MnL1T
+1F5OqnXJdiwfZRRTHsRQ/L00
+-----END CERTIFICATE-----
+-----BEGIN CERTIFICATE-----
+MIIGBDCCA+ygAwIBAgIQalr16vDfX4Rsr+gfQ4iVFDANBgkqhkiG9w0BAQwFADCB
+mjELMAkGA1UEBhMCVVMxIjAgBgNVBAoMGUFtYXpvbiBXZWIgU2VydmljZXMsIElu
+Yy4xEzARBgNVBAsMCkFtYXpvbiBSRFMxCzAJBgNVBAgMAldBMTMwMQYDVQQDDCpB
+bWF6b24gUkRTIGV1LWNlbnRyYWwtMiBSb290IENBIFJTQTQwOTYgRzExEDAOBgNV
+BAcMB1NlYXR0bGUwIBcNMjIwNjA2MjEyNTIzWhgPMjEyMjA2MDYyMjI1MjNaMIGa
+MQswCQYDVQQGEwJVUzEiMCAGA1UECgwZQW1hem9uIFdlYiBTZXJ2aWNlcywgSW5j
+LjETMBEGA1UECwwKQW1hem9uIFJEUzELMAkGA1UECAwCV0ExMzAxBgNVBAMMKkFt
+YXpvbiBSRFMgZXUtY2VudHJhbC0yIFJvb3QgQ0EgUlNBNDA5NiBHMTEQMA4GA1UE
+BwwHU2VhdHRsZTCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBANbHbFg7
+2VhZor1YNtez0VlNFaobS3PwOMcEn45BE3y7HONnElIIWXGQa0811M8V2FnyqnE8
+Z5aO1EuvijvWf/3D8DPZkdmAkIfh5hlZYY6Aatr65kEOckwIAm7ZZzrwFogYuaFC
+z/q0CW+8gxNK+98H/zeFx+IxiVoPPPX6UlrLvn+R6XYNERyHMLNgoZbbS5gGHk43
+KhENVv3AWCCcCc85O4rVd+DGb2vMVt6IzXdTQt6Kih28+RGph+WDwYmf+3txTYr8
+xMcCBt1+whyCPlMbC+Yn/ivtCO4LRf0MPZDRQrqTTrFf0h/V0BGEUmMGwuKgmzf5
+Kl9ILdWv6S956ioZin2WgAxhcn7+z//sN++zkqLreSf90Vgv+A7xPRqIpTdJ/nWG
+JaAOUofBfsDsk4X4SUFE7xJa1FZAiu2lqB/E+y7jnWOvFRalzxVJ2Y+D/ZfUfrnK
+4pfKtyD1C6ni1celrZrAwLrJ3PoXPSg4aJKh8+CHex477SRsGj8KP19FG8r0P5AG
+8lS1V+enFCNvT5KqEBpDZ/Y5SQAhAYFUX+zH4/n4ql0l/emS+x23kSRrF+yMkB9q
+lhC/fMk6Pi3tICBjrDQ8XAxv56hfud9w6+/ljYB2uQ1iUYtlE3JdIiuE+3ws26O8
+i7PLMD9zQmo+sVi12pLHfBHQ6RRHtdVRXbXRAgMBAAGjQjBAMA8GA1UdEwEB/wQF
+MAMBAf8wHQYDVR0OBBYEFBFot08ipEL9ZUXCG4lagmF53C0/MA4GA1UdDwEB/wQE
+AwIBhjANBgkqhkiG9w0BAQwFAAOCAgEAi2mcZi6cpaeqJ10xzMY0F3L2eOKYnlEQ
+h6QyhmNKCUF05q5u+cok5KtznzqMwy7TFOZtbVHl8uUX+xvgq/MQCxqFAnuStBXm
+gr2dg1h509ZwvTdk7TDxGdftvPCfnPNJBFbMSq4CZtNcOFBg9Rj8c3Yj+Qvwd56V
+zWs65BUkDNJrXmxdvhJZjUkMa9vi/oFN+M84xXeZTaC5YDYNZZeW9706QqDbAVES
+5ulvKLavB8waLI/lhRBK5/k0YykCMl0A8Togt8D1QsQ0eWWbIM8/HYJMPVFhJ8Wj
+vT1p/YVeDA3Bo1iKDOttgC5vILf5Rw1ZEeDxjf/r8A7VS13D3OLjBmc31zxRTs3n
+XvHKP9MieQHn9GE44tEYPjK3/yC6BDFzCBlvccYHmqGb+jvDEXEBXKzimdC9mcDl
+f4BBQWGJBH5jkbU9p6iti19L/zHhz7qU6UJWbxY40w92L9jS9Utljh4A0LCTjlnR
+NQUgjnGC6K+jkw8hj0LTC5Ip87oqoT9w7Av5EJ3VJ4hcnmNMXJJ1DkWYdnytcGpO
+DMVITQzzDZRwhbitCVPHagTN2wdi9TEuYE33J0VmFeTc6FSI50wP2aOAZ0Q1/8Aj
+bxeM5jS25eaHc2CQAuhrc/7GLnxOcPwdWQb2XWT8eHudhMnoRikVv/KSK3mf6om4
+1YfpdH2jp30=
+-----END CERTIFICATE-----
+-----BEGIN CERTIFICATE-----
+MIID/jCCAuagAwIBAgIQTDc+UgTRtYO7ZGTQ8UWKDDANBgkqhkiG9w0BAQsFADCB
+lzELMAkGA1UEBhMCVVMxIjAgBgNVBAoMGUFtYXpvbiBXZWIgU2VydmljZXMsIElu
+Yy4xEzARBgNVBAsMCkFtYXpvbiBSRFMxCzAJBgNVBAgMAldBMTAwLgYDVQQDDCdB
+bWF6b24gUkRTIGV1LXdlc3QtMiBSb290IENBIFJTQTIwNDggRzExEDAOBgNVBAcM
+B1NlYXR0bGUwIBcNMjEwNTIxMjI0NjI0WhgPMjA2MTA1MjEyMzQ2MjRaMIGXMQsw
+CQYDVQQGEwJVUzEiMCAGA1UECgwZQW1hem9uIFdlYiBTZXJ2aWNlcywgSW5jLjET
+MBEGA1UECwwKQW1hem9uIFJEUzELMAkGA1UECAwCV0ExMDAuBgNVBAMMJ0FtYXpv
+biBSRFMgZXUtd2VzdC0yIFJvb3QgQ0EgUlNBMjA0OCBHMTEQMA4GA1UEBwwHU2Vh
+dHRsZTCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAM1oGtthQ1YiVIC2
+i4u4swMAGxAjc/BZp0yq0eP5ZQFaxnxs7zFAPabEWsrjeDzrRhdVO0h7zskrertP
+gblGhfD20JfjvCHdP1RUhy/nzG+T+hn6Takan/GIgs8grlBMRHMgBYHW7tklhjaH
+3F7LujhceAHhhgp6IOrpb6YTaTTaJbF3GTmkqxSJ3l1LtEoWz8Al/nL/Ftzxrtez
+Vs6ebpvd7sw37sxmXBWX2OlvUrPCTmladw9OrllGXtCFw4YyLe3zozBlZ3cHzQ0q
+lINhpRcajTMfZrsiGCkQtoJT+AqVJPS2sHjqsEH8yiySW9Jbq4zyMbM1yqQ2vnnx
+MJgoYMcCAwEAAaNCMEAwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQUaQG88UnV
+JPTI+Pcti1P+q3H7pGYwDgYDVR0PAQH/BAQDAgGGMA0GCSqGSIb3DQEBCwUAA4IB
+AQBAkgr75V0sEJimC6QRiTVWEuj2Khy7unjSfudbM6zumhXEU2/sUaVLiYy6cA/x
+3v0laDle6T07x9g64j5YastE/4jbzrGgIINFlY0JnaYmR3KZEjgi1s1fkRRf3llL
+PJm9u4Q1mbwAMQK/ZjLuuRcL3uRIHJek18nRqT5h43GB26qXyvJqeYYpYfIjL9+/
+YiZAbSRRZG+Li23cmPWrbA1CJY121SB+WybCbysbOXzhD3Sl2KSZRwSw4p2HrFtV
+1Prk0dOBtZxCG9luf87ultuDZpfS0w6oNBAMXocgswk24ylcADkkFxBWW+7BETn1
+EpK+t1Lm37mU4sxtuha00XAi
+-----END CERTIFICATE-----
+-----BEGIN CERTIFICATE-----
+MIIEADCCAuigAwIBAgIQcY44/8NUvBwr6LlHfRy7KjANBgkqhkiG9w0BAQsFADCB
+mDELMAkGA1UEBhMCVVMxIjAgBgNVBAoMGUFtYXpvbiBXZWIgU2VydmljZXMsIElu
+Yy4xEzARBgNVBAsMCkFtYXpvbiBSRFMxCzAJBgNVBAgMAldBMTEwLwYDVQQDDChB
+bWF6b24gUkRTIGV1LXNvdXRoLTEgUm9vdCBDQSBSU0EyMDQ4IEcxMRAwDgYDVQQH
+DAdTZWF0dGxlMCAXDTIxMDUxOTE4MjcxOFoYDzIwNjEwNTE5MTkyNzE4WjCBmDEL
+MAkGA1UEBhMCVVMxIjAgBgNVBAoMGUFtYXpvbiBXZWIgU2VydmljZXMsIEluYy4x
+EzARBgNVBAsMCkFtYXpvbiBSRFMxCzAJBgNVBAgMAldBMTEwLwYDVQQDDChBbWF6
+b24gUkRTIGV1LXNvdXRoLTEgUm9vdCBDQSBSU0EyMDQ4IEcxMRAwDgYDVQQHDAdT
+ZWF0dGxlMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA0UaBeC+Usalu
+EtXnV7+PnH+gi7/71tI/jkKVGKuhD2JDVvqLVoqbMHRh3+wGMvqKCjbHPcC2XMWv
+566fpAj4UZ9CLB5fVzss+QVNTl+FH2XhEzigopp+872ajsNzcZxrMkifxGb4i0U+
+t0Zi+UrbL5tsfP2JonKR1crOrbS6/DlzHBjIiJazGOQcMsJjNuTOItLbMohLpraA
+/nApa3kOvI7Ufool1/34MG0+wL3UUA4YkZ6oBJVxjZvvs6tI7Lzz/SnhK2widGdc
+snbLqBpHNIZQSorVoiwcFaRBGYX/uzYkiw44Yfa4cK2V/B5zgu1Fbr0gbI2am4eh
+yVYyg4jPawIDAQABo0IwQDAPBgNVHRMBAf8EBTADAQH/MB0GA1UdDgQWBBS9gM1m
+IIjyh9O5H/7Vj0R/akI7UzAOBgNVHQ8BAf8EBAMCAYYwDQYJKoZIhvcNAQELBQAD
+ggEBAF0Sm9HC2AUyedBVnwgkVXMibnYChOzz7T+0Y+fOLXYAEXex2s8oqGeZdGYX
+JHkjBn7JXu7LM+TpTbPbFFDoc1sgMguD/ls+8XsqAl1CssW+amryIL+jfcfbgQ+P
+ICwEUD9hGdjBgJ5WcuS+qqxHsEIlFNci3HxcxfBa9VsWs5TjI7Vsl4meL5lf7ZyL
+wDV7dHRuU+cImqG1MIvPRIlvPnT7EghrCYi2VCPhP2pM/UvShuwVnkz4MJ29ebIk
+WR9kpblFxFdE92D5UUvMCjC2kmtgzNiErvTcwIvOO9YCbBHzRB1fFiWrXUHhJWq9
+IkaxR5icb/IpAV0A1lYZEWMVsfQ=
+-----END CERTIFICATE-----
+-----BEGIN CERTIFICATE-----
+MIIGATCCA+mgAwIBAgIRAMa0TPL+QgbWfUPpYXQkf8wwDQYJKoZIhvcNAQEMBQAw
+gZgxCzAJBgNVBAYTAlVTMSIwIAYDVQQKDBlBbWF6b24gV2ViIFNlcnZpY2VzLCBJ
+bmMuMRMwEQYDVQQLDApBbWF6b24gUkRTMQswCQYDVQQIDAJXQTExMC8GA1UEAwwo
+QW1hem9uIFJEUyBldS1ub3J0aC0xIFJvb3QgQ0EgUlNBNDA5NiBHMTEQMA4GA1UE
+BwwHU2VhdHRsZTAgFw0yMTA1MjQyMTAzMjBaGA8yMTIxMDUyNDIyMDMyMFowgZgx
+CzAJBgNVBAYTAlVTMSIwIAYDVQQKDBlBbWF6b24gV2ViIFNlcnZpY2VzLCBJbmMu
+MRMwEQYDVQQLDApBbWF6b24gUkRTMQswCQYDVQQIDAJXQTExMC8GA1UEAwwoQW1h
+em9uIFJEUyBldS1ub3J0aC0xIFJvb3QgQ0EgUlNBNDA5NiBHMTEQMA4GA1UEBwwH
+U2VhdHRsZTCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBANhS9LJVJyWp
+6Rudy9t47y6kzvgnFYDrvJVtgEK0vFn5ifdlHE7xqMz4LZqWBFTnS+3oidwVRqo7
+tqsuuElsouStO8m315/YUzKZEPmkw8h5ufWt/lg3NTCoUZNkB4p4skr7TspyMUwE
+VdlKQuWTCOLtofwmWT+BnFF3To6xTh3XPlT3ssancw27Gob8kJegD7E0TSMVsecP
+B8je65+3b8CGwcD3QB3kCTGLy87tXuS2+07pncHvjMRMBdDQQQqhXWsRSeUNg0IP
+xdHTWcuwMldYPWK5zus9M4dCNBDlmZjKdcZZVUOKeBBAm7Uo7CbJCk8r/Fvfr6mw
+nXXDtuWhqn/WhJiI/y0QU27M+Hy5CQMxBwFsfAjJkByBpdXmyYxUgTmMpLf43p7H
+oWfH1xN0cT0OQEVmAQjMakauow4AQLNkilV+X6uAAu3STQVFRSrpvMen9Xx3EPC3
+G9flHueTa71bU65Xe8ZmEmFhGeFYHY0GrNPAFhq9RThPRY0IPyCZe0Th8uGejkek
+jQjm0FHPOqs5jc8CD8eJs4jSEFt9lasFLVDcAhx0FkacLKQjGHvKAnnbRwhN/dF3
+xt4oL8Z4JGPCLau056gKnYaEyviN7PgO+IFIVOVIdKEBu2ASGE8/+QJB5bcHefNj
+04hEkDW0UYJbSfPpVbGAR0gFI/QpycKnAgMBAAGjQjBAMA8GA1UdEwEB/wQFMAMB
+Af8wHQYDVR0OBBYEFFMXvvjoaGGUcul8GA3FT05DLbZcMA4GA1UdDwEB/wQEAwIB
+hjANBgkqhkiG9w0BAQwFAAOCAgEAQLwFhd2JKn4K/6salLyIA4mP58qbA/9BTB/r
+D9l0bEwDlVPSdY7R3gZCe6v7SWLfA9RjE5tdWDrQMi5IU6W2OVrVsZS/yGJfwnwe
+a/9iUAYprA5QYKDg37h12XhVsDKlYCekHdC+qa5WwB1SL3YUprDLPWeaIQdg+Uh2
++LxvpZGoxoEbca0fc7flwq9ke/3sXt/3V4wJDyY6AL2YNdjFzC+FtYjHHx8rYxHs
+aesP7yunuN17KcfOZBBnSFRrx96k+Xm95VReTEEpwiBqAECqEpMbd+R0mFAayMb1
+cE77GaK5yeC2f67NLYGpkpIoPbO9p9rzoXLE5GpSizMjimnz6QCbXPFAFBDfSzim
+u6azp40kEUO6kWd7rBhqRwLc43D3TtNWQYxMve5mTRG4Od+eMKwYZmQz89BQCeqm
+aZiJP9y9uwJw4p/A5V3lYHTDQqzmbOyhGUk6OdpdE8HXs/1ep1xTT20QDYOx3Ekt
+r4mmNYfH/8v9nHNRlYJOqFhmoh1i85IUl5IHhg6OT5ZTTwsGTSxvgQQXrmmHVrgZ
+rZIqyBKllCgVeB9sMEsntn4bGLig7CS/N1y2mYdW/745yCLZv2gj0NXhPqgEIdVV
+f9DhFD4ohE1C63XP0kOQee+LYg/MY5vH8swpCSWxQgX5icv5jVDz8YTdCKgUc5u8
+rM2p0kk=
+-----END CERTIFICATE-----
diff --git a/redash/query_runner/google_analytics.py b/redash/query_runner/google_analytics.py
index 9956f5d5f3..dede7340e7 100644
--- a/redash/query_runner/google_analytics.py
+++ b/redash/query_runner/google_analytics.py
@@ -3,19 +3,27 @@
from datetime import datetime
from urllib.parse import parse_qs, urlparse
-from redash.query_runner import *
-from redash.utils import json_dumps, json_loads
+from redash.query_runner import (
+ TYPE_DATE,
+ TYPE_DATETIME,
+ TYPE_FLOAT,
+ TYPE_INTEGER,
+ TYPE_STRING,
+ BaseSQLQueryRunner,
+ register,
+)
+from redash.utils import json_loads
logger = logging.getLogger(__name__)
try:
- from oauth2client.service_account import ServiceAccountCredentials
+ import google.auth
from apiclient.discovery import build
from apiclient.errors import HttpError
- import httplib2
+ from google.oauth2.service_account import Credentials
enabled = True
-except ImportError as e:
+except ImportError:
enabled = False
@@ -48,9 +56,7 @@ def parse_ga_response(response):
d = {}
for c, value in enumerate(r):
column_name = response["columnHeaders"][c]["name"]
- column_type = [col for col in columns if col["name"] == column_name][0][
- "type"
- ]
+ column_type = [col for col in columns if col["name"] == column_name][0]["type"]
# mcf results come a bit different than ga results:
if isinstance(value, dict):
@@ -59,9 +65,7 @@ def parse_ga_response(response):
elif "conversionPathValue" in value:
steps = []
for step in value["conversionPathValue"]:
- steps.append(
- "{}:{}".format(step["interactionType"], step["nodeValue"])
- )
+ steps.append("{}:{}".format(step["interactionType"], step["nodeValue"]))
value = ", ".join(steps)
else:
raise Exception("Results format not supported")
@@ -74,9 +78,7 @@ def parse_ga_response(response):
elif len(value) == 12:
value = datetime.strptime(value, "%Y%m%d%H%M")
else:
- raise Exception(
- "Unknown date/time format in results: '{}'".format(value)
- )
+ raise Exception("Unknown date/time format in results: '{}'".format(value))
d[column_name] = value
rows.append(d)
@@ -103,8 +105,8 @@ def enabled(cls):
def configuration_schema(cls):
return {
"type": "object",
- "properties": {"jsonKeyFile": {"type": "string", "title": "JSON Key File"}},
- "required": ["jsonKeyFile"],
+ "properties": {"jsonKeyFile": {"type": "string", "title": "JSON Key File (ADC is used if omitted)"}},
+ "required": [],
"secret": ["jsonKeyFile"],
}
@@ -113,20 +115,18 @@ def __init__(self, configuration):
self.syntax = "json"
def _get_analytics_service(self):
- scope = ["https://www.googleapis.com/auth/analytics.readonly"]
- key = json_loads(b64decode(self.configuration["jsonKeyFile"]))
- creds = ServiceAccountCredentials.from_json_keyfile_dict(key, scope)
- return build("analytics", "v3", http=creds.authorize(httplib2.Http()))
+ scopes = ["https://www.googleapis.com/auth/analytics.readonly"]
+
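+        # Use the configured service-account JSON key when present; a missing
+        # "jsonKeyFile" raises KeyError and Application Default Credentials (ADC)
+        # are used instead, matching the hint in the configuration schema.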
+ try:
+ key = json_loads(b64decode(self.configuration["jsonKeyFile"]))
+ creds = Credentials.from_service_account_info(key, scopes=scopes)
+ except KeyError:
+ creds = google.auth.default(scopes=scopes)[0]
+
+ return build("analytics", "v3", credentials=creds)
def _get_tables(self, schema):
- accounts = (
- self._get_analytics_service()
- .management()
- .accounts()
- .list()
- .execute()
- .get("items")
- )
+ accounts = self._get_analytics_service().management().accounts().list().execute().get("items")
if accounts is None:
raise Exception("Failed getting accounts.")
else:
@@ -143,9 +143,7 @@ def _get_tables(self, schema):
for property_ in properties:
if "defaultProfileId" in property_ and "name" in property_:
schema[account["name"]]["columns"].append(
- "{0} (ga:{1})".format(
- property_["name"], property_["defaultProfileId"]
- )
+ "{0} (ga:{1})".format(property_["name"], property_["defaultProfileId"])
)
return list(schema.values())
@@ -162,16 +160,14 @@ def run_query(self, query, user):
logger.debug("Analytics is about to execute query: %s", query)
try:
params = json_loads(query)
- except:
- query_string = parse_qs(urlparse(query).query, keep_blank_values=True)
- params = {k.replace('-', '_'): ",".join(v) for k,v in query_string.items()}
+ except Exception:
+ query_string = parse_qs(urlparse(query).query, keep_blank_values=True)
+ params = {k.replace("-", "_"): ",".join(v) for k, v in query_string.items()}
if "mcf:" in params["metrics"] and "ga:" in params["metrics"]:
raise Exception("Can't mix mcf: and ga: metrics.")
- if "mcf:" in params.get("dimensions", "") and "ga:" in params.get(
- "dimensions", ""
- ):
+ if "mcf:" in params.get("dimensions", "") and "ga:" in params.get("dimensions", ""):
raise Exception("Can't mix mcf: and ga: dimensions.")
if "mcf:" in params["metrics"]:
@@ -184,15 +180,14 @@ def run_query(self, query, user):
response = api.get(**params).execute()
data = parse_ga_response(response)
error = None
- json_data = json_dumps(data)
except HttpError as e:
# Make sure we return a more readable error to the end user
error = e._get_reason()
- json_data = None
+ data = None
else:
error = "Wrong query format."
- json_data = None
- return json_data, error
+ data = None
+ return data, error
register(GoogleAnalytics)
diff --git a/redash/query_runner/google_analytics4.py b/redash/query_runner/google_analytics4.py
new file mode 100644
index 0000000000..302e5ae909
--- /dev/null
+++ b/redash/query_runner/google_analytics4.py
@@ -0,0 +1,181 @@
+import datetime
+import logging
+from base64 import b64decode
+
+import requests
+
+from redash.query_runner import (
+ TYPE_DATE,
+ TYPE_DATETIME,
+ TYPE_FLOAT,
+ TYPE_INTEGER,
+ TYPE_STRING,
+ BaseQueryRunner,
+ register,
+)
+from redash.utils import json_loads
+
+logger = logging.getLogger(__name__)
+
+try:
+ import google.auth
+ import google.auth.transport.requests
+ from google.oauth2.service_account import Credentials
+
+ enabled = True
+except ImportError:
+ enabled = False
+
+types_conv = dict(
+ STRING=TYPE_STRING,
+ INTEGER=TYPE_INTEGER,
+ FLOAT=TYPE_FLOAT,
+ DATE=TYPE_DATE,
+ DATETIME=TYPE_DATETIME,
+)
+
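+# GA4 Data API (v1beta) endpoints; "{propertyId}" is filled in at request time.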
+ga_report_endpoint = "https://analyticsdata.googleapis.com/v1beta/properties/{propertyId}:runReport"
+ga_metadata_endpoint = "https://analyticsdata.googleapis.com/v1beta/properties/{propertyId}/metadata"
+
+
+def format_column_value(column_name, value, columns):
+ column_type = [col for col in columns if col["name"] == column_name][0]["type"]
+
+ if column_type == TYPE_DATE:
+ value = datetime.datetime.strptime(value, "%Y%m%d")
+ elif column_type == TYPE_DATETIME:
+ if len(value) == 10:
+ value = datetime.datetime.strptime(value, "%Y%m%d%H")
+ elif len(value) == 12:
+ value = datetime.datetime.strptime(value, "%Y%m%d%H%M")
+ else:
+ raise Exception("Unknown date/time format in results: '{}'".format(value))
+
+ return value
+
+
+def get_formatted_column_json(column_name):
+ data_type = None
+
+ if column_name == "date":
+ data_type = "DATE"
+ elif column_name == "dateHour":
+ data_type = "DATETIME"
+
+ result = {
+ "name": column_name,
+ "friendly_name": column_name,
+ "type": types_conv.get(data_type, "string"),
+ }
+
+ return result
+
+
+def parse_ga_response(response):
+ columns = []
+
+ for dim_header in response["dimensionHeaders"]:
+ columns.append(get_formatted_column_json(dim_header["name"]))
+
+ for met_header in response["metricHeaders"]:
+ columns.append(get_formatted_column_json(met_header["name"]))
+
+ rows = []
+ for r in response["rows"]:
+ counter = 0
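+        # Dimension values come first, then metric values, in the same order as
+        # the headers above, so a single counter walks the combined columns list.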
+ d = {}
+ for item in r["dimensionValues"]:
+ column_name = columns[counter]["name"]
+ value = item["value"]
+
+ d[column_name] = format_column_value(column_name, value, columns)
+ counter = counter + 1
+
+ for item in r["metricValues"]:
+ column_name = columns[counter]["name"]
+ value = item["value"]
+
+ d[column_name] = format_column_value(column_name, value, columns)
+ counter = counter + 1
+
+ rows.append(d)
+
+ return {"columns": columns, "rows": rows}
+
+
+class GoogleAnalytics4(BaseQueryRunner):
+ should_annotate_query = False
+
+ @classmethod
+ def type(cls):
+ return "google_analytics4"
+
+ @classmethod
+ def name(cls):
+ return "Google Analytics 4"
+
+ @classmethod
+ def enabled(cls):
+ return enabled
+
+ @classmethod
+ def configuration_schema(cls):
+ return {
+ "type": "object",
+ "properties": {
+ "propertyId": {"type": "number", "title": "Property Id"},
+ "jsonKeyFile": {"type": "string", "title": "JSON Key File (ADC is used if omitted)"},
+ },
+ "required": ["propertyId"],
+ "secret": ["jsonKeyFile"],
+ }
+
+ def _get_access_token(self):
+ scopes = ["https://www.googleapis.com/auth/analytics.readonly"]
+
+ try:
+ key = json_loads(b64decode(self.configuration["jsonKeyFile"]))
+ creds = Credentials.from_service_account_info(key, scopes=scopes)
+ except KeyError:
+ creds = google.auth.default(scopes=scopes)[0]
+
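+        # The Data API is called directly with requests, so refresh here to mint
+        # a bearer token instead of passing credentials to a discovery client.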
+ creds.refresh(google.auth.transport.requests.Request())
+
+ return creds.token
+
+ def run_query(self, query, user):
+ access_token = self._get_access_token()
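+        # The query text is the raw JSON body of a runReport request
+        # (dimensions, metrics, dateRanges, ...).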
+ params = json_loads(query)
+
+ property_id = self.configuration["propertyId"]
+
+ headers = {"Content-Type": "application/json", "Authorization": f"Bearer {access_token}"}
+
+ url = ga_report_endpoint.replace("{propertyId}", str(property_id))
+ r = requests.post(url, json=params, headers=headers)
+ r.raise_for_status()
+
+ raw_result = r.json()
+
+ data = parse_ga_response(raw_result)
+
+ error = None
+
+ return data, error
+
+ def test_connection(self):
+ try:
+ access_token = self._get_access_token()
+ property_id = self.configuration["propertyId"]
+
+ url = ga_metadata_endpoint.replace("{propertyId}", str(property_id))
+
+ headers = {"Content-Type": "application/json", "Authorization": f"Bearer {access_token}"}
+
+ r = requests.get(url, headers=headers)
+ r.raise_for_status()
+ except Exception as e:
+ raise Exception(e)
+
+
+register(GoogleAnalytics4)
diff --git a/redash/query_runner/google_search_console.py b/redash/query_runner/google_search_console.py
new file mode 100644
index 0000000000..e0106a023a
--- /dev/null
+++ b/redash/query_runner/google_search_console.py
@@ -0,0 +1,164 @@
+import logging
+from base64 import b64decode
+from datetime import datetime
+
+from redash.query_runner import (
+ TYPE_DATE,
+ TYPE_DATETIME,
+ TYPE_FLOAT,
+ TYPE_INTEGER,
+ TYPE_STRING,
+ BaseSQLQueryRunner,
+ register,
+)
+from redash.utils import json_loads
+
+logger = logging.getLogger(__name__)
+
+try:
+ import google.auth
+ from apiclient.discovery import build
+ from apiclient.errors import HttpError
+ from google.oauth2.service_account import Credentials
+
+ enabled = True
+except ImportError:
+ enabled = False
+
+
+types_conv = dict(
+ STRING=TYPE_STRING,
+ INTEGER=TYPE_INTEGER,
+ FLOAT=TYPE_FLOAT,
+ DATE=TYPE_DATE,
+ DATETIME=TYPE_DATETIME,
+)
+
+
+def parse_ga_response(response, dimensions):
+ columns = []
+
+ for item in dimensions:
+ if item == "date":
+ data_type = "date"
+ else:
+ data_type = "string"
+ columns.append(
+ {
+ "name": item,
+ "friendly_name": item,
+ "type": data_type,
+ }
+ )
+
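+    # The Search Analytics API returns these four metrics for every row, in
+    # addition to the requested dimension keys.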
+ default_items = ["clicks", "impressions", "ctr", "position"]
+ for item in default_items:
+ columns.append({"name": item, "friendly_name": item, "type": "number"})
+
+ rows = []
+ for r in response.get("rows", []):
+ d = {}
+ for k, value in r.items():
+ if k == "keys":
+ for index, val in enumerate(value):
+ column_name = columns[index]["name"]
+ column_type = columns[index]["type"]
+ val = get_formatted_value(column_type, val)
+ d[column_name] = val
+ else:
+ column_name = k
+ column_type = [col for col in columns if col["name"] == column_name][0]["type"]
+ value = get_formatted_value(column_type, value)
+ d[column_name] = value
+ rows.append(d)
+
+ return {"columns": columns, "rows": rows}
+
+
+def get_formatted_value(column_type, value):
+ if column_type == "number":
+ value = round(value, 2)
+ elif column_type == TYPE_DATE:
+ value = datetime.strptime(value, "%Y-%m-%d")
+ elif column_type == TYPE_DATETIME:
+ if len(value) == 10:
+ value = datetime.strptime(value, "%Y%m%d%H")
+ elif len(value) == 12:
+ value = datetime.strptime(value, "%Y%m%d%H%M")
+ else:
+ raise Exception("Unknown date/time format in results: '{}'".format(value))
+ return value
+
+
+class GoogleSearchConsole(BaseSQLQueryRunner):
+ should_annotate_query = False
+
+ @classmethod
+ def type(cls):
+ return "google_search_console"
+
+ @classmethod
+ def name(cls):
+ return "Google Search Console"
+
+ @classmethod
+ def enabled(cls):
+ return enabled
+
+ @classmethod
+ def configuration_schema(cls):
+ return {
+ "type": "object",
+ "properties": {
+ "siteURL": {"type": "string", "title": "Site URL"},
+ "jsonKeyFile": {"type": "string", "title": "JSON Key File (ADC is used if omitted)"},
+ },
+ "required": [],
+ "secret": ["jsonKeyFile"],
+ }
+
+ def __init__(self, configuration):
+ super(GoogleSearchConsole, self).__init__(configuration)
+ self.syntax = "json"
+
+ def _get_search_service(self):
+ scopes = ["https://www.googleapis.com/auth/webmasters.readonly"]
+
+ try:
+ key = json_loads(b64decode(self.configuration["jsonKeyFile"]))
+ creds = Credentials.from_service_account_info(key, scopes=scopes)
+ except KeyError:
+ creds = google.auth.default(scopes=scopes)[0]
+
+ return build("searchconsole", "v1", credentials=creds)
+
+ def test_connection(self):
+ try:
+ service = self._get_search_service()
+ service.sites().list().execute()
+ except HttpError as e:
+ # Make sure we return a more readable error to the end user
+ raise Exception(e._get_reason())
+
+ def run_query(self, query, user):
+ logger.debug("Search Analytics is about to execute query: %s", query)
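+        # The query text is the JSON body passed to searchanalytics().query();
+        # it must include "dimensions", which is also used to build the columns.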
+ params = json_loads(query)
+ site_url = self.configuration["siteURL"]
+ api = self._get_search_service()
+
+ if len(params) > 0:
+ try:
+ response = api.searchanalytics().query(siteUrl=site_url, body=params).execute()
+ data = parse_ga_response(response, params["dimensions"])
+ error = None
+ except HttpError as e:
+ # Make sure we return a more readable error to the end user
+ error = e._get_reason()
+ data = None
+ else:
+ error = "Wrong query format."
+ data = None
+ return data, error
+
+
+register(GoogleSearchConsole)
diff --git a/redash/query_runner/google_spreadsheets.py b/redash/query_runner/google_spreadsheets.py
index c9f159c75b..6ea9757c4e 100644
--- a/redash/query_runner/google_spreadsheets.py
+++ b/redash/query_runner/google_spreadsheets.py
@@ -1,19 +1,32 @@
import logging
+import re
from base64 import b64decode
from dateutil import parser
from requests import Session
from xlsxwriter.utility import xl_col_to_name
-from redash.query_runner import *
-from redash.utils import json_dumps, json_loads
+from redash.query_runner import (
+ TYPE_BOOLEAN,
+ TYPE_DATETIME,
+ TYPE_FLOAT,
+ TYPE_INTEGER,
+ TYPE_STRING,
+ BaseQueryRunner,
+ guess_type,
+ register,
+)
+from redash.utils import json_loads
logger = logging.getLogger(__name__)
try:
+ import google.auth
import gspread
+ from google.auth.exceptions import GoogleAuthError
+ from google.oauth2.service_account import Credentials
from gspread.exceptions import APIError
- from oauth2client.service_account import ServiceAccountCredentials
+ from gspread.exceptions import WorksheetNotFound as GSWorksheetNotFound
enabled = True
except ImportError:
@@ -39,9 +52,7 @@ def _get_columns_and_column_names(row):
duplicate_counter += 1
column_names.append(column_name)
- columns.append(
- {"name": column_name, "friendly_name": column_name, "type": TYPE_STRING}
- )
+ columns.append({"name": column_name, "friendly_name": column_name, "type": TYPE_STRING})
return columns, column_names
@@ -81,14 +92,27 @@ def __init__(self, worksheet_num, worksheet_count):
super(WorksheetNotFoundError, self).__init__(message)
+class WorksheetNotFoundByTitleError(Exception):
+ def __init__(self, worksheet_title):
+ message = "Worksheet title '{}' not found.".format(worksheet_title)
+ super(WorksheetNotFoundByTitleError, self).__init__(message)
+
+
def parse_query(query):
values = query.split("|")
key = values[0] # key of the spreadsheet
- worksheet_num = (
- 0 if len(values) != 2 else int(values[1])
- ) # if spreadsheet contains more than one worksheet - this is the number of it
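+    # The query is "<spreadsheet key>|<worksheet>", where <worksheet> is either a
+    # worksheet index (e.g. key|1) or a double-quoted title (e.g. key|"Sheet1").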
+    worksheet_num_or_title = 0  # Default to the first worksheet when no valid worksheet part is given
+ if len(values) == 2:
+ s = values[1].strip()
+ if len(s) > 0:
+ if re.match(r"^\"(.*?)\"$", s):
+                # A value wrapped in double quotes is treated as a worksheet title
+ worksheet_num_or_title = s[1:-1]
+ else:
+                # Otherwise it is the zero-based index of the worksheet
+ worksheet_num_or_title = int(s)
- return key, worksheet_num
+ return key, worksheet_num_or_title
def parse_worksheet(worksheet):
@@ -102,24 +126,27 @@ def parse_worksheet(worksheet):
columns[j]["type"] = guess_type(value)
column_types = [c["type"] for c in columns]
- rows = [
- dict(zip(column_names, _value_eval_list(row, column_types)))
- for row in worksheet[HEADER_INDEX + 1 :]
- ]
+ rows = [dict(zip(column_names, _value_eval_list(row, column_types))) for row in worksheet[HEADER_INDEX + 1 :]]
data = {"columns": columns, "rows": rows}
return data
-def parse_spreadsheet(spreadsheet, worksheet_num):
- worksheets = spreadsheet.worksheets()
- worksheet_count = len(worksheets)
- if worksheet_num >= worksheet_count:
- raise WorksheetNotFoundError(worksheet_num, worksheet_count)
+def parse_spreadsheet(spreadsheet, worksheet_num_or_title):
+ worksheet = None
+ if isinstance(worksheet_num_or_title, int):
+ worksheet = spreadsheet.get_worksheet_by_index(worksheet_num_or_title)
+ if worksheet is None:
+ worksheet_count = len(spreadsheet.worksheets())
+ raise WorksheetNotFoundError(worksheet_num_or_title, worksheet_count)
+ elif isinstance(worksheet_num_or_title, str):
+ worksheet = spreadsheet.get_worksheet_by_title(worksheet_num_or_title)
+ if worksheet is None:
+ raise WorksheetNotFoundByTitleError(worksheet_num_or_title)
- worksheet = worksheets[worksheet_num].get_all_values()
+ worksheet_values = worksheet.get_all_values()
- return parse_worksheet(worksheet)
+ return parse_worksheet(worksheet_values)
def is_url_key(key):
@@ -137,6 +164,23 @@ def parse_api_error(error):
return message
+class SpreadsheetWrapper:
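+    """Adapter around a gspread Spreadsheet that supports looking up a worksheet
+    by index or by title; a title that does not exist yields None instead of an
+    exception."""
+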
+ def __init__(self, spreadsheet):
+ self.spreadsheet = spreadsheet
+
+ def worksheets(self):
+ return self.spreadsheet.worksheets()
+
+ def get_worksheet_by_index(self, index):
+ return self.spreadsheet.get_worksheet(index)
+
+ def get_worksheet_by_title(self, title):
+ try:
+ return self.spreadsheet.worksheet(title)
+ except GSWorksheetNotFound:
+ return None
+
+
class TimeoutSession(Session):
def request(self, *args, **kwargs):
kwargs.setdefault("timeout", 300)
@@ -166,16 +210,19 @@ def enabled(cls):
def configuration_schema(cls):
return {
"type": "object",
- "properties": {"jsonKeyFile": {"type": "string", "title": "JSON Key File"}},
- "required": ["jsonKeyFile"],
+ "properties": {"jsonKeyFile": {"type": "string", "title": "JSON Key File (ADC is used if omitted)"}},
+ "required": [],
"secret": ["jsonKeyFile"],
}
def _get_spreadsheet_service(self):
- scope = ["https://spreadsheets.google.com/feeds"]
+ scopes = ["https://spreadsheets.google.com/feeds"]
- key = json_loads(b64decode(self.configuration["jsonKeyFile"]))
- creds = ServiceAccountCredentials.from_json_keyfile_dict(key, scope)
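+        # Use the service-account key when configured; a missing "jsonKeyFile"
+        # raises KeyError and Application Default Credentials are used instead.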
+ try:
+ key = json_loads(b64decode(self.configuration["jsonKeyFile"]))
+ creds = Credentials.from_service_account_info(key, scopes=scopes)
+ except KeyError:
+ creds = google.auth.default(scopes=scopes)[0]
timeout_session = Session()
timeout_session.requests_session = TimeoutSession()
@@ -184,17 +231,21 @@ def _get_spreadsheet_service(self):
return spreadsheetservice
def test_connection(self):
- service = self._get_spreadsheet_service()
test_spreadsheet_key = "1S0mld7LMbUad8LYlo13Os9f7eNjw57MqVC0YiCd1Jis"
try:
+ service = self._get_spreadsheet_service()
service.open_by_key(test_spreadsheet_key).worksheets()
except APIError as e:
+ logger.exception(e)
message = parse_api_error(e)
raise Exception(message)
+ except GoogleAuthError as e:
+ logger.exception(e)
+ raise Exception(str(e))
def run_query(self, query, user):
logger.debug("Spreadsheet is about to execute query: %s", query)
- key, worksheet_num = parse_query(query)
+ key, worksheet_num_or_title = parse_query(query)
try:
spreadsheet_service = self._get_spreadsheet_service()
@@ -204,15 +255,13 @@ def run_query(self, query, user):
else:
spreadsheet = spreadsheet_service.open_by_key(key)
- data = parse_spreadsheet(spreadsheet, worksheet_num)
+ data = parse_spreadsheet(SpreadsheetWrapper(spreadsheet), worksheet_num_or_title)
- return json_dumps(data), None
+ return data, None
except gspread.SpreadsheetNotFound:
return (
None,
- "Spreadsheet ({}) not found. Make sure you used correct id.".format(
- key
- ),
+ "Spreadsheet ({}) not found. Make sure you used correct id.".format(key),
)
except APIError as e:
return None, parse_api_error(e)
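
With SpreadsheetWrapper and the widened parse_spreadsheet above, a worksheet can now be addressed either by zero-based index or by title. A minimal sketch of the two call forms (the gspread spreadsheet handle and the sheet title are placeholders):

    # Minimal sketch, assuming `spreadsheet` is a gspread Spreadsheet object.
    from redash.query_runner.google_spreadsheets import SpreadsheetWrapper, parse_spreadsheet

    wrapped = SpreadsheetWrapper(spreadsheet)

    # By zero-based index; WorksheetNotFoundError is raised when the lookup comes back empty.
    data = parse_spreadsheet(wrapped, 0)

    # By title; WorksheetNotFoundByTitleError is raised when no sheet has that title.
    data = parse_spreadsheet(wrapped, "Monthly KPIs")

Either way the result is the familiar dict of "columns" and "rows" built by parse_worksheet.
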
diff --git a/redash/query_runner/graphite.py b/redash/query_runner/graphite.py
index 3bbf098598..06bdbc61db 100644
--- a/redash/query_runner/graphite.py
+++ b/redash/query_runner/graphite.py
@@ -3,8 +3,13 @@
import requests
-from redash.query_runner import *
-from redash.utils import json_dumps
+from redash.query_runner import (
+ TYPE_DATETIME,
+ TYPE_FLOAT,
+ TYPE_STRING,
+ BaseQueryRunner,
+ register,
+)
logger = logging.getLogger(__name__)
@@ -29,8 +34,7 @@ def _transform_result(response):
}
)
- data = {"columns": columns, "rows": rows}
- return json_dumps(data)
+ return {"columns": columns, "rows": rows}
class Graphite(BaseQueryRunner):
@@ -69,11 +73,7 @@ def test_connection(self):
verify=self.verify,
)
if r.status_code != 200:
- raise Exception(
- "Got invalid response from Graphite (http status code: {0}).".format(
- r.status_code
- )
- )
+ raise Exception("Got invalid response from Graphite (http status code: {0}).".format(r.status_code))
def run_query(self, query, user):
url = "%s%s" % (self.base_url, "&".join(query.split("\n")))
diff --git a/redash/query_runner/hive_ds.py b/redash/query_runner/hive_ds.py
index 1baab89560..cb44a9457e 100644
--- a/redash/query_runner/hive_ds.py
+++ b/redash/query_runner/hive_ds.py
@@ -1,9 +1,17 @@
-import logging
-import sys
import base64
+import logging
-from redash.query_runner import *
-from redash.utils import json_dumps
+from redash.query_runner import (
+ TYPE_BOOLEAN,
+ TYPE_DATE,
+ TYPE_DATETIME,
+ TYPE_FLOAT,
+ TYPE_INTEGER,
+ TYPE_STRING,
+ BaseSQLQueryRunner,
+ JobTimeoutException,
+ register,
+)
logger = logging.getLogger(__name__)
@@ -71,27 +79,17 @@ def _get_tables(self, schema):
columns_query = "show columns in %s.%s"
for schema_name in [
- a
- for a in [
- str(a["database_name"]) for a in self._run_query_internal(schemas_query)
- ]
- if len(a) > 0
+ a for a in [str(a["database_name"]) for a in self._run_query_internal(schemas_query)] if len(a) > 0
]:
for table_name in [
a
- for a in [
- str(a["tab_name"])
- for a in self._run_query_internal(tables_query % schema_name)
- ]
+ for a in [str(a["tab_name"]) for a in self._run_query_internal(tables_query % schema_name)]
if len(a) > 0
]:
columns = [
a
for a in [
- str(a["field"])
- for a in self._run_query_internal(
- columns_query % (schema_name, table_name)
- )
+ str(a["field"]) for a in self._run_query_internal(columns_query % (schema_name, table_name))
]
if len(a) > 0
]
@@ -140,7 +138,6 @@ def run_query(self, query, user):
rows = [dict(zip(column_names, row)) for row in cursor]
data = {"columns": columns, "rows": rows}
- json_data = json_dumps(data)
error = None
except (KeyboardInterrupt, JobTimeoutException):
if connection:
@@ -151,12 +148,12 @@ def run_query(self, query, user):
error = e.args[0].status.errorMessage
except AttributeError:
error = str(e)
- json_data = None
+ data = None
finally:
if connection:
connection.close()
- return json_data, error
+ return data, error
class HiveHttp(Hive):
diff --git a/redash/query_runner/ignite.py b/redash/query_runner/ignite.py
new file mode 100644
index 0000000000..bba24fb064
--- /dev/null
+++ b/redash/query_runner/ignite.py
@@ -0,0 +1,174 @@
+import datetime
+import importlib.util
+import logging
+
+from redash.query_runner import (
+ TYPE_BOOLEAN,
+ TYPE_DATETIME,
+ TYPE_FLOAT,
+ TYPE_INTEGER,
+ TYPE_STRING,
+ BaseSQLQueryRunner,
+ JobTimeoutException,
+ register,
+)
+
+ignite_available = importlib.util.find_spec("pyignite") is not None
+gridgain_available = importlib.util.find_spec("pygridgain") is not None
+
+
+logger = logging.getLogger(__name__)
+
+types_map = {
+ "java.lang.String": TYPE_STRING,
+ "java.lang.Float": TYPE_FLOAT,
+ "java.lang.Double": TYPE_FLOAT,
+ "java.sql.Date": TYPE_DATETIME,
+ "java.sql.Timestamp": TYPE_DATETIME,
+ "java.lang.Long": TYPE_INTEGER,
+ "java.lang.Integer": TYPE_INTEGER,
+ "java.lang.Short": TYPE_INTEGER,
+ "java.lang.Boolean": TYPE_BOOLEAN,
+ "java.lang.Decimal": TYPE_FLOAT,
+}
+
+
+class Ignite(BaseSQLQueryRunner):
+ should_annotate_query = False
+ noop_query = "SELECT 1"
+
+ @classmethod
+ def configuration_schema(cls):
+ return {
+ "type": "object",
+ "properties": {
+ "user": {"type": "string"},
+ "password": {"type": "string"},
+ "server": {"type": "string", "default": "127.0.0.1:10800"},
+ "tls": {"type": "boolean", "default": False, "title": "Use SSL/TLS connection"},
+ "schema": {"type": "string", "title": "Schema Name", "default": "PUBLIC"},
+ "distributed_joins": {"type": "boolean", "title": "Allow distributed joins", "default": False},
+ "enforce_join_order": {"type": "boolean", "title": "Enforce join order", "default": False},
+ "lazy": {"type": "boolean", "title": "Lazy query execution", "default": True},
+ "gridgain": {"type": "boolean", "title": "Use GridGain libraries", "default": gridgain_available},
+ },
+ "required": ["server"],
+ "secret": ["password"],
+ }
+
+ @classmethod
+ def name(cls):
+ return "Apache Ignite"
+
+ @classmethod
+ def type(cls):
+ return "ignite"
+
+ @classmethod
+ def enabled(cls):
+ return ignite_available or gridgain_available
+
+ def _get_tables(self, schema):
+ query = """
+ SELECT schema_name, table_name, column_name, type
+ FROM SYS.TABLE_COLUMNS
+ WHERE schema_name NOT IN ('SYS') and column_name not in ('_KEY','_VAL');
+ """
+
+ results, error = self.run_query(query, None)
+
+ if error is not None:
+ raise Exception("Failed getting schema.")
+
+ for row in results["rows"]:
+ if row["SCHEMA_NAME"] != self.configuration.get("schema", "PUBLIC"):
+ table_name = "{}.{}".format(row["SCHEMA_NAME"], row["TABLE_NAME"])
+ else:
+ table_name = row["TABLE_NAME"]
+
+ if table_name not in schema:
+ schema[table_name] = {"name": table_name, "columns": []}
+
+ col_type = TYPE_STRING
+ if row["TYPE"] in types_map:
+ col_type = types_map[row["TYPE"]]
+
+ schema[table_name]["columns"].append({"name": row["COLUMN_NAME"], "type": col_type})
+
+ return list(schema.values())
+
+ def normalise_column(self, col):
+ # datetime values arrive as a (datetime, int) tuple; keep only the datetime part
+ if type(col) is tuple and len(col) == 2 and type(col[0]) is datetime.datetime and isinstance(col[1], int):
+ return col[0]
+ else:
+ return col
+
+ def normalise_row(self, row):
+ return [self.normalise_column(col) for col in row]
+
+ def server_to_connection(self, s):
+ st = s.split(":")
+ if len(st) == 1:
+ server = s
+ port = 10800
+ elif len(st) == 2:
+ server = st[0]
+ port = int(st[1])
+ else:
+ server = "unknown"
+ port = 10800
+ return (server, port)
+
+ def _parse_results(self, c):
+ column_names = next(c)
+ columns = [{"name": col, "friendly_name": col.lower()} for col in column_names]
+ rows = [dict(zip(column_names, self.normalise_row(row))) for row in c]
+
+ return (columns, rows)
+
+ def run_query(self, query, user):
+ connection = None
+
+ try:
+ server = self.configuration.get("server", "127.0.0.1:10800")
+ user = self.configuration.get("user", None)
+ password = self.configuration.get("password", None)
+ tls = self.configuration.get("tls", False)
+ distributed_joins = self.configuration.get("distributed_joins", False)
+ enforce_join_order = self.configuration.get("enforce_join_order", False)
+ lazy = self.configuration.get("lazy", True)
+ gridgain = self.configuration.get("gridgain", False)
+
+ if gridgain:
+ from pygridgain import Client
+ else:
+ from pyignite import Client
+
+ connection = Client(username=user, password=password, use_ssl=tls)
+ connection.connect([self.server_to_connection(s) for s in server.split(",")])
+
+ cursor = connection.sql(
+ query,
+ include_field_names=True,
+ distributed_joins=distributed_joins,
+ enforce_join_order=enforce_join_order,
+ lazy=lazy,
+ )
+ logger.debug("Ignite running query: %s", query)
+
+ result = self._parse_results(cursor)
+ data = {"columns": result[0], "rows": result[1]}
+ error = None
+
+ except (KeyboardInterrupt, JobTimeoutException):
+ connection.cancel()
+ raise
+ finally:
+ if connection:
+ connection.close()
+
+ return data, error
+
+
+register(Ignite)
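
The new runner's `server` setting is a comma-separated list of `host[:port]` pairs; each entry is turned into a `(host, port)` tuple by `server_to_connection` before `Client.connect` is called. A hedged sketch of the same flow outside the runner (hostnames and credentials are placeholders):

    # Sketch only: mirrors what Ignite.run_query does with its configuration.
    from pyignite import Client  # with "gridgain" enabled, pygridgain.Client is used instead

    def to_endpoint(entry, default_port=10800):
        # same idea as Ignite.server_to_connection above
        host, _, port = entry.partition(":")
        return (host, int(port) if port else default_port)

    client = Client(username="redash", password="secret", use_ssl=False)
    client.connect([to_endpoint(s) for s in "node1:10800,node2:10801".split(",")])

    cursor = client.sql("SELECT 1", include_field_names=True)
    print(next(cursor))  # with include_field_names=True the first row is the column names

That first row of column names is exactly what `_parse_results` consumes with `next(c)` before building the row dicts.
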
diff --git a/redash/query_runner/impala_ds.py b/redash/query_runner/impala_ds.py
index 9981586142..8a78147346 100644
--- a/redash/query_runner/impala_ds.py
+++ b/redash/query_runner/impala_ds.py
@@ -1,7 +1,15 @@
import logging
-from redash.query_runner import *
-from redash.utils import json_dumps
+from redash.query_runner import (
+ TYPE_BOOLEAN,
+ TYPE_DATETIME,
+ TYPE_FLOAT,
+ TYPE_INTEGER,
+ TYPE_STRING,
+ BaseSQLQueryRunner,
+ JobTimeoutException,
+ register,
+)
logger = logging.getLogger(__name__)
@@ -10,7 +18,7 @@
from impala.error import DatabaseError, RPCError
enabled = True
-except ImportError as e:
+except ImportError:
enabled = False
COLUMN_NAME = 0
@@ -53,6 +61,7 @@ def configuration_schema(cls):
},
"database": {"type": "string"},
"use_ldap": {"type": "boolean"},
+ "use_ssl": {"type": "boolean"},
"ldap_user": {"type": "string"},
"ldap_password": {"type": "string"},
"timeout": {"type": "number"},
@@ -67,21 +76,13 @@ def type(cls):
def _get_tables(self, schema_dict):
schemas_query = "show schemas;"
- tables_query = "show tables in %s;"
- columns_query = "show column stats %s.%s;"
-
- for schema_name in [
- str(a["name"]) for a in self._run_query_internal(schemas_query)
- ]:
- for table_name in [
- str(a["name"])
- for a in self._run_query_internal(tables_query % schema_name)
- ]:
+ tables_query = "show tables in `%s`;"
+ columns_query = "show column stats `%s`.`%s`;"
+
+ for schema_name in [str(a["name"]) for a in self._run_query_internal(schemas_query)]:
+ for table_name in [str(a["name"]) for a in self._run_query_internal(tables_query % schema_name)]:
columns = [
- str(a["Column"])
- for a in self._run_query_internal(
- columns_query % (schema_name, table_name)
- )
+ str(a["Column"]) for a in self._run_query_internal(columns_query % (schema_name, table_name))
]
if schema_name != "default":
@@ -92,7 +93,6 @@ def _get_tables(self, schema_dict):
return list(schema_dict.values())
def run_query(self, query, user):
-
connection = None
try:
connection = connect(**self.configuration.to_dict())
@@ -119,14 +119,13 @@ def run_query(self, query, user):
rows = [dict(zip(column_names, row)) for row in cursor]
data = {"columns": columns, "rows": rows}
- json_data = json_dumps(data)
error = None
cursor.close()
except DatabaseError as e:
- json_data = None
+ data = None
error = str(e)
except RPCError as e:
- json_data = None
+ data = None
error = "Metastore Error [%s]" % str(e)
except (KeyboardInterrupt, JobTimeoutException):
connection.cancel()
@@ -135,7 +134,7 @@ def run_query(self, query, user):
if connection:
connection.close()
- return json_data, error
+ return data, error
register(Impala)
diff --git a/redash/query_runner/influx_db.py b/redash/query_runner/influx_db.py
index 6ca3ecb23c..7f5249b1ac 100644
--- a/redash/query_runner/influx_db.py
+++ b/redash/query_runner/influx_db.py
@@ -1,7 +1,12 @@
import logging
-from redash.query_runner import *
-from redash.utils import json_dumps
+from redash.query_runner import (
+ TYPE_FLOAT,
+ TYPE_INTEGER,
+ TYPE_STRING,
+ BaseQueryRunner,
+ register,
+)
logger = logging.getLogger(__name__)
@@ -14,25 +19,36 @@
enabled = False
+TYPES_MAP = {
+ str: TYPE_STRING,
+ int: TYPE_INTEGER,
+ float: TYPE_FLOAT,
+}
+
+
+def _get_type(value):
+ return TYPES_MAP.get(type(value), TYPE_STRING)
+
+
def _transform_result(results):
- result_columns = []
+ column_names = []
result_rows = []
for result in results:
for series in result.raw.get("series", []):
for column in series["columns"]:
- if column not in result_columns:
- result_columns.append(column)
+ if column not in column_names:
+ column_names.append(column)
tags = series.get("tags", {})
for key in tags.keys():
- if key not in result_columns:
- result_columns.append(key)
+ if key not in column_names:
+ column_names.append(key)
for result in results:
for series in result.raw.get("series", []):
for point in series["values"]:
result_row = {}
- for column in result_columns:
+ for column in column_names:
tags = series.get("tags", {})
if column in tags:
result_row[column] = tags[column]
@@ -42,9 +58,12 @@ def _transform_result(results):
result_row[column] = value
result_rows.append(result_row)
- return json_dumps(
- {"columns": [{"name": c} for c in result_columns], "rows": result_rows}
- )
+ if len(result_rows) > 0:
+ result_columns = [{"name": c, "type": _get_type(result_rows[0][c])} for c in result_rows[0].keys()]
+ else:
+ result_columns = [{"name": c, "type": TYPE_STRING} for c in column_names]
+
+ return {"columns": result_columns, "rows": result_rows}
class InfluxDB(BaseQueryRunner):
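
Column types for InfluxDB 1.x results are now inferred from the first result row via `_get_type`, falling back to strings when the result set is empty. The mapping is deliberately small (a sketch, importing the helpers from the module above):

    from redash.query_runner import TYPE_FLOAT, TYPE_INTEGER, TYPE_STRING
    from redash.query_runner.influx_db import _get_type

    # Anything outside TYPES_MAP (None, timestamps serialized as strings, ...) falls back to TYPE_STRING.
    assert _get_type(10) == TYPE_INTEGER
    assert _get_type(0.5) == TYPE_FLOAT
    assert _get_type("2023-01-01T00:00:00Z") == TYPE_STRING
    assert _get_type(None) == TYPE_STRING
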
diff --git a/redash/query_runner/influx_db_v2.py b/redash/query_runner/influx_db_v2.py
new file mode 100644
index 0000000000..1c23ad5ac1
--- /dev/null
+++ b/redash/query_runner/influx_db_v2.py
@@ -0,0 +1,214 @@
+import logging
+import os
+from base64 import b64decode
+from tempfile import NamedTemporaryFile
+from typing import Any, Dict, Optional, Tuple, Type, TypeVar
+
+from redash.query_runner import (
+ TYPE_BOOLEAN,
+ TYPE_DATETIME,
+ TYPE_FLOAT,
+ TYPE_INTEGER,
+ TYPE_STRING,
+ BaseQueryRunner,
+ register,
+)
+
+try:
+ from influxdb_client import InfluxDBClient
+
+ enabled = True
+except ImportError:
+ enabled = False
+
+logger = logging.getLogger(__name__)
+
+T = TypeVar("T")
+
+TYPES_MAP = {
+ "integer": TYPE_INTEGER,
+ "long": TYPE_INTEGER,
+ "float": TYPE_FLOAT,
+ "double": TYPE_FLOAT,
+ "boolean": TYPE_BOOLEAN,
+ "string": TYPE_STRING,
+ "datetime:RFC3339": TYPE_DATETIME,
+}
+
+
+class InfluxDBv2(BaseQueryRunner):
+ """
+ Query runner for influxdb version 2.
+ """
+
+ should_annotate_query = False
+
+ def _get_influx_kwargs(self) -> Dict:
+ """
+ Determines additional arguments for influxdb client connection.
+ :return: An object with additional arguments for influxdb client.
+ """
+ return {
+ "verify_ssl": self.configuration.get("verify_ssl", None),
+ "cert_file": self._create_cert_file("cert_File"),
+ "cert_key_file": self._create_cert_file("cert_key_File"),
+ "cert_key_password": self.configuration.get("cert_key_password", None),
+ "ssl_ca_cert": self._create_cert_file("ssl_ca_cert_File"),
+ }
+
+ def _create_cert_file(self, key: str) -> str:
+ """
+ Creates a temporary file in the filesystem from the base64-encoded
+ content stored in the configuration.
+ :param key: The configuration key to read the content from.
+ :return: The name of the temporary file.
+ """
+ cert_file_name = None
+
+ if self.configuration.get(key, None) is not None:
+ with NamedTemporaryFile(mode="w", delete=False) as cert_file:
+ cert_bytes = b64decode(self.configuration[key])
+ cert_file.write(cert_bytes.decode("utf-8"))
+ cert_file_name = cert_file.name
+
+ return cert_file_name
+
+ def _cleanup_cert_files(self, influx_kwargs: Dict) -> None:
+ """
+ Deletes the temporarily stored certificate files from the filesystem.
+ """
+ for key in ["cert_file", "cert_key_file", "ssl_ca_cert"]:
+ cert_path = influx_kwargs.get(key, None)
+ if cert_path is not None and os.path.exists(cert_path):
+ os.remove(cert_path)
+
+ @classmethod
+ def configuration_schema(cls: Type[T]) -> Dict:
+ """
+ Defines a configuration schema for this query runner.
+ :param cls: Object of this class.
+ :return: The defined configuration schema.
+ """
+ # file-type properties have to end with "File" in their name
+ return {
+ "type": "object",
+ "properties": {
+ "url": {"type": "string", "title": "URL"},
+ "org": {"type": "string", "title": "Organization"},
+ "token": {"type": "string", "title": "Token"},
+ "verify_ssl": {"type": "boolean", "title": "Verify SSL", "default": False},
+ "cert_File": {"type": "string", "title": "SSL Client Certificate", "default": None},
+ "cert_key_File": {"type": "string", "title": "SSL Client Key", "default": None},
+ "cert_key_password": {"type": "string", "title": "Password for SSL Client Key", "default": None},
+ "ssl_ca_cert_File": {"type": "string", "title": "SSL Root Certificate", "default": None},
+ },
+ "order": ["url", "org", "token", "cert_File", "cert_key_File", "cert_key_password", "ssl_ca_cert_File"],
+ "required": ["url", "org", "token"],
+ "secret": ["token", "cert_File", "cert_key_File", "cert_key_password", "ssl_ca_cert_File"],
+ "extra_options": ["verify_ssl", "cert_File", "cert_key_File", "cert_key_password", "ssl_ca_cert_File"],
+ }
+
+ @classmethod
+ def enabled(cls: Type[T]) -> bool:
+ """
+ Determines whether this query runner is enabled.
+ :param cls: Object of this class.
+ :return: True, if this query runner is enabled; otherwise False.
+ """
+ return enabled
+
+ def test_connection(self) -> None:
+ """
+ Tests the healthiness of the influxdb instance. If it is not healthy,
+ it logs an error message and raises an exception with an appropriate
+ message.
+ :raises Exception: If the remote influxdb instance is not healthy.
+ """
+ try:
+ influx_kwargs = self._get_influx_kwargs()
+ with InfluxDBClient(
+ url=self.configuration["url"],
+ token=self.configuration["token"],
+ org=self.configuration["org"],
+ **influx_kwargs,
+ ) as client:
+ healthy = client.health()
+ if healthy.status == "fail":
+ logger.error("Connection test failed, due to: " f"{healthy.message!r}.")
+ raise Exception("InfluxDB is not healthy. Check logs for more " "information.")
+ except Exception:
+ raise
+ finally:
+ self._cleanup_cert_files(influx_kwargs)
+
+ def _get_type(self, type_: str) -> str:
+ """
+ Maps a data type reported by the database to the internal Redash
+ column type.
+ :param type_: The type from the database to map to internal datatype.
+ :return: The name of the internal datatype.
+ """
+ return TYPES_MAP.get(type_, "string")
+
+ def _get_data_from_tables(self, tables: Any) -> Dict:
+ """
+ Converts the given tables into the columns/rows structure that the
+ redash ui renders. It collects all available columns and records
+ from the tables.
+ :param tables: A list of FluxTable instances.
+ :return: An object with columns and rows list.
+ """
+ columns = []
+ rows = []
+
+ for table in tables:
+ for column in table.columns:
+ column_entry = {
+ "name": column.label,
+ "type": self._get_type(column.data_type),
+ "friendly_name": column.label.title(),
+ }
+ if column_entry not in columns:
+ columns.append(column_entry)
+
+ rows.extend(record.values for record in table.records)
+
+ return {"columns": columns, "rows": rows}
+
+ def run_query(self, query: str, user: str) -> Tuple[Optional[str], Optional[str]]:
+ """
+ Runs a given query against the influxdb instance and returns its
+ result.
+ :param query: The query to execute.
+ :param user: The user who runs the query.
+ :return: A 2-tuple:
+ 1. The query result in an appropriate format for the redash ui,
+ or None if an error occurred.
+ 2. An error message if an error occurred, otherwise None.
+ """
+ data = None
+ error = None
+
+ try:
+ influx_kwargs = self._get_influx_kwargs()
+ with InfluxDBClient(
+ url=self.configuration["url"],
+ token=self.configuration["token"],
+ org=self.configuration["org"],
+ **influx_kwargs,
+ ) as client:
+ logger.debug(f"InfluxDB got query: {query!r}")
+
+ tables = client.query_api().query(query)
+
+ data = self._get_data_from_tables(tables)
+ except Exception as ex:
+ error = str(ex)
+ finally:
+ self._cleanup_cert_files(influx_kwargs)
+
+ return data, error
+
+
+register(InfluxDBv2)
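
Taken together, the new runner accepts a Flux query string and returns the usual columns/rows dict. A hedged sketch of driving it directly (URL, organization, token and bucket are placeholders):

    # Sketch only; configuration values are placeholders.
    runner = InfluxDBv2({
        "url": "http://localhost:8086",
        "org": "my-org",
        "token": "my-token",
        "verify_ssl": False,
    })

    flux = """
    from(bucket: "telemetry")
      |> range(start: -1h)
      |> limit(n: 10)
    """

    data, error = runner.run_query(flux, user=None)
    if error is None:
        print([col["name"] for col in data["columns"]], len(data["rows"]))
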
diff --git a/redash/query_runner/jql.py b/redash/query_runner/jql.py
index d2a4dcfd5d..f7d26aa24c 100644
--- a/redash/query_runner/jql.py
+++ b/redash/query_runner/jql.py
@@ -1,12 +1,12 @@
import re
from collections import OrderedDict
-from redash.query_runner import *
-from redash.utils import json_dumps, json_loads
+from redash.query_runner import TYPE_STRING, BaseHTTPQueryRunner, register
+from redash.utils import json_loads
# TODO: make this more general and move into __init__.py
-class ResultSet(object):
+class ResultSet:
def __init__(self):
self.columns = OrderedDict()
self.rows = []
@@ -26,13 +26,13 @@ def add_column(self, column, column_type=TYPE_STRING):
}
def to_json(self):
- return json_dumps({"rows": self.rows, "columns": list(self.columns.values())})
+ return {"rows": self.rows, "columns": list(self.columns.values())}
def merge(self, set):
self.rows = self.rows + set.rows
-def parse_issue(issue, field_mapping):
+def parse_issue(issue, field_mapping): # noqa: C901
result = OrderedDict()
result["key"] = issue["key"]
@@ -45,9 +45,7 @@ def parse_issue(issue, field_mapping):
# if field mapping with dict member mappings defined get value of each member
for member_name in member_names:
if member_name in v:
- result[
- field_mapping.get_dict_output_field_name(k, member_name)
- ] = v[member_name]
+ result[field_mapping.get_dict_output_field_name(k, member_name)] = v[member_name]
else:
# these special mapping rules are kept for backwards compatibility
@@ -72,9 +70,7 @@ def parse_issue(issue, field_mapping):
if member_name in listItem:
listValues.append(listItem[member_name])
if len(listValues) > 0:
- result[
- field_mapping.get_dict_output_field_name(k, member_name)
- ] = ",".join(listValues)
+ result[field_mapping.get_dict_output_field_name(k, member_name)] = ",".join(listValues)
else:
# otherwise support list values only for non-dict items
@@ -114,7 +110,7 @@ def __init__(cls, query_field_mapping):
member_name = None
# check for member name contained in field name
- member_parser = re.search("(\w+)\.(\w+)", k)
+ member_parser = re.search(r"(\w+)\.(\w+)", k)
if member_parser:
field_name = member_parser.group(1)
member_name = member_parser.group(2)
diff --git a/redash/query_runner/json_ds.py b/redash/query_runner/json_ds.py
index a6cf42eb5e..1a06d82fd2 100644
--- a/redash/query_runner/json_ds.py
+++ b/redash/query_runner/json_ds.py
@@ -1,14 +1,14 @@
+import datetime
import logging
+from urllib.parse import urljoin
+
import yaml
-import datetime
from funcy import compact, project
from redash.utils.requests_session import requests_or_advocate, UnacceptableAddressException
from redash.utils import json_dumps
from redash.query_runner import (
- BaseHTTPQueryRunner,
- register,
TYPE_BOOLEAN,
TYPE_DATETIME,
TYPE_FLOAT,
@@ -59,12 +59,10 @@ def _get_type(value):
def add_column(columns, column_name, column_type):
if _get_column_by_name(columns, column_name) is None:
- columns.append(
- {"name": column_name, "friendly_name": column_name, "type": column_type}
- )
+ columns.append({"name": column_name, "friendly_name": column_name, "type": column_type})
-def _apply_path_search(response, path):
+def _apply_path_search(response, path, default=None):
if path is None:
return response
@@ -74,6 +72,8 @@ def _apply_path_search(response, path):
current_path = path_parts.pop()
if current_path in response:
response = response[current_path]
+ elif default is not None:
+ return default
else:
raise Exception("Couldn't find path {} in response.".format(path))
@@ -81,6 +81,8 @@ def _apply_path_search(response, path):
def _normalize_json(data, path):
+ if not data:
+ return None
data = _apply_path_search(data, path)
if isinstance(data, dict):
@@ -97,9 +99,7 @@ def _sort_columns_with_fields(columns, fields):
# TODO: merge the logic here with the one in MongoDB's query runner
-def parse_json(data, path, fields):
- data = _normalize_json(data, path)
-
+def parse_json(data, fields):
rows = []
columns = []
@@ -133,17 +133,19 @@ def parse_json(data, path, fields):
class JSON(BaseHTTPQueryRunner):
requires_url = False
+ base_url_title = "Base URL"
@classmethod
def configuration_schema(cls):
return {
"type": "object",
"properties": {
+ "base_url": {"type": "string", "title": cls.base_url_title},
"username": {"type": "string", "title": cls.username_title},
"password": {"type": "string", "title": cls.password_title},
},
"secret": ["password"],
- "order": ["username", "password"],
+ "order": ["base_url", "username", "password"],
}
def __init__(self, configuration):
@@ -156,28 +158,36 @@ def test_connection(self):
def run_query(self, query, user):
query = parse_query(query)
+ data, error = self._run_json_query(query)
+ if error is not None:
+ return None, error
+
+ if data:
+ return data, None
+ return None, "Got empty response from '{}'.".format(query["url"])
+
+ def _run_json_query(self, query):
if not isinstance(query, dict):
- raise QueryParseError(
- "Query should be a YAML object describing the URL to query."
- )
+ raise QueryParseError("Query should be a YAML object describing the URL to query.")
if "url" not in query:
raise QueryParseError("Query must include 'url' option.")
-
method = query.get("method", "get")
- request_options = project(query, ("params", "headers", "data", "auth", "json"))
+ request_options = project(query, ("params", "headers", "data", "auth", "json", "verify"))
fields = query.get("fields")
path = query.get("path")
+ if "pagination" in query:
+ pagination = RequestPagination.from_config(self.configuration, query["pagination"])
+ else:
+ pagination = None
+
if isinstance(request_options.get("auth", None), list):
request_options["auth"] = tuple(request_options["auth"])
elif self.configuration.get("username") or self.configuration.get("password"):
- request_options["auth"] = (
- self.configuration.get("username"),
- self.configuration.get("password"),
- )
+ request_options["auth"] = (self.configuration.get("username"), self.configuration.get("password"))
if method not in ("get", "post"):
raise QueryParseError("Only GET or POST methods are allowed.")
@@ -185,19 +195,91 @@ def run_query(self, query, user):
if fields and not isinstance(fields, list):
raise QueryParseError("'fields' needs to be a list.")
- response, error = self.get_response(
- query["url"], http_method=method, **request_options
- )
+ results, error = self._get_all_results(query["url"], method, path, pagination, **request_options)
+ return parse_json(results, fields), error
- if error is not None:
- return None, error
+ def _get_all_results(self, url, method, result_path, pagination, **request_options):
+ """Get all results from a paginated endpoint."""
+ base_url = self.configuration.get("base_url")
+ url = urljoin(base_url, url)
- data = json_dumps(parse_json(response.json(), path, fields))
+ results = []
+ has_more = True
+ while has_more:
+ response, error = self._get_json_response(url, method, **request_options)
+ has_more = False
- if data:
- return data, None
- else:
- return None, "Got empty response from '{}'.".format(query["url"])
+ result = _normalize_json(response, result_path)
+ if result:
+ results.extend(result)
+ if pagination:
+ has_more, url, request_options = pagination.next(url, request_options, response)
+
+ return results, error
+
+ def _get_json_response(self, url, method, **request_options):
+ response, error = self.get_response(url, http_method=method, **request_options)
+ result = response.json() if error is None else {}
+ return result, error
+
+
+class RequestPagination:
+ def next(self, url, request_options, response):
+ """Checks the response for another page.
+
+ Returns:
+ has_more, next_url, next_request_options
+ """
+ return False, None, request_options
+
+ @staticmethod
+ def from_config(configuration, pagination):
+ if not isinstance(pagination, dict) or not isinstance(pagination.get("type"), str):
+ raise QueryParseError("'pagination' should be an object with a `type` property")
+
+ if pagination["type"] == "url":
+ return UrlPagination(pagination)
+ elif pagination["type"] == "token":
+ return TokenPagination(pagination)
+
+ raise QueryParseError("Unknown 'pagination.type' {}".format(pagination["type"]))
+
+
+class UrlPagination(RequestPagination):
+ def __init__(self, pagination):
+ self.path = pagination.get("path", "_links.next.href")
+ if not isinstance(self.path, str):
+ raise QueryParseError("'pagination.path' should be a string")
+
+ def next(self, url, request_options, response):
+ next_url = _apply_path_search(response, self.path, "")
+ if not next_url:
+ return False, None, request_options
+
+ next_url = urljoin(url, next_url)
+ return True, next_url, request_options
+
+
+class TokenPagination(RequestPagination):
+ def __init__(self, pagination):
+ self.fields = pagination.get("fields", ["next_page_token", "page_token"])
+ if not isinstance(self.fields, list) or len(self.fields) != 2:
+ raise QueryParseError("'pagination.fields' should be a list of 2 field names")
+
+ def next(self, url, request_options, response):
+ next_token = _apply_path_search(response, self.fields[0], "")
+ if not next_token:
+ return False, None, request_options
+
+ params = request_options.get("params", {})
+
+ # prevent infinite loop that can happen if self.fields[1] is wrong
+ if next_token == params.get(self.fields[1]):
+ raise Exception("{} did not change; possible misconfiguration".format(self.fields[0]))
+
+ params[self.fields[1]] = next_token
+ request_options["params"] = params
+ return True, url, request_options
register(JSON)
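
The JSON runner's query is still a YAML object; the new `pagination` block opts a query into either link-following or token-based paging, and the rows from every page are concatenated before `parse_json` runs. Two hedged examples (the endpoint URLs are placeholders; the values shown mirror the defaults of UrlPagination and TokenPagination above):

    # Follows a "next" link embedded in each response until it is absent.
    url_paginated_query = """
    url: https://api.example.com/items
    path: data
    pagination:
      type: url
      path: _links.next.href
    """

    # Reads a token from each response and sends it back as a request parameter.
    token_paginated_query = """
    url: https://api.example.com/items
    path: items
    pagination:
      type: token
      fields: [next_page_token, page_token]
    """

In the token form the first field names the response property to read and the second names the request parameter to set; TokenPagination raises if the token stops changing, to guard against the infinite-loop case noted in the code.
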
diff --git a/redash/query_runner/kylin.py b/redash/query_runner/kylin.py
index cfc02c671f..18f6ff9c6b 100644
--- a/redash/query_runner/kylin.py
+++ b/redash/query_runner/kylin.py
@@ -1,11 +1,20 @@
-import os
import logging
+import os
+
import requests
from requests.auth import HTTPBasicAuth
from redash import settings
-from redash.query_runner import *
-from redash.utils import json_dumps
+from redash.query_runner import (
+ TYPE_BOOLEAN,
+ TYPE_DATE,
+ TYPE_DATETIME,
+ TYPE_FLOAT,
+ TYPE_INTEGER,
+ TYPE_STRING,
+ BaseQueryRunner,
+ register,
+)
logger = logging.getLogger(__name__)
@@ -92,7 +101,7 @@ def run_query(self, query, user):
columns = self.get_columns(data["columnMetas"])
rows = self.get_rows(columns, data["results"])
- return json_dumps({"columns": columns, "rows": rows}), None
+ return {"columns": columns, "rows": rows}, None
def get_schema(self, get_stats=False):
url = self.configuration["url"]
@@ -127,9 +136,7 @@ def get_columns(self, colmetas):
)
def get_rows(self, columns, results):
- return [
- dict(zip((column["name"] for column in columns), row)) for row in results
- ]
+ return [dict(zip((column["name"] for column in columns), row)) for row in results]
def get_table_schema(self, table):
name = table["table_NAME"]
diff --git a/redash/query_runner/mapd.py b/redash/query_runner/mapd.py
deleted file mode 100644
index 45f77cc273..0000000000
--- a/redash/query_runner/mapd.py
+++ /dev/null
@@ -1,109 +0,0 @@
-try:
- import pymapd
-
- enabled = True
-except ImportError:
- enabled = False
-
-from redash.query_runner import BaseSQLQueryRunner, register
-from redash.query_runner import (
- TYPE_STRING,
- TYPE_DATE,
- TYPE_DATETIME,
- TYPE_INTEGER,
- TYPE_FLOAT,
- TYPE_BOOLEAN,
-)
-from redash.utils import json_dumps
-
-TYPES_MAP = {
- 0: TYPE_INTEGER,
- 1: TYPE_INTEGER,
- 2: TYPE_INTEGER,
- 3: TYPE_FLOAT,
- 4: TYPE_FLOAT,
- 5: TYPE_FLOAT,
- 6: TYPE_STRING,
- 7: TYPE_DATE,
- 8: TYPE_DATETIME,
- 9: TYPE_DATE,
- 10: TYPE_BOOLEAN,
- 11: TYPE_DATE,
- 12: TYPE_DATE,
-}
-
-
-class Mapd(BaseSQLQueryRunner):
- @classmethod
- def configuration_schema(cls):
- return {
- "type": "object",
- "properties": {
- "host": {"type": "string", "default": "localhost"},
- "port": {"type": "number", "default": 9091},
- "user": {"type": "string", "default": "mapd", "title": "username"},
- "password": {"type": "string", "default": "HyperInteractive"},
- "database": {"type": "string", "default": "mapd"},
- },
- "order": ["user", "password", "host", "port", "database"],
- "required": ["host", "port", "user", "password", "database"],
- "secret": ["password"],
- }
-
- @classmethod
- def enabled(cls):
- return enabled
-
- def connect_database(self):
- connection = pymapd.connect(
- user=self.configuration["user"],
- password=self.configuration["password"],
- host=self.configuration["host"],
- port=self.configuration["port"],
- dbname=self.configuration["database"],
- )
- return connection
-
- def run_query(self, query, user):
- connection = self.connect_database()
- cursor = connection.cursor()
-
- try:
- cursor.execute(query)
- columns = self.fetch_columns(
- [(i[0], TYPES_MAP.get(i[1], None)) for i in cursor.description]
- )
- rows = [
- dict(zip((column["name"] for column in columns), row)) for row in cursor
- ]
- data = {"columns": columns, "rows": rows}
- error = None
- json_data = json_dumps(data)
- finally:
- cursor.close()
- connection.close()
-
- return json_data, error
-
- def _get_tables(self, schema):
- connection = self.connect_database()
- try:
- for table_name in connection.get_tables():
- schema[table_name] = {"name": table_name, "columns": []}
- for row_column in connection.get_table_details(table_name):
- schema[table_name]["columns"].append(row_column[0])
- finally:
- connection.close
-
- return list(schema.values())
-
- def test_connection(self):
- connection = self.connect_database()
- try:
- tables = connection.get_tables()
- num_tables = tables.count(tables)
- finally:
- connection.close
-
-
-register(Mapd)
diff --git a/redash/query_runner/memsql_ds.py b/redash/query_runner/memsql_ds.py
index d1c235608d..f24c230816 100644
--- a/redash/query_runner/memsql_ds.py
+++ b/redash/query_runner/memsql_ds.py
@@ -1,8 +1,15 @@
import logging
-import sys
-from redash.query_runner import *
-from redash.utils import json_dumps
+from redash.query_runner import (
+ TYPE_BOOLEAN,
+ TYPE_DATETIME,
+ TYPE_FLOAT,
+ TYPE_INTEGER,
+ TYPE_STRING,
+ BaseSQLQueryRunner,
+ JobTimeoutException,
+ register,
+)
logger = logging.getLogger(__name__)
@@ -71,27 +78,19 @@ def _get_tables(self, schema):
columns_query = "show columns in %s"
for schema_name in [
- a
- for a in [
- str(a["Database"]) for a in self._run_query_internal(schemas_query)
- ]
- if len(a) > 0
+ a for a in [str(a["Database"]) for a in self._run_query_internal(schemas_query)] if len(a) > 0
]:
for table_name in [
a
for a in [
- str(a["Tables_in_%s" % schema_name])
- for a in self._run_query_internal(tables_query % schema_name)
+ str(a["Tables_in_%s" % schema_name]) for a in self._run_query_internal(tables_query % schema_name)
]
if len(a) > 0
]:
table_name = ".".join((schema_name, table_name))
columns = [
a
- for a in [
- str(a["Field"])
- for a in self._run_query_internal(columns_query % table_name)
- ]
+ for a in [str(a["Field"]) for a in self._run_query_internal(columns_query % table_name)]
if len(a) > 0
]
@@ -99,7 +98,6 @@ def _get_tables(self, schema):
return list(schema.values())
def run_query(self, query, user):
-
cursor = None
try:
cursor = database.connect(**self.configuration.to_dict())
@@ -128,12 +126,9 @@ def run_query(self, query, user):
if column_names:
for column in column_names:
- columns.append(
- {"name": column, "friendly_name": column, "type": TYPE_STRING}
- )
+ columns.append({"name": column, "friendly_name": column, "type": TYPE_STRING})
data = {"columns": columns, "rows": rows}
- json_data = json_dumps(data)
error = None
except (KeyboardInterrupt, JobTimeoutException):
cursor.close()
@@ -142,7 +137,7 @@ def run_query(self, query, user):
if cursor:
cursor.close()
- return json_data, error
+ return data, error
register(MemSQL)
diff --git a/redash/query_runner/mongodb.py b/redash/query_runner/mongodb.py
index f9ed2130f5..1a1afb4bd7 100644
--- a/redash/query_runner/mongodb.py
+++ b/redash/query_runner/mongodb.py
@@ -4,18 +4,27 @@
from dateutil.parser import parse
-from redash.query_runner import *
-from redash.utils import JSONEncoder, json_dumps, json_loads, parse_human_time
+from redash.query_runner import (
+ TYPE_BOOLEAN,
+ TYPE_DATETIME,
+ TYPE_FLOAT,
+ TYPE_INTEGER,
+ TYPE_STRING,
+ BaseQueryRunner,
+ register,
+)
+from redash.utils import json_loads, parse_human_time
logger = logging.getLogger(__name__)
try:
import pymongo
- from bson.objectid import ObjectId
- from bson.timestamp import Timestamp
from bson.decimal128 import Decimal128
- from bson.son import SON
+ from bson.json_util import JSONOptions
from bson.json_util import object_hook as bson_object_hook
+ from bson.objectid import ObjectId
+ from bson.son import SON
+ from bson.timestamp import Timestamp
enabled = True
@@ -33,18 +42,7 @@
}
-class MongoDBJSONEncoder(JSONEncoder):
- def default(self, o):
- if isinstance(o, ObjectId):
- return str(o)
- elif isinstance(o, Timestamp):
- return super(MongoDBJSONEncoder, self).default(o.as_datetime())
- elif isinstance(o, Decimal128):
- return o.to_decimal()
- return super(MongoDBJSONEncoder, self).default(o)
-
-
-date_regex = re.compile('ISODate\("(.*)"\)', re.IGNORECASE)
+date_regex = re.compile(r'ISODate\("(.*)"\)', re.IGNORECASE)
def parse_oids(oids):
@@ -67,10 +65,11 @@ def datetime_parser(dct):
if "$oids" in dct:
return parse_oids(dct["$oids"])
- return bson_object_hook(dct)
+ opts = JSONOptions(tz_aware=True)
+ return bson_object_hook(dct, json_options=opts)
-def parse_query_json(query):
+def parse_query_json(query: str):
query_data = json_loads(query, object_hook=datetime_parser)
return query_data
@@ -83,47 +82,66 @@ def _get_column_by_name(columns, column_name):
return None
-def parse_results(results):
+def _parse_dict(dic: dict, flatten: bool = False) -> dict:
+ res = {}
+
+ def _flatten(x, name=""):
+ if isinstance(x, dict):
+ for k, v in x.items():
+ _flatten(v, "{}.{}".format(name, k))
+ elif isinstance(x, list):
+ for idx, item in enumerate(x):
+ _flatten(item, "{}.{}".format(name, idx))
+ else:
+ res[name[1:]] = x
+
+ if flatten:
+ _flatten(dic)
+ else:
+ for key, value in dic.items():
+ if isinstance(value, dict):
+ for tmp_key, tmp_value in _parse_dict(value).items():
+ new_key = "{}.{}".format(key, tmp_key)
+ res[new_key] = tmp_value
+ else:
+ res[key] = value
+ return res
+
+
+def parse_results(results: list, flatten: bool = False) -> list:
rows = []
columns = []
for row in results:
parsed_row = {}
- for key in row:
- if isinstance(row[key], dict):
- for inner_key in row[key]:
- column_name = "{}.{}".format(key, inner_key)
- if _get_column_by_name(columns, column_name) is None:
- columns.append(
- {
- "name": column_name,
- "friendly_name": column_name,
- "type": TYPES_MAP.get(
- type(row[key][inner_key]), TYPE_STRING
- ),
- }
- )
-
- parsed_row[column_name] = row[key][inner_key]
-
- else:
- if _get_column_by_name(columns, key) is None:
- columns.append(
- {
- "name": key,
- "friendly_name": key,
- "type": TYPES_MAP.get(type(row[key]), TYPE_STRING),
- }
- )
-
- parsed_row[key] = row[key]
+ parsed_row = _parse_dict(row, flatten)
+ for column_name, value in parsed_row.items():
+ if _get_column_by_name(columns, column_name) is None:
+ columns.append(
+ {
+ "name": column_name,
+ "friendly_name": column_name,
+ "type": TYPES_MAP.get(type(value), TYPE_STRING),
+ }
+ )
rows.append(parsed_row)
return rows, columns
+def _sorted_fields(fields):
+ ord = {}
+ for k, v in fields.items():
+ if isinstance(v, int):
+ ord[k] = v
+ else:
+ ord[k] = len(fields)
+
+ return sorted(ord, key=ord.get)
+
+
class MongoDB(BaseQueryRunner):
should_annotate_query = False
@@ -148,6 +166,14 @@ def configuration_schema(cls):
],
"title": "Replica Set Read Preference",
},
+ "flatten": {
+ "type": "string",
+ "extendedEnum": [
+ {"value": "False", "name": "False"},
+ {"value": "True", "name": "True"},
+ ],
+ "title": "Flatten Results",
+ },
},
"secret": ["password"],
"required": ["connectionString", "dbName"],
@@ -165,12 +191,22 @@ def __init__(self, configuration):
self.db_name = self.configuration["dbName"]
self.is_replica_set = (
- True
- if "replicaSetName" in self.configuration
- and self.configuration["replicaSetName"]
- else False
+ True if "replicaSetName" in self.configuration and self.configuration["replicaSetName"] else False
)
+ self.flatten = self.configuration.get("flatten", "False").upper() in ["TRUE", "YES", "ON", "1", "Y", "T"]
+ logger.debug("flatten: {}".format(self.flatten))
+
+ @classmethod
+ def custom_json_encoder(cls, dec, o):
+ if isinstance(o, ObjectId):
+ return str(o)
+ elif isinstance(o, Timestamp):
+ return dec.default(o.as_datetime())
+ elif isinstance(o, Decimal128):
+ return o.to_decimal()
+ return None
+
def _get_db(self):
kwargs = {}
if self.is_replica_set:
@@ -185,9 +221,7 @@ def _get_db(self):
if "password" in self.configuration:
kwargs["password"] = self.configuration["password"]
- db_connection = pymongo.MongoClient(
- self.configuration["connectionString"], **kwargs
- )
+ db_connection = pymongo.MongoClient(self.configuration["connectionString"], **kwargs)
return db_connection[self.db_name]
@@ -221,15 +255,21 @@ def _get_collection_fields(self, db, collection_name):
# document written.
collection_is_a_view = self._is_collection_a_view(db, collection_name)
documents_sample = []
- if collection_is_a_view:
- for d in db[collection_name].find().limit(2):
- documents_sample.append(d)
- else:
- for d in db[collection_name].find().sort([("$natural", 1)]).limit(1):
- documents_sample.append(d)
-
- for d in db[collection_name].find().sort([("$natural", -1)]).limit(1):
- documents_sample.append(d)
+ try:
+ if collection_is_a_view:
+ for d in db[collection_name].find().limit(2):
+ documents_sample.append(d)
+ else:
+ for d in db[collection_name].find().sort([("$natural", 1)]).limit(1):
+ documents_sample.append(d)
+
+ for d in db[collection_name].find().sort([("$natural", -1)]).limit(1):
+ documents_sample.append(d)
+ except Exception as ex:
+ template = "An exception of type {0} occurred. Arguments:\n{1!r}"
+ message = template.format(type(ex).__name__, ex.args)
+ logger.error(message)
+ return []
columns = []
for d in documents_sample:
self._merge_property_names(columns, d)
@@ -238,29 +278,28 @@ def _get_collection_fields(self, db, collection_name):
def get_schema(self, get_stats=False):
schema = {}
db = self._get_db()
- for collection_name in db.collection_names():
+ for collection_name in db.list_collection_names():
if collection_name.startswith("system."):
continue
columns = self._get_collection_fields(db, collection_name)
- schema[collection_name] = {
- "name": collection_name,
- "columns": sorted(columns),
- }
+ if columns:
+ schema[collection_name] = {
+ "name": collection_name,
+ "columns": sorted(columns),
+ }
return list(schema.values())
- def run_query(self, query, user):
+ def run_query(self, query, user): # noqa: C901
db = self._get_db()
- logger.debug(
- "mongodb connection string: %s", self.configuration["connectionString"]
- )
+ logger.debug("mongodb connection string: %s", self.configuration["connectionString"])
logger.debug("mongodb got query: %s", query)
try:
query_data = parse_query_json(query)
- except ValueError:
- return None, "Invalid query format. The query is not a valid JSON."
+ except ValueError as error:
+ return None, f"Invalid JSON format. {error.__str__()}"
if "collection" not in query_data:
return None, "'collection' must have a value to run a query"
@@ -276,8 +315,10 @@ def run_query(self, query, user):
if "$sort" in step:
sort_list = []
for sort_item in step["$sort"]:
- sort_list.append((sort_item["name"], sort_item["direction"]))
-
+ if isinstance(sort_item, dict):
+ sort_list.append((sort_item["name"], sort_item.get("direction", 1)))
+ elif isinstance(sort_item, list):
+ sort_list.append(tuple(sort_item))
step["$sort"] = SON(sort_list)
if "fields" in query_data:
@@ -287,26 +328,30 @@ def run_query(self, query, user):
if "sort" in query_data and query_data["sort"]:
s = []
for field_data in query_data["sort"]:
- s.append((field_data["name"], field_data["direction"]))
+ if isinstance(field_data, dict):
+ s.append((field_data["name"], field_data.get("direction", 1)))
+ elif isinstance(field_data, list):
+ s.append(tuple(field_data))
columns = []
rows = []
cursor = None
if q or (not q and not aggregate):
- if s:
- cursor = db[collection].find(q, f).sort(s)
+ if "count" in query_data:
+ options = {opt: query_data[opt] for opt in ("skip", "limit") if opt in query_data}
+ cursor = db[collection].count_documents(q, **options)
else:
- cursor = db[collection].find(q, f)
+ if s:
+ cursor = db[collection].find(q, f).sort(s)
+ else:
+ cursor = db[collection].find(q, f)
- if "skip" in query_data:
- cursor = cursor.skip(query_data["skip"])
+ if "skip" in query_data:
+ cursor = cursor.skip(query_data["skip"])
- if "limit" in query_data:
- cursor = cursor.limit(query_data["limit"])
-
- if "count" in query_data:
- cursor = cursor.count()
+ if "limit" in query_data:
+ cursor = cursor.limit(query_data["limit"])
elif aggregate:
allow_disk_use = query_data.get("allowDiskUse", False)
@@ -323,22 +368,21 @@ def run_query(self, query, user):
cursor = r
if "count" in query_data:
- columns.append(
- {"name": "count", "friendly_name": "count", "type": TYPE_INTEGER}
- )
+ columns.append({"name": "count", "friendly_name": "count", "type": TYPE_INTEGER})
rows.append({"count": cursor})
else:
- rows, columns = parse_results(cursor)
+ rows, columns = parse_results(cursor, flatten=self.flatten)
if f:
ordered_columns = []
- for k in sorted(f, key=f.get):
+ for k in _sorted_fields(f):
column = _get_column_by_name(columns, k)
if column:
ordered_columns.append(column)
columns = ordered_columns
+ logger.debug("columns: {}".format(columns))
if query_data.get("sortColumns"):
reverse = query_data["sortColumns"] == "desc"
@@ -346,9 +390,8 @@ def run_query(self, query, user):
data = {"columns": columns, "rows": rows}
error = None
- json_data = json_dumps(data, cls=MongoDBJSONEncoder)
- return json_data, error
+ return data, error
register(MongoDB)
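
The new "Flatten Results" option switches `_parse_dict` from dotting only nested documents to also expanding arrays by index. A small illustration (the document contents are made up):

    from redash.query_runner.mongodb import _parse_dict

    doc = {"name": "a", "geo": {"lat": 1.0, "lng": 2.0}, "tags": ["x", "y"]}

    # Default behaviour: nested dicts are dotted, lists are kept as-is.
    assert _parse_dict(doc) == {"name": "a", "geo.lat": 1.0, "geo.lng": 2.0, "tags": ["x", "y"]}

    # With flattening enabled, list items become columns of their own.
    assert _parse_dict(doc, flatten=True) == {
        "name": "a",
        "geo.lat": 1.0,
        "geo.lng": 2.0,
        "tags.0": "x",
        "tags.1": "y",
    }
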
diff --git a/redash/query_runner/mssql.py b/redash/query_runner/mssql.py
index 73cd38d3a8..4bb53c0d9f 100644
--- a/redash/query_runner/mssql.py
+++ b/redash/query_runner/mssql.py
@@ -1,9 +1,13 @@
import logging
-import sys
-import uuid
-from redash.query_runner import *
-from redash.utils import json_dumps, json_loads
+from redash.query_runner import (
+ TYPE_DATETIME,
+ TYPE_FLOAT,
+ TYPE_STRING,
+ BaseSQLQueryRunner,
+ JobTimeoutException,
+ register,
+)
logger = logging.getLogger(__name__)
@@ -30,6 +34,10 @@ class SqlServer(BaseSQLQueryRunner):
should_annotate_query = False
noop_query = "SELECT 1"
+ limit_query = " TOP 1000"
+ limit_keywords = ["TOP"]
+ limit_after_select = True
+
@classmethod
def configuration_schema(cls):
return {
@@ -80,9 +88,7 @@ def _get_tables(self, schema):
results, error = self.run_query(query, None)
if error is not None:
- raise Exception("Failed getting schema.")
-
- results = json_loads(results)
+ self._handle_run_query_error(error)
for row in results["rows"]:
if row["table_schema"] != self.configuration["db"]:
@@ -131,22 +137,17 @@ def run_query(self, query, user):
data = cursor.fetchall()
if cursor.description is not None:
- columns = self.fetch_columns(
- [(i[0], types_map.get(i[1], None)) for i in cursor.description]
- )
- rows = [
- dict(zip((column["name"] for column in columns), row))
- for row in data
- ]
+ columns = self.fetch_columns([(i[0], types_map.get(i[1], None)) for i in cursor.description])
+ rows = [dict(zip((column["name"] for column in columns), row)) for row in data]
data = {"columns": columns, "rows": rows}
- json_data = json_dumps(data)
error = None
else:
error = "No data was returned."
- json_data = None
+ data = None
cursor.close()
+ connection.commit()
except pymssql.Error as e:
try:
# Query errors are at `args[1]`
@@ -154,7 +155,7 @@ def run_query(self, query, user):
except IndexError:
# Connection errors are `args[0][1]`
error = e.args[0][1]
- json_data = None
+ data = None
except (KeyboardInterrupt, JobTimeoutException):
connection.cancel()
raise
@@ -162,7 +163,7 @@ def run_query(self, query, user):
if connection:
connection.close()
- return json_data, error
+ return data, error
register(SqlServer)
diff --git a/redash/query_runner/mssql_odbc.py b/redash/query_runner/mssql_odbc.py
index 0c02db4138..ad6ca5dab6 100644
--- a/redash/query_runner/mssql_odbc.py
+++ b/redash/query_runner/mssql_odbc.py
@@ -1,10 +1,11 @@
import logging
-import sys
-import uuid
-from redash.query_runner import *
+from redash.query_runner import (
+ BaseSQLQueryRunner,
+ JobTimeoutException,
+ register,
+)
from redash.query_runner.mssql import types_map
-from redash.utils import json_dumps, json_loads
logger = logging.getLogger(__name__)
@@ -20,6 +21,10 @@ class SQLServerODBC(BaseSQLQueryRunner):
should_annotate_query = False
noop_query = "SELECT 1"
+ limit_query = " TOP 1000"
+ limit_keywords = ["TOP"]
+ limit_after_select = True
+
@classmethod
def configuration_schema(cls):
return {
@@ -35,11 +40,15 @@ def configuration_schema(cls):
"default": "UTF-8",
"title": "Character Set",
},
- "use_ssl": {"type": "boolean", "title": "Use SSL", "default": False,},
+ "use_ssl": {
+ "type": "boolean",
+ "title": "Use SSL",
+ "default": False,
+ },
"verify_ssl": {
"type": "boolean",
"title": "Verify SSL certificate",
- "default": True,
+ "default": False,
},
},
"order": [
@@ -69,6 +78,10 @@ def name(cls):
def type(cls):
return "mssql_odbc"
+ @property
+ def supports_auto_limit(self):
+ return False
+
def _get_tables(self, schema):
query = """
SELECT table_schema, table_name, column_name
@@ -82,9 +95,7 @@ def _get_tables(self, schema):
results, error = self.run_query(query, None)
if error is not None:
- raise Exception("Failed getting schema.")
-
- results = json_loads(results)
+ self._handle_run_query_error(error)
for row in results["rows"]:
if row["table_schema"] != self.configuration["db"]:
@@ -108,18 +119,30 @@ def run_query(self, query, user):
password = self.configuration.get("password", "")
db = self.configuration["db"]
port = self.configuration.get("port", 1433)
- charset = self.configuration.get("charset", "UTF-8")
- connection_string_fmt = "DRIVER={{ODBC Driver 17 for SQL Server}};PORT={};SERVER={};DATABASE={};UID={};PWD={}"
- connection_string = connection_string_fmt.format(
- port, server, db, user, password
- )
+ connection_params = {
+ "Driver": "{ODBC Driver 18 for SQL Server}",
+ "Server": server,
+ "Port": port,
+ "Database": db,
+ "Uid": user,
+ "Pwd": password,
+ }
if self.configuration.get("use_ssl", False):
- connection_string += ";Encrypt=YES"
+ connection_params["Encrypt"] = "YES"
if not self.configuration.get("verify_ssl"):
- connection_string += ";TrustServerCertificate=YES"
+ connection_params["TrustServerCertificate"] = "YES"
+ else:
+ connection_params["TrustServerCertificate"] = "NO"
+ else:
+ connection_params["Encrypt"] = "NO"
+
+ def fn(k):
+ return "{}={}".format(k, connection_params[k])
+
+ connection_string = ";".join(list(map(fn, connection_params)))
connection = pyodbc.connect(connection_string)
cursor = connection.cursor()
@@ -128,20 +151,14 @@ def run_query(self, query, user):
data = cursor.fetchall()
if cursor.description is not None:
- columns = self.fetch_columns(
- [(i[0], types_map.get(i[1], None)) for i in cursor.description]
- )
- rows = [
- dict(zip((column["name"] for column in columns), row))
- for row in data
- ]
+ columns = self.fetch_columns([(i[0], types_map.get(i[1], None)) for i in cursor.description])
+ rows = [dict(zip((column["name"] for column in columns), row)) for row in data]
data = {"columns": columns, "rows": rows}
- json_data = json_dumps(data)
error = None
else:
error = "No data was returned."
- json_data = None
+ data = None
cursor.close()
except pyodbc.Error as e:
@@ -151,7 +168,7 @@ def run_query(self, query, user):
except IndexError:
# Connection errors are `args[0][1]`
error = e.args[0][1]
- json_data = None
+ data = None
except (KeyboardInterrupt, JobTimeoutException):
connection.cancel()
raise
@@ -159,7 +176,7 @@ def run_query(self, query, user):
if connection:
connection.close()
- return json_data, error
+ return data, error
register(SQLServerODBC)
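
The ODBC connection string is now assembled from a parameter dict, so the SSL-related pieces are added as keys rather than appended text. A sketch of the joined result for a TLS connection with certificate verification (host and credentials are placeholders):

    # Sketch of the string the builder above produces (placeholder values).
    connection_params = {
        "Driver": "{ODBC Driver 18 for SQL Server}",
        "Server": "db.example.com",
        "Port": 1433,
        "Database": "sales",
        "Uid": "redash",
        "Pwd": "********",
        "Encrypt": "YES",                # use_ssl enabled
        "TrustServerCertificate": "NO",  # verify_ssl enabled
    }
    connection_string = ";".join("{}={}".format(k, v) for k, v in connection_params.items())
    # Driver={ODBC Driver 18 for SQL Server};Server=db.example.com;Port=1433;Database=sales;...;Encrypt=YES;TrustServerCertificate=NO
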
diff --git a/redash/query_runner/mysql.py b/redash/query_runner/mysql.py
index 61dfde8c1c..1788ce07fc 100644
--- a/redash/query_runner/mysql.py
+++ b/redash/query_runner/mysql.py
@@ -3,18 +3,17 @@
import threading
from redash.query_runner import (
+ TYPE_DATE,
+ TYPE_DATETIME,
TYPE_FLOAT,
TYPE_INTEGER,
- TYPE_DATETIME,
TYPE_STRING,
- TYPE_DATE,
BaseSQLQueryRunner,
InterruptException,
JobTimeoutException,
register,
)
from redash.settings import parse_boolean
-from redash.utils import json_dumps, json_loads
try:
import MySQLdb
@@ -44,7 +43,7 @@
}
-class Result(object):
+class Result:
def __init__(self):
pass
@@ -54,9 +53,7 @@ class Mysql(BaseSQLQueryRunner):
@classmethod
def configuration_schema(cls):
- show_ssl_settings = parse_boolean(
- os.environ.get("MYSQL_SHOW_SSL_SETTINGS", "true")
- )
+ show_ssl_settings = parse_boolean(os.environ.get("MYSQL_SHOW_SSL_SETTINGS", "true"))
schema = {
"type": "object",
@@ -66,8 +63,22 @@ def configuration_schema(cls):
"passwd": {"type": "string", "title": "Password"},
"db": {"type": "string", "title": "Database name"},
"port": {"type": "number", "default": 3306},
+ "connect_timeout": {"type": "number", "default": 60, "title": "Connection Timeout"},
+ "charset": {"type": "string", "default": "utf8"},
+ "use_unicode": {"type": "boolean", "default": True},
+ "autocommit": {"type": "boolean", "default": False},
},
- "order": ["host", "port", "user", "passwd", "db"],
+ "order": [
+ "host",
+ "port",
+ "user",
+ "passwd",
+ "db",
+ "connect_timeout",
+ "charset",
+ "use_unicode",
+ "autocommit",
+ ],
"required": ["db"],
"secret": ["passwd"],
}
@@ -75,6 +86,18 @@ def configuration_schema(cls):
if show_ssl_settings:
schema["properties"].update(
{
+ "ssl_mode": {
+ "type": "string",
+ "title": "SSL Mode",
+ "default": "preferred",
+ "extendedEnum": [
+ {"value": "disabled", "name": "Disabled"},
+ {"value": "preferred", "name": "Preferred"},
+ {"value": "required", "name": "Required"},
+ {"value": "verify-ca", "name": "Verify CA"},
+ {"value": "verify-identity", "name": "Verify Identity"},
+ ],
+ },
"use_ssl": {"type": "boolean", "title": "Use SSL"},
"ssl_cacert": {
"type": "string",
@@ -108,9 +131,10 @@ def _connection(self):
passwd=self.configuration.get("passwd", ""),
db=self.configuration["db"],
port=self.configuration.get("port", 3306),
- charset="utf8",
- use_unicode=True,
- connect_timeout=60,
+ charset=self.configuration.get("charset", "utf8"),
+ use_unicode=self.configuration.get("use_unicode", True),
+ connect_timeout=self.configuration.get("connect_timeout", 60),
+ autocommit=self.configuration.get("autocommit", True),
)
ssl_options = self._get_ssl_parameters()
@@ -134,9 +158,7 @@ def _get_tables(self, schema):
results, error = self.run_query(query, None)
if error is not None:
- raise Exception("Failed getting schema.")
-
- results = json_loads(results)
+ self._handle_run_query_error(error)
for row in results["rows"]:
if row["table_schema"] != self.configuration["db"]:
@@ -151,7 +173,6 @@ def _get_tables(self, schema):
return list(schema.values())
-
def run_query(self, query, user):
ev = threading.Event()
thread_id = ""
@@ -161,9 +182,7 @@ def run_query(self, query, user):
try:
connection = self._connection()
thread_id = connection.thread_id()
- t = threading.Thread(
- target=self._run_query, args=(query, user, connection, r, ev)
- )
+ t = threading.Thread(target=self._run_query, args=(query, user, connection, r, ev))
t.start()
while not ev.wait(1):
pass
@@ -172,7 +191,7 @@ def run_query(self, query, user):
t.join()
raise
- return r.json_data, r.error
+ return r.data, r.error
def _run_query(self, query, user, connection, r, ev):
try:
@@ -190,26 +209,21 @@ def _run_query(self, query, user, connection, r, ev):
# TODO - very similar to pg.py
if desc is not None:
- columns = self.fetch_columns(
- [(i[0], types_map.get(i[1], None)) for i in desc]
- )
- rows = [
- dict(zip((column["name"] for column in columns), row))
- for row in data
- ]
+ columns = self.fetch_columns([(i[0], types_map.get(i[1], None)) for i in desc])
+ rows = [dict(zip((column["name"] for column in columns), row)) for row in data]
data = {"columns": columns, "rows": rows}
- r.json_data = json_dumps(data)
+ r.data = data
r.error = None
else:
- r.json_data = None
+ r.data = None
r.error = "No data was returned."
cursor.close()
except MySQLdb.Error as e:
if cursor:
cursor.close()
- r.json_data = None
+ r.data = None
r.error = e.args[1]
finally:
ev.set()
@@ -223,7 +237,7 @@ def _get_ssl_parameters(self):
ssl_params = {}
if self.configuration.get("use_ssl"):
- config_map = {"ssl_cacert": "ca", "ssl_cert": "cert", "ssl_key": "key"}
+ config_map = {"ssl_mode": "preferred", "ssl_cacert": "ca", "ssl_cert": "cert", "ssl_key": "key"}
for key, cfg in config_map.items():
val = self.configuration.get(key)
if val:
@@ -281,9 +295,7 @@ def configuration_schema(cls):
def _get_ssl_parameters(self):
if self.configuration.get("use_ssl"):
- ca_path = os.path.join(
- os.path.dirname(__file__), "./files/rds-combined-ca-bundle.pem"
- )
+ ca_path = os.path.join(os.path.dirname(__file__), "./files/rds-combined-ca-bundle.pem")
return {"ca": ca_path}
return None
diff --git a/redash/query_runner/nz.py b/redash/query_runner/nz.py
new file mode 100644
index 0000000000..51f68ef1ed
--- /dev/null
+++ b/redash/query_runner/nz.py
@@ -0,0 +1,173 @@
+import logging
+import traceback
+
+from redash.query_runner import (
+ TYPE_BOOLEAN,
+ TYPE_DATE,
+ TYPE_DATETIME,
+ TYPE_FLOAT,
+ TYPE_INTEGER,
+ TYPE_STRING,
+ BaseSQLQueryRunner,
+ register,
+)
+
+logger = logging.getLogger(__name__)
+
+try:
+ import nzpy
+ import nzpy.core
+
+ _enabled = True
+ _nztypes = {
+ nzpy.core.NzTypeInt1: TYPE_INTEGER,
+ nzpy.core.NzTypeInt2: TYPE_INTEGER,
+ nzpy.core.NzTypeInt: TYPE_INTEGER,
+ nzpy.core.NzTypeInt8: TYPE_INTEGER,
+ nzpy.core.NzTypeBool: TYPE_BOOLEAN,
+ nzpy.core.NzTypeDate: TYPE_DATE,
+ nzpy.core.NzTypeTimestamp: TYPE_DATETIME,
+ nzpy.core.NzTypeDouble: TYPE_FLOAT,
+ nzpy.core.NzTypeFloat: TYPE_FLOAT,
+ nzpy.core.NzTypeChar: TYPE_STRING,
+ nzpy.core.NzTypeNChar: TYPE_STRING,
+ nzpy.core.NzTypeNVarChar: TYPE_STRING,
+ nzpy.core.NzTypeVarChar: TYPE_STRING,
+ nzpy.core.NzTypeVarFixedChar: TYPE_STRING,
+ nzpy.core.NzTypeNumeric: TYPE_FLOAT,
+ }
+
+ _cat_types = {
+ 16: TYPE_BOOLEAN, # boolean
+ 17: TYPE_STRING, # bytea
+ 19: TYPE_STRING, # name type
+ 20: TYPE_INTEGER, # int8
+ 21: TYPE_INTEGER, # int2
+ 23: TYPE_INTEGER, # int4
+ 25: TYPE_STRING, # TEXT type
+ 26: TYPE_INTEGER, # oid
+ 28: TYPE_INTEGER, # xid
+ 700: TYPE_FLOAT, # float4
+ 701: TYPE_FLOAT, # float8
+ 705: TYPE_STRING, # unknown
+ 829: TYPE_STRING, # MACADDR type
+ 1042: TYPE_STRING, # CHAR type
+ 1043: TYPE_STRING, # VARCHAR type
+ 1082: TYPE_DATE, # date
+ 1083: TYPE_DATETIME,
+ 1114: TYPE_DATETIME, # timestamp w/ tz
+ 1184: TYPE_DATETIME,
+ 1700: TYPE_FLOAT, # NUMERIC
+ 2275: TYPE_STRING, # cstring
+ 2950: TYPE_STRING, # uuid
+ }
+except ImportError:
+ _enabled = False
+ _nztypes = {}
+ _cat_types = {}
+
+
+class Netezza(BaseSQLQueryRunner):
+ noop_query = "SELECT 1"
+
+ @classmethod
+ def configuration_schema(cls):
+ return {
+ "type": "object",
+ "properties": {
+ "user": {"type": "string"},
+ "password": {"type": "string"},
+ "host": {"type": "string", "default": "127.0.0.1"},
+ "port": {"type": "number", "default": 5480},
+ "database": {"type": "string", "title": "Database Name", "default": "system"},
+ },
+ "order": ["host", "port", "user", "password", "database"],
+ "required": ["user", "password", "database"],
+ "secret": ["password"],
+ }
+
+ @classmethod
+ def type(cls):
+ return "nz"
+
+ def __init__(self, configuration):
+ super().__init__(configuration)
+ self._conn = None
+
+ @property
+ def connection(self):
+ if self._conn is None:
+ self._conn = nzpy.connect(
+ host=self.configuration.get("host"),
+ user=self.configuration.get("user"),
+ password=self.configuration.get("password"),
+ port=self.configuration.get("port"),
+ database=self.configuration.get("database"),
+ )
+ return self._conn
+
+ def get_schema(self, get_stats=False):
+ qry = """
+ select
+ table_schema || '.' || table_name as table_name,
+ column_name,
+ data_type
+ from
+ columns
+ where
+ table_schema not in (^information_schema^, ^definition_schema^) and
+ table_catalog = current_catalog;
+ """
+ schema = {}
+ with self.connection.cursor() as cursor:
+ cursor.execute(qry)
+ for table_name, column_name, data_type in cursor:
+ if table_name not in schema:
+ schema[table_name] = {"name": table_name, "columns": []}
+ schema[table_name]["columns"].append({"name": column_name, "type": data_type})
+ return list(schema.values())
+
+ @classmethod
+ def enabled(cls):
+ global _enabled
+ return _enabled
+
+ def type_map(self, typid, func):
+ global _nztypes, _cat_types
+ typ = _nztypes.get(typid)
+ if typ is None:
+ return _cat_types.get(typid)
+ # check for conflicts
+ if typid == nzpy.core.NzTypeVarChar:
+ return TYPE_BOOLEAN if "bool" in func.__name__ else typ
+
+ if typid == nzpy.core.NzTypeInt2:
+ return TYPE_STRING if "text" in func.__name__ else typ
+
+ if typid in (nzpy.core.NzTypeVarFixedChar, nzpy.core.NzTypeVarBinary, nzpy.core.NzTypeNVarChar):
+ return TYPE_INTEGER if "int" in func.__name__ else typ
+ return typ
+
+ def run_query(self, query, user):
+ data, error = None, None
+ try:
+ with self.connection.cursor() as cursor:
+ cursor.execute(query)
+ if cursor.description is None:
+ columns = {"columns": [], "rows": []}
+ else:
+ columns = self.fetch_columns(
+ [
+ (val[0], self.type_map(val[1], cursor.ps["row_desc"][i]["func"]))
+ for i, val in enumerate(cursor.description)
+ ]
+ )
+ rows = [dict(zip((column["name"] for column in columns), row)) for row in cursor]
+
+ data = {"columns": columns, "rows": rows}
+ except Exception:
+ error = traceback.format_exc()
+ return data, error
+
+
+register(Netezza)
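
The new Netezza runner connects lazily through nzpy and builds its schema from the system catalog. A minimal sketch of wiring it up, assuming nzpy is installed and a server is reachable; the credentials are placeholders that mirror the defaults in configuration_schema:

    from redash.query_runner.nz import Netezza

    runner = Netezza({
        "host": "127.0.0.1",     # schema default
        "port": 5480,            # schema default
        "user": "admin",         # placeholder
        "password": "password",  # placeholder
        "database": "system",    # schema default
    })
    data, error = runner.run_query("SELECT 1 AS one", None)
    print(data, error)
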
diff --git a/redash/query_runner/oracle.py b/redash/query_runner/oracle.py
index cdd5be80e6..c721de9069 100644
--- a/redash/query_runner/oracle.py
+++ b/redash/query_runner/oracle.py
@@ -1,26 +1,33 @@
-import os
import logging
+import os
-from redash.utils import json_dumps, json_loads
-from redash.query_runner import *
+from redash.query_runner import (
+ TYPE_DATETIME,
+ TYPE_FLOAT,
+ TYPE_INTEGER,
+ TYPE_STRING,
+ BaseSQLQueryRunner,
+ JobTimeoutException,
+ register,
+)
try:
- import cx_Oracle
+ import oracledb
TYPES_MAP = {
- cx_Oracle.DATETIME: TYPE_DATETIME,
- cx_Oracle.CLOB: TYPE_STRING,
- cx_Oracle.LOB: TYPE_STRING,
- cx_Oracle.FIXED_CHAR: TYPE_STRING,
- cx_Oracle.FIXED_NCHAR: TYPE_STRING,
- cx_Oracle.INTERVAL: TYPE_DATETIME,
- cx_Oracle.LONG_STRING: TYPE_STRING,
- cx_Oracle.NATIVE_FLOAT: TYPE_FLOAT,
- cx_Oracle.NCHAR: TYPE_STRING,
- cx_Oracle.NUMBER: TYPE_FLOAT,
- cx_Oracle.ROWID: TYPE_INTEGER,
- cx_Oracle.STRING: TYPE_STRING,
- cx_Oracle.TIMESTAMP: TYPE_DATETIME,
+ oracledb.DATETIME: TYPE_DATETIME,
+ oracledb.CLOB: TYPE_STRING,
+ oracledb.LOB: TYPE_STRING,
+ oracledb.FIXED_CHAR: TYPE_STRING,
+ oracledb.FIXED_NCHAR: TYPE_STRING,
+ oracledb.INTERVAL: TYPE_DATETIME,
+ oracledb.LONG_STRING: TYPE_STRING,
+ oracledb.NATIVE_FLOAT: TYPE_FLOAT,
+ oracledb.NCHAR: TYPE_STRING,
+ oracledb.NUMBER: TYPE_FLOAT,
+ oracledb.ROWID: TYPE_INTEGER,
+ oracledb.STRING: TYPE_STRING,
+ oracledb.TIMESTAMP: TYPE_DATETIME,
}
ENABLED = True
@@ -33,10 +40,12 @@
class Oracle(BaseSQLQueryRunner):
should_annotate_query = False
noop_query = "SELECT 1 FROM dual"
+ limit_query = " FETCH NEXT 1000 ROWS ONLY"
+ limit_keywords = ["ROW", "ROWS", "ONLY", "TIES"]
@classmethod
def get_col_type(cls, col_type, scale):
- if col_type == cx_Oracle.NUMBER:
+ if col_type == oracledb.NUMBER:
if scale is None:
return TYPE_INTEGER
if scale > 0:
@@ -56,7 +65,10 @@ def configuration_schema(cls):
"properties": {
"user": {"type": "string"},
"password": {"type": "string"},
- "host": {"type": "string"},
+ "host": {
+ "type": "string",
+ "title": "Host: To use a DSN Service Name instead, use the text string `_useservicename` in the host name field.",
+ },
"port": {"type": "number"},
"servicename": {"type": "string", "title": "DSN Service Name"},
"encoding": {"type": "string"},
@@ -83,12 +95,10 @@ def _get_tables(self, schema):
results, error = self.run_query(query, None)
if error is not None:
- raise Exception("Failed getting schema.")
-
- results = json_loads(results)
+ self._handle_run_query_error(error)
for row in results["rows"]:
- if row["OWNER"] != None:
+ if row["OWNER"] is not None:
table_name = "{}.{}".format(row["OWNER"], row["TABLE_NAME"])
else:
table_name = row["TABLE_NAME"]
@@ -104,21 +114,21 @@ def _get_tables(self, schema):
def _convert_number(cls, value):
try:
return int(value)
- except:
+ except BaseException:
return value
@classmethod
def output_handler(cls, cursor, name, default_type, length, precision, scale):
- if default_type in (cx_Oracle.CLOB, cx_Oracle.LOB):
- return cursor.var(cx_Oracle.LONG_STRING, 80000, cursor.arraysize)
+ if default_type in (oracledb.CLOB, oracledb.LOB):
+ return cursor.var(oracledb.LONG_STRING, 80000, cursor.arraysize)
- if default_type in (cx_Oracle.STRING, cx_Oracle.FIXED_CHAR):
+ if default_type in (oracledb.STRING, oracledb.FIXED_CHAR):
return cursor.var(str, length, cursor.arraysize)
- if default_type == cx_Oracle.NUMBER:
+ if default_type == oracledb.NUMBER:
if scale <= 0:
return cursor.var(
- cx_Oracle.STRING,
+ oracledb.STRING,
255,
outconverter=Oracle._convert_number,
arraysize=cursor.arraysize,
@@ -128,13 +138,17 @@ def run_query(self, query, user):
if self.configuration.get("encoding"):
os.environ["NLS_LANG"] = self.configuration["encoding"]
- dsn = cx_Oracle.makedsn(
- self.configuration["host"],
- self.configuration["port"],
- service_name=self.configuration["servicename"],
- )
+ # To use a DSN Service Name instead, use the text string `_useservicename` in the host name field.
+ if self.configuration["host"].lower() == "_useservicename":
+ dsn = self.configuration["servicename"]
+ else:
+ dsn = oracledb.makedsn(
+ self.configuration["host"],
+ self.configuration["port"],
+ service_name=self.configuration["servicename"],
+ )
- connection = cx_Oracle.connect(
+ connection = oracledb.connect(
user=self.configuration["user"],
password=self.configuration["password"],
dsn=dsn,
@@ -147,25 +161,21 @@ def run_query(self, query, user):
cursor.execute(query)
rows_count = cursor.rowcount
if cursor.description is not None:
- columns = self.fetch_columns(
- [
- (i[0], Oracle.get_col_type(i[1], i[5]))
- for i in cursor.description
- ]
- )
+ columns = self.fetch_columns([(i[0], Oracle.get_col_type(i[1], i[5])) for i in cursor.description])
rows = [dict(zip((c["name"] for c in columns), row)) for row in cursor]
data = {"columns": columns, "rows": rows}
error = None
- json_data = json_dumps(data)
else:
columns = [{"name": "Row(s) Affected", "type": "TYPE_INTEGER"}]
rows = [{"Row(s) Affected": rows_count}]
data = {"columns": columns, "rows": rows}
- json_data = json_dumps(data)
connection.commit()
- except cx_Oracle.DatabaseError as err:
- error = "Query failed. {}.".format(str(err))
- json_data = None
+ except oracledb.DatabaseError as err:
+ (err_args,) = err.args
+ line_number = query.count("\n", 0, err_args.offset) + 1
+ column_number = err_args.offset - query.rfind("\n", 0, err_args.offset) - 1
+ error = "Query failed at line {}, column {}: {}".format(str(line_number), str(column_number), str(err))
+ data = None
except (KeyboardInterrupt, JobTimeoutException):
connection.cancel()
raise
@@ -173,7 +183,7 @@ def run_query(self, query, user):
os.environ.pop("NLS_LANG", None)
connection.close()
- return json_data, error
+ return data, error
register(Oracle)
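
With the switch from cx_Oracle to python-oracledb, the DSN is either built with oracledb.makedsn or, when the host field is the literal _useservicename, taken verbatim from the DSN Service Name setting. A standalone sketch of that branch, assuming python-oracledb is installed; the configuration values are placeholders:

    import oracledb

    # Placeholder values illustrating the two DSN paths.
    configuration = {"host": "_useservicename", "port": 1521, "servicename": "ORCLPDB1"}

    if configuration["host"].lower() == "_useservicename":
        dsn = configuration["servicename"]  # pass the DSN Service Name through untouched
    else:
        dsn = oracledb.makedsn(
            configuration["host"], configuration["port"], service_name=configuration["servicename"]
        )
    print(dsn)
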
diff --git a/redash/query_runner/pg.py b/redash/query_runner/pg.py
index 6af812faeb..c7ddef1eb7 100644
--- a/redash/query_runner/pg.py
+++ b/redash/query_runner/pg.py
@@ -1,7 +1,6 @@
-import os
import logging
+import os
import select
-from contextlib import contextmanager
from base64 import b64decode
from tempfile import NamedTemporaryFile
from uuid import uuid4
@@ -9,8 +8,18 @@
import psycopg2
from psycopg2.extras import Range
-from redash.query_runner import *
-from redash.utils import JSONEncoder, json_dumps, json_loads
+from redash.query_runner import (
+ TYPE_BOOLEAN,
+ TYPE_DATE,
+ TYPE_DATETIME,
+ TYPE_FLOAT,
+ TYPE_INTEGER,
+ TYPE_STRING,
+ BaseSQLQueryRunner,
+ InterruptException,
+ JobTimeoutException,
+ register,
+)
logger = logging.getLogger(__name__)
@@ -30,30 +39,22 @@
701: TYPE_FLOAT,
16: TYPE_BOOLEAN,
1082: TYPE_DATE,
+ 1182: TYPE_DATE,
1114: TYPE_DATETIME,
1184: TYPE_DATETIME,
+ 1115: TYPE_DATETIME,
+ 1185: TYPE_DATETIME,
1014: TYPE_STRING,
1015: TYPE_STRING,
1008: TYPE_STRING,
1009: TYPE_STRING,
2951: TYPE_STRING,
+ 1043: TYPE_STRING,
+ 1002: TYPE_STRING,
+ 1003: TYPE_STRING,
}
-class PostgreSQLJSONEncoder(JSONEncoder):
- def default(self, o):
- if isinstance(o, Range):
- # From: https://github.com/psycopg/psycopg2/pull/779
- if o._bounds is None:
- return ""
-
- items = [o._bounds[0], str(o._lower), ", ", str(o._upper), o._bounds[1]]
-
- return "".join(items)
-
- return super(PostgreSQLJSONEncoder, self).default(o)
-
-
def _wait(conn, timeout=None):
while 1:
try:
@@ -182,13 +183,23 @@ def configuration_schema(cls):
def type(cls):
return "pg"
+ @classmethod
+ def custom_json_encoder(cls, dec, o):
+ if isinstance(o, Range):
+ # From: https://github.com/psycopg/psycopg2/pull/779
+ if o._bounds is None:
+ return ""
+
+ items = [o._bounds[0], str(o._lower), ", ", str(o._upper), o._bounds[1]]
+
+ return "".join(items)
+ return None
+
def _get_definitions(self, schema, query):
results, error = self.run_query(query, None)
if error is not None:
- raise Exception("Failed getting schema.")
-
- results = json_loads(results)
+ self._handle_run_query_error(error)
build_schema(results, schema)
@@ -221,6 +232,8 @@ def _get_tables(self, schema):
AND a.attnum > 0
AND NOT a.attisdropped
WHERE c.relkind IN ('m', 'f', 'p')
+ AND has_table_privilege(s.nspname || '.' || c.relname, 'select')
+ AND has_schema_privilege(s.nspname, 'usage')
UNION
@@ -261,26 +274,20 @@ def run_query(self, query, user):
_wait(connection)
if cursor.description is not None:
- columns = self.fetch_columns(
- [(i[0], types_map.get(i[1], None)) for i in cursor.description]
- )
- rows = [
- dict(zip((column["name"] for column in columns), row))
- for row in cursor
- ]
+ columns = self.fetch_columns([(i[0], types_map.get(i[1], None)) for i in cursor.description])
+ rows = [dict(zip((column["name"] for column in columns), row)) for row in cursor]
data = {"columns": columns, "rows": rows}
error = None
- json_data = json_dumps(data, ignore_nan=True, cls=PostgreSQLJSONEncoder)
else:
error = "Query completed but it returned no data."
- json_data = None
- except (select.error, OSError) as e:
+ data = None
+ except (select.error, OSError):
error = "Query interrupted. Please retry."
- json_data = None
+ data = None
except psycopg2.DatabaseError as e:
error = str(e)
- json_data = None
+ data = None
except (KeyboardInterrupt, InterruptException, JobTimeoutException):
connection.cancel()
raise
@@ -288,7 +295,7 @@ def run_query(self, query, user):
connection.close()
_cleanup_ssl_certs(self.ssl_config)
- return json_data, error
+ return data, error
class Redshift(PostgreSQL):
@@ -303,9 +310,7 @@ def name(cls):
def _get_connection(self):
self.ssl_config = {}
- sslrootcert_path = os.path.join(
- os.path.dirname(__file__), "./files/redshift-ca-bundle.crt"
- )
+ sslrootcert_path = os.path.join(os.path.dirname(__file__), "./files/redshift-ca-bundle.crt")
connection = psycopg2.connect(
user=self.configuration.get("user"),
@@ -383,12 +388,13 @@ def _get_tables(self, schema):
SELECT DISTINCT table_name,
table_schema,
column_name,
+ data_type,
ordinal_position AS pos
FROM svv_columns
WHERE table_schema NOT IN ('pg_internal','pg_catalog','information_schema')
AND table_schema NOT LIKE 'pg_temp_%'
)
- SELECT table_name, table_schema, column_name
+ SELECT table_name, table_schema, column_name, data_type
FROM tables
WHERE
HAS_SCHEMA_PRIVILEGE(table_schema, 'USAGE') AND
@@ -419,15 +425,11 @@ def enabled(cls):
def _login_method_selection(self):
if self.configuration.get("rolename"):
- if not self.configuration.get(
- "aws_access_key_id"
- ) or not self.configuration.get("aws_secret_access_key"):
+ if not self.configuration.get("aws_access_key_id") or not self.configuration.get("aws_secret_access_key"):
return "ASSUME_ROLE_NO_KEYS"
else:
return "ASSUME_ROLE_KEYS"
- elif self.configuration.get("aws_access_key_id") and self.configuration.get(
- "aws_secret_access_key"
- ):
+ elif self.configuration.get("aws_access_key_id") and self.configuration.get("aws_secret_access_key"):
return "KEYS"
elif not self.configuration.get("password"):
return "ROLE"
@@ -480,10 +482,9 @@ def configuration_schema(cls):
}
def _get_connection(self):
+ self.ssl_config = {}
- sslrootcert_path = os.path.join(
- os.path.dirname(__file__), "./files/redshift-ca-bundle.crt"
- )
+ sslrootcert_path = os.path.join(os.path.dirname(__file__), "./files/redshift-ca-bundle.crt")
login_method = self._login_method_selection()
@@ -495,23 +496,17 @@ def _get_connection(self):
aws_secret_access_key=self.configuration.get("aws_secret_access_key"),
)
elif login_method == "ROLE":
- client = boto3.client(
- "redshift", region_name=self.configuration.get("aws_region")
- )
+ client = boto3.client("redshift", region_name=self.configuration.get("aws_region"))
else:
if login_method == "ASSUME_ROLE_KEYS":
assume_client = client = boto3.client(
"sts",
region_name=self.configuration.get("aws_region"),
aws_access_key_id=self.configuration.get("aws_access_key_id"),
- aws_secret_access_key=self.configuration.get(
- "aws_secret_access_key"
- ),
+ aws_secret_access_key=self.configuration.get("aws_secret_access_key"),
)
else:
- assume_client = client = boto3.client(
- "sts", region_name=self.configuration.get("aws_region")
- )
+ assume_client = client = boto3.client("sts", region_name=self.configuration.get("aws_region"))
role_session = f"redash_{uuid4().hex}"
session_keys = assume_client.assume_role(
RoleArn=self.configuration.get("rolename"), RoleSessionName=role_session
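
The PostgreSQLJSONEncoder subclass is gone; psycopg2 Range values are instead handled by the new custom_json_encoder hook when results are eventually serialized. A short sketch of what that hook produces for a concrete range, assuming psycopg2 and the Redash package are importable; NumericRange stands in for any psycopg2 Range subclass:

    from psycopg2.extras import NumericRange

    from redash.query_runner.pg import PostgreSQL

    r = NumericRange(1, 10, "[)")
    # Renders a psycopg2 Range using its bound characters, e.g. "[1, 10)".
    print(PostgreSQL.custom_json_encoder(None, r))  # expected output: [1, 10)
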
diff --git a/redash/query_runner/phoenix.py b/redash/query_runner/phoenix.py
index c3e2f242dc..e76b2f7d3c 100644
--- a/redash/query_runner/phoenix.py
+++ b/redash/query_runner/phoenix.py
@@ -1,13 +1,20 @@
-from redash.query_runner import *
-from redash.utils import json_dumps, json_loads
-
import logging
+from redash.query_runner import (
+ TYPE_BOOLEAN,
+ TYPE_DATETIME,
+ TYPE_FLOAT,
+ TYPE_INTEGER,
+ TYPE_STRING,
+ BaseQueryRunner,
+ register,
+)
+
logger = logging.getLogger(__name__)
try:
import phoenixdb
- from phoenixdb.errors import *
+ from phoenixdb.errors import Error
enabled = True
@@ -72,9 +79,7 @@ def get_schema(self, get_stats=False):
results, error = self.run_query(query, None)
if error is not None:
- raise Exception("Failed getting schema.")
-
- results = json_loads(results)
+ self._handle_run_query_error(error)
for row in results["rows"]:
table_name = "{}.{}".format(row["TABLE_SCHEM"], row["TABLE_NAME"])
@@ -87,36 +92,26 @@ def get_schema(self, get_stats=False):
return list(schema.values())
def run_query(self, query, user):
- connection = phoenixdb.connect(
- url=self.configuration.get("url", ""), autocommit=True
- )
+ connection = phoenixdb.connect(url=self.configuration.get("url", ""), autocommit=True)
cursor = connection.cursor()
try:
cursor.execute(query)
- column_tuples = [
- (i[0], TYPES_MAPPING.get(i[1], None)) for i in cursor.description
- ]
+ column_tuples = [(i[0], TYPES_MAPPING.get(i[1], None)) for i in cursor.description]
columns = self.fetch_columns(column_tuples)
- rows = [
- dict(zip(([column["name"] for column in columns]), r))
- for i, r in enumerate(cursor.fetchall())
- ]
+ rows = [dict(zip(([column["name"] for column in columns]), r)) for i, r in enumerate(cursor.fetchall())]
data = {"columns": columns, "rows": rows}
- json_data = json_dumps(data)
error = None
cursor.close()
except Error as e:
- json_data = None
- error = "code: {}, sql state:{}, message: {}".format(
- e.code, e.sqlstate, str(e)
- )
+ data = None
+ error = "code: {}, sql state:{}, message: {}".format(e.code, e.sqlstate, str(e))
finally:
if connection:
connection.close()
- return json_data, error
+ return data, error
register(Phoenix)
diff --git a/redash/query_runner/pinot.py b/redash/query_runner/pinot.py
new file mode 100644
index 0000000000..0bcdcef9ed
--- /dev/null
+++ b/redash/query_runner/pinot.py
@@ -0,0 +1,143 @@
+try:
+ import pinotdb
+
+ enabled = True
+except ImportError:
+ enabled = False
+
+import logging
+
+import requests
+from requests.auth import HTTPBasicAuth
+
+from redash.query_runner import (
+ TYPE_BOOLEAN,
+ TYPE_DATETIME,
+ TYPE_FLOAT,
+ TYPE_INTEGER,
+ TYPE_STRING,
+ BaseQueryRunner,
+ register,
+)
+
+logger = logging.getLogger(__name__)
+
+PINOT_TYPES_MAPPING = {
+ "BOOLEAN": TYPE_BOOLEAN,
+ "INT": TYPE_INTEGER,
+ "LONG": TYPE_INTEGER,
+ "FLOAT": TYPE_FLOAT,
+ "DOUBLE": TYPE_FLOAT,
+ "STRING": TYPE_STRING,
+ "BYTES": TYPE_STRING,
+ "JSON": TYPE_STRING,
+ "TIMESTAMP": TYPE_DATETIME,
+}
+
+
+class Pinot(BaseQueryRunner):
+ noop_query = "SELECT 1"
+ username = None
+ password = None
+
+ @classmethod
+ def configuration_schema(cls):
+ return {
+ "type": "object",
+ "properties": {
+ "brokerHost": {"type": "string", "default": ""},
+ "brokerPort": {"type": "number", "default": 8099},
+ "brokerScheme": {"type": "string", "default": "http"},
+ "controllerURI": {"type": "string", "default": ""},
+ "username": {"type": "string"},
+ "password": {"type": "string"},
+ },
+ "order": ["brokerScheme", "brokerHost", "brokerPort", "controllerURI", "username", "password"],
+ "required": ["brokerHost", "controllerURI"],
+ "secret": ["password"],
+ }
+
+ @classmethod
+ def enabled(cls):
+ return enabled
+
+ def __init__(self, configuration):
+ super(Pinot, self).__init__(configuration)
+ self.controller_uri = self.configuration.get("controllerURI")
+ self.username = self.configuration.get("username") or None
+ self.password = self.configuration.get("password") or None
+
+ def run_query(self, query, user):
+ logger.debug("Running query %s with username: %s", query, self.username)
+ connection = pinotdb.connect(
+ host=self.configuration["brokerHost"],
+ port=self.configuration["brokerPort"],
+ path="/query/sql",
+ scheme=(self.configuration.get("brokerScheme") or "http"),
+ verify_ssl=False,
+ username=self.username,
+ password=self.password,
+ )
+
+ cursor = connection.cursor()
+
+ try:
+ cursor.execute(query)
+ logger.debug("cursor.schema = %s", cursor.schema)
+ columns = self.fetch_columns(
+ [(i["name"], PINOT_TYPES_MAPPING.get(i["type"], None)) for i in cursor.schema]
+ )
+ rows = [dict(zip((column["name"] for column in columns), row)) for row in cursor]
+
+ data = {"columns": columns, "rows": rows}
+ error = None
+ logger.debug("Pinot execute query [%s]", query)
+ finally:
+ connection.close()
+
+ return data, error
+
+ def get_schema(self, get_stats=False):
+ schema = {}
+ for schema_name in self.get_schema_names():
+ for table_name in self.get_table_names():
+ schema_table_name = "{}.{}".format(schema_name, table_name)
+            if schema_table_name not in schema:
+ schema[schema_table_name] = {"name": schema_table_name, "columns": []}
+ table_schema = self.get_pinot_table_schema(table_name)
+
+ for column in (
+ table_schema.get("dimensionFieldSpecs", [])
+ + table_schema.get("metricFieldSpecs", [])
+ + table_schema.get("dateTimeFieldSpecs", [])
+ ):
+ c = {
+ "name": column["name"],
+ "type": PINOT_TYPES_MAPPING[column["dataType"]],
+ }
+ schema[schema_table_name]["columns"].append(c)
+ return list(schema.values())
+
+ def get_schema_names(self):
+ return ["default"]
+
+ def get_pinot_table_schema(self, pinot_table_name):
+ return self.get_metadata_from_controller("/tables/" + pinot_table_name + "/schema")
+
+ def get_table_names(self):
+ return self.get_metadata_from_controller("/tables")["tables"]
+
+ def get_metadata_from_controller(self, path):
+ url = self.controller_uri + path
+ r = requests.get(url, headers={"Accept": "application/json"}, auth=HTTPBasicAuth(self.username, self.password))
+ try:
+ result = r.json()
+ logger.debug("get_metadata_from_controller from path %s", path)
+ except ValueError as e:
+ raise pinotdb.exceptions.DatabaseError(
+ f"Got invalid json response from {self.controller_uri}:{path}: {r.text}"
+ ) from e
+ return result
+
+
+register(Pinot)
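
The new Pinot runner executes SQL through the broker with pinotdb and fetches table schemas from the controller REST API. A minimal sketch with placeholder endpoints and credentials, assuming pinotdb is installed and a broker/controller are reachable:

    from redash.query_runner.pinot import Pinot

    runner = Pinot({
        "brokerScheme": "http",                                       # schema default
        "brokerHost": "pinot-broker.example.com",                     # placeholder
        "brokerPort": 8099,                                           # schema default
        "controllerURI": "http://pinot-controller.example.com:9000",  # placeholder
        "username": "redash",                                         # placeholder
        "password": "secret",                                         # placeholder
    })
    data, error = runner.run_query("SELECT 1 AS one", None)
    print(data, error)
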
diff --git a/redash/query_runner/presto.py b/redash/query_runner/presto.py
index ac76c5e710..e6e7bc785e 100644
--- a/redash/query_runner/presto.py
+++ b/redash/query_runner/presto.py
@@ -1,9 +1,17 @@
-from collections import defaultdict
-from redash.query_runner import *
-from redash.utils import json_dumps, json_loads
-
import logging
+from redash.query_runner import (
+ TYPE_BOOLEAN,
+ TYPE_DATE,
+ TYPE_FLOAT,
+ TYPE_INTEGER,
+ TYPE_STRING,
+ BaseQueryRunner,
+ InterruptException,
+ JobTimeoutException,
+ register,
+)
+
logger = logging.getLogger(__name__)
@@ -78,9 +86,7 @@ def get_schema(self, get_stats=False):
results, error = self.run_query(query, None)
if error is not None:
- raise Exception("Failed getting schema.")
-
- results = json_loads(results)
+ self._handle_run_query_error(error)
for row in results["rows"]:
table_name = "{}.{}".format(row["table_schema"], row["table_name"])
@@ -107,24 +113,16 @@ def run_query(self, query, user):
try:
cursor.execute(query)
- column_tuples = [
- (i[0], PRESTO_TYPES_MAPPING.get(i[1], None)) for i in cursor.description
- ]
+ column_tuples = [(i[0], PRESTO_TYPES_MAPPING.get(i[1], None)) for i in cursor.description]
columns = self.fetch_columns(column_tuples)
- rows = [
- dict(zip(([column["name"] for column in columns]), r))
- for i, r in enumerate(cursor.fetchall())
- ]
+ rows = [dict(zip(([column["name"] for column in columns]), r)) for i, r in enumerate(cursor.fetchall())]
data = {"columns": columns, "rows": rows}
- json_data = json_dumps(data)
error = None
except DatabaseError as db:
- json_data = None
+ data = None
default_message = "Unspecified DatabaseError: {0}".format(str(db))
if isinstance(db.args[0], dict):
- message = db.args[0].get("failureInfo", {"message", None}).get(
- "message"
- )
+ message = db.args[0].get("failureInfo", {"message", None}).get("message")
else:
message = None
error = default_message if message is None else message
@@ -132,7 +130,7 @@ def run_query(self, query, user):
cursor.cancel()
raise
- return json_data, error
+ return data, error
register(Presto)
diff --git a/redash/query_runner/prometheus.py b/redash/query_runner/prometheus.py
index ad6c3348ab..34b5aa94d5 100644
--- a/redash/query_runner/prometheus.py
+++ b/redash/query_runner/prometheus.py
@@ -1,10 +1,19 @@
-import requests
+import os
import time
+from base64 import b64decode
from datetime import datetime
-from dateutil import parser
+from tempfile import NamedTemporaryFile
from urllib.parse import parse_qs
-from redash.query_runner import BaseQueryRunner, register, TYPE_DATETIME, TYPE_STRING
-from redash.utils import json_dumps
+
+import requests
+from dateutil import parser
+
+from redash.query_runner import (
+ TYPE_DATETIME,
+ TYPE_STRING,
+ BaseQueryRunner,
+ register,
+)
def get_instant_rows(metrics_data):
@@ -48,7 +57,7 @@ def convert_query_range(payload):
continue
value = payload[key][0]
- if type(value) is str:
+ if isinstance(value, str):
# Don't convert timestamp string
try:
int(value)
@@ -66,29 +75,107 @@ def convert_query_range(payload):
class Prometheus(BaseQueryRunner):
should_annotate_query = False
+ def _get_datetime_now(self):
+ return datetime.now()
+
+ def _get_prometheus_kwargs(self):
+ ca_cert_file = self._create_cert_file("ca_cert_File")
+ if ca_cert_file is not None:
+ verify = ca_cert_file
+ else:
+ verify = self.configuration.get("verify_ssl", True)
+
+ cert_file = self._create_cert_file("cert_File")
+ cert_key_file = self._create_cert_file("cert_key_File")
+ if cert_file is not None and cert_key_file is not None:
+ cert = (cert_file, cert_key_file)
+ else:
+ cert = ()
+
+ return {
+ "verify": verify,
+ "cert": cert,
+ }
+
+ def _create_cert_file(self, key):
+ cert_file_name = None
+
+ if self.configuration.get(key, None) is not None:
+ with NamedTemporaryFile(mode="w", delete=False) as cert_file:
+ cert_bytes = b64decode(self.configuration[key])
+ cert_file.write(cert_bytes.decode("utf-8"))
+ cert_file_name = cert_file.name
+
+ return cert_file_name
+
+ def _cleanup_cert_files(self, promehteus_kwargs):
+ verify = promehteus_kwargs.get("verify", True)
+ if isinstance(verify, str) and os.path.exists(verify):
+ os.remove(verify)
+
+ cert = promehteus_kwargs.get("cert", ())
+ for cert_file in cert:
+ if os.path.exists(cert_file):
+ os.remove(cert_file)
+
@classmethod
def configuration_schema(cls):
+        # file-type settings must have names ending with "File"
return {
"type": "object",
- "properties": {"url": {"type": "string", "title": "Prometheus API URL"}},
+ "properties": {
+ "url": {"type": "string", "title": "Prometheus API URL"},
+ "verify_ssl": {
+ "type": "boolean",
+ "title": "Verify SSL (Ignored, if SSL Root Certificate is given)",
+ "default": True,
+ },
+ "cert_File": {"type": "string", "title": "SSL Client Certificate", "default": None},
+ "cert_key_File": {"type": "string", "title": "SSL Client Key", "default": None},
+ "ca_cert_File": {"type": "string", "title": "SSL Root Certificate", "default": None},
+ },
"required": ["url"],
+ "secret": ["cert_File", "cert_key_File", "ca_cert_File"],
+ "extra_options": ["verify_ssl", "cert_File", "cert_key_File", "ca_cert_File"],
}
def test_connection(self):
- resp = requests.get(self.configuration.get("url", None))
- return resp.ok
+ result = False
+ promehteus_kwargs = {}
+ try:
+ promehteus_kwargs = self._get_prometheus_kwargs()
+ resp = requests.get(self.configuration.get("url", None), **promehteus_kwargs)
+ result = resp.ok
+ except Exception:
+ raise
+ finally:
+ self._cleanup_cert_files(promehteus_kwargs)
+
+ return result
def get_schema(self, get_stats=False):
- base_url = self.configuration["url"]
- metrics_path = "/api/v1/label/__name__/values"
- response = requests.get(base_url + metrics_path)
- response.raise_for_status()
- data = response.json()["data"]
+ schema = []
+ promehteus_kwargs = {}
+ try:
+ base_url = self.configuration["url"]
+ metrics_path = "/api/v1/label/__name__/values"
+ promehteus_kwargs = self._get_prometheus_kwargs()
+
+ response = requests.get(base_url + metrics_path, **promehteus_kwargs)
+
+ response.raise_for_status()
+ data = response.json()["data"]
+
+ schema = {}
+ for name in data:
+ schema[name] = {"name": name, "columns": []}
+ schema = list(schema.values())
+ except Exception:
+ raise
+ finally:
+ self._cleanup_cert_files(promehteus_kwargs)
- schema = {}
- for name in data:
- schema[name] = {"name": name, "columns": []}
- return list(schema.values())
+ return schema
def run_query(self, query, user):
"""
@@ -113,30 +200,29 @@ def run_query(self, query, user):
{"friendly_name": "timestamp", "type": TYPE_DATETIME, "name": "timestamp"},
{"friendly_name": "value", "type": TYPE_STRING, "name": "value"},
]
+ promehteus_kwargs = {}
try:
error = None
query = query.strip()
# for backward compatibility
- query = (
- "query={}".format(query) if not query.startswith("query=") else query
- )
+ query = "query={}".format(query) if not query.startswith("query=") else query
payload = parse_qs(query)
query_type = "query_range" if "step" in payload.keys() else "query"
# for the range of until now
- if query_type == "query_range" and (
- "end" not in payload.keys() or "now" in payload["end"]
- ):
- date_now = datetime.now()
+ if query_type == "query_range" and ("end" not in payload.keys() or "now" in payload["end"]):
+ date_now = self._get_datetime_now()
payload.update({"end": [date_now]})
convert_query_range(payload)
api_endpoint = base_url + "/api/v1/{}".format(query_type)
- response = requests.get(api_endpoint, params=payload)
+ promehteus_kwargs = self._get_prometheus_kwargs()
+
+ response = requests.get(api_endpoint, params=payload, **promehteus_kwargs)
response.raise_for_status()
metrics = response.json()["data"]["result"]
@@ -160,12 +246,16 @@ def run_query(self, query, user):
else:
rows = get_instant_rows(metrics)
- json_data = json_dumps({"rows": rows, "columns": columns})
+ data = {"rows": rows, "columns": columns}
except requests.RequestException as e:
return None, str(e)
+ except Exception:
+ raise
+ finally:
+ self._cleanup_cert_files(promehteus_kwargs)
- return json_data, error
+ return data, error
register(Prometheus)
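
The Prometheus runner now supports TLS client authentication: cert_File, cert_key_File and ca_cert_File are expected to hold base64-encoded PEM contents, which _create_cert_file decodes into temporary files before each request. A short sketch of preparing such a value; the certificate path and URL are placeholders:

    from base64 import b64encode

    # Placeholder certificate path; the stored setting is the base64 of the PEM text.
    with open("client-cert.pem", "rb") as f:
        cert_file_value = b64encode(f.read()).decode("utf-8")

    configuration = {
        "url": "http://prometheus.example.com:9090",  # placeholder
        "verify_ssl": True,
        "cert_File": cert_file_value,
    }
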
diff --git a/redash/query_runner/python.py b/redash/query_runner/python.py
index ab3f25a53d..27f1146ae5 100644
--- a/redash/query_runner/python.py
+++ b/redash/query_runner/python.py
@@ -3,17 +3,42 @@
import logging
import sys
-from redash.query_runner import *
-from redash.utils import json_dumps, json_loads
-from redash import models
from RestrictedPython import compile_restricted
-from RestrictedPython.Guards import safe_builtins, guarded_iter_unpack_sequence, guarded_unpack_sequence
+from RestrictedPython.Guards import (
+ guarded_iter_unpack_sequence,
+ guarded_unpack_sequence,
+ safe_builtins,
+)
+from RestrictedPython.transformer import IOPERATOR_TO_STR
+
+from redash import models
+from redash.query_runner import (
+ SUPPORTED_COLUMN_TYPES,
+ TYPE_BOOLEAN,
+ TYPE_DATE,
+ TYPE_DATETIME,
+ TYPE_FLOAT,
+ TYPE_INTEGER,
+ TYPE_STRING,
+ BaseQueryRunner,
+ register,
+)
+from redash.utils.pandas import pandas_installed
+
+if pandas_installed:
+ import pandas as pd
+
+ from redash.utils.pandas import pandas_to_result
+
+ enabled = True
+else:
+ enabled = False
logger = logging.getLogger(__name__)
-class CustomPrint(object):
+class CustomPrint:
"""CustomPrint redirect "print" calls to be sent as "log" on the result object."""
def __init__(self):
@@ -23,9 +48,7 @@ def __init__(self):
def write(self, text):
if self.enabled:
if text and text.strip():
- log_line = "[{0}] {1}".format(
- datetime.datetime.utcnow().isoformat(), text
- )
+ log_line = "[{0}] {1}".format(datetime.datetime.utcnow().isoformat(), text)
self.lines.append(log_line)
def enable(self):
@@ -45,31 +68,31 @@ class Python(BaseQueryRunner):
should_annotate_query = False
safe_builtins = (
- "sorted",
- "reversed",
- "map",
- "any",
+ "abs",
"all",
- "slice",
+ "any",
+ "bool",
+ "complex",
+ "dict",
+ "divmod",
+ "enumerate",
"filter",
+ "float",
+ "int",
"len",
- "next",
- "enumerate",
- "sum",
- "abs",
- "min",
+ "list",
+ "map",
"max",
+ "min",
+ "next",
+ "reversed",
"round",
- "divmod",
+ "set",
+ "slice",
+ "sorted",
"str",
- "int",
- "float",
- "complex",
+ "sum",
"tuple",
- "set",
- "list",
- "dict",
- "bool",
)
@classmethod
@@ -112,7 +135,7 @@ def __init__(self, configuration):
if self.configuration.get("additionalBuiltins", None):
for b in self.configuration["additionalBuiltins"].split(","):
if b not in self.safe_builtins:
- self.safe_builtins += (b, )
+ self.safe_builtins += (b,)
def custom_import(self, name, globals=None, locals=None, fromlist=(), level=0):
if name in self._allowed_modules:
@@ -125,9 +148,7 @@ def custom_import(self, name, globals=None, locals=None, fromlist=(), level=0):
return m
- raise Exception(
- "'{0}' is not configured as a supported import module".format(name)
- )
+ raise Exception("'{0}' is not configured as a supported import module".format(name))
@staticmethod
def custom_write(obj):
@@ -145,6 +166,14 @@ def custom_get_item(obj, key):
def custom_get_iter(obj):
return iter(obj)
+ @staticmethod
+ def custom_inplacevar(op, x, y):
+ if op not in IOPERATOR_TO_STR.values():
+ raise Exception("'{} is not supported inplace variable'".format(op))
+ glb = {"x": x, "y": y}
+ exec("x" + op + "y", glb)
+ return glb["x"]
+
@staticmethod
def add_result_column(result, column_name, friendly_name, column_type):
"""Helper function to add columns inside a Python script running in Redash in an easier way
@@ -161,9 +190,7 @@ def add_result_column(result, column_name, friendly_name, column_type):
if "columns" not in result:
result["columns"] = []
- result["columns"].append(
- {"name": column_name, "friendly_name": friendly_name, "type": column_type}
- )
+ result["columns"].append({"name": column_name, "friendly_name": friendly_name, "type": column_type})
@staticmethod
def add_result_row(result, values):
@@ -179,7 +206,7 @@ def add_result_row(result, values):
result["rows"].append(values)
@staticmethod
- def execute_query(data_source_name_or_id, query):
+ def execute_query(data_source_name_or_id, query, result_type=None):
"""Run query from specific data source.
Parameters:
@@ -187,7 +214,7 @@ def execute_query(data_source_name_or_id, query):
:query string: Query to run
"""
try:
- if type(data_source_name_or_id) == int:
+ if isinstance(data_source_name_or_id, int):
data_source = models.DataSource.get_by_id(data_source_name_or_id)
else:
data_source = models.DataSource.get_by_name(data_source_name_or_id)
@@ -200,7 +227,12 @@ def execute_query(data_source_name_or_id, query):
raise Exception(error)
# TODO: allow avoiding the JSON dumps/loads in same process
- return json_loads(data)
+ query_result = data
+
+ if result_type == "dataframe" and pandas_installed:
+ return pd.DataFrame(query_result["rows"])
+
+ return query_result
@staticmethod
def get_source_schema(data_source_name_or_id):
@@ -210,7 +242,7 @@ def get_source_schema(data_source_name_or_id):
:return:
"""
try:
- if type(data_source_name_or_id) == int:
+ if isinstance(data_source_name_or_id, int):
data_source = models.DataSource.get_by_id(data_source_name_or_id)
else:
data_source = models.DataSource.get_by_name(data_source_name_or_id)
@@ -239,12 +271,38 @@ def get_query_result(query_id):
return query.latest_query_data.data
+ def dataframe_to_result(self, result, df):
+ converted_result = pandas_to_result(df)
+
+ result["rows"] = converted_result["rows"]
+ for column in converted_result["columns"]:
+ self.add_result_column(result, column["name"], column["friendly_name"], column["type"])
+
def get_current_user(self):
return self._current_user.to_dict()
def test_connection(self):
pass
+ def validate_result(self, result):
+ """Validate the result after executing the query.
+
+ Parameters:
+ :result dict: The result dict.
+ """
+ if not result:
+ raise Exception("local variable `result` should not be empty.")
+ if not isinstance(result, dict):
+ raise Exception("local variable `result` should be of type `dict`.")
+ if "rows" not in result:
+ raise Exception("Missing `rows` field in `result` dict.")
+ if "columns" not in result:
+ raise Exception("Missing `columns` field in `result` dict.")
+ if not isinstance(result["rows"], list):
+ raise Exception("`rows` field should be of type `list`.")
+ if not isinstance(result["columns"], list):
+ raise Exception("`columns` field should be of type `list`.")
+
def run_query(self, query, user):
self._current_user = user
@@ -265,6 +323,7 @@ def run_query(self, query, user):
builtins["_print_"] = self._custom_print
builtins["_unpack_sequence_"] = guarded_unpack_sequence
builtins["_iter_unpack_sequence_"] = guarded_iter_unpack_sequence
+ builtins["_inplacevar_"] = self.custom_inplacevar
# Layer in our own additional set of builtins that we have
# considered safe.
@@ -277,6 +336,8 @@ def run_query(self, query, user):
restricted_globals["get_current_user"] = self.get_current_user
restricted_globals["execute_query"] = self.execute_query
restricted_globals["add_result_column"] = self.add_result_column
+ if pandas_installed:
+ restricted_globals["dataframe_to_result"] = self.dataframe_to_result
restricted_globals["add_result_row"] = self.add_result_row
restricted_globals["disable_print_log"] = self._custom_print.disable
restricted_globals["enable_print_log"] = self._custom_print.enable
@@ -295,14 +356,14 @@ def run_query(self, query, user):
exec(code, restricted_globals, self._script_locals)
- result = self._script_locals["result"]
- result["log"] = self._custom_print.lines
- json_data = json_dumps(result)
+ data = self._script_locals["result"]
+ self.validate_result(data)
+ data["log"] = self._custom_print.lines
except Exception as e:
error = str(type(e)) + " " + str(e)
- json_data = None
+ data = None
- return json_data, error
+ return data, error
register(Python)
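
execute_query gains an optional result_type argument, and scripts see a new dataframe_to_result helper when pandas is installed. A hedged example of a Python data source script using both; the data source name and query are placeholders, and execute_query / dataframe_to_result / result are the names injected into the restricted globals above:

    # Runs inside the Python data source's restricted environment.
    df = execute_query("My PostgreSQL", "SELECT 1 AS n", result_type="dataframe")

    # Post-process with pandas before handing the frame back to Redash.
    df["n_squared"] = df["n"] ** 2

    result = {}
    dataframe_to_result(result, df)  # fills result["rows"] and result["columns"]
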
diff --git a/redash/query_runner/qubole.py b/redash/query_runner/qubole.py
deleted file mode 100644
index 7e91fb36f8..0000000000
--- a/redash/query_runner/qubole.py
+++ /dev/null
@@ -1,181 +0,0 @@
-import time
-import requests
-import logging
-from io import StringIO
-
-from redash.query_runner import (
- BaseQueryRunner,
- register,
- JobTimeoutException,
- TYPE_STRING,
-)
-from redash.utils import json_dumps
-
-try:
- import qds_sdk
- from qds_sdk.qubole import Qubole as qbol
- from qds_sdk.commands import Command, HiveCommand
- from qds_sdk.commands import SqlCommand, PrestoCommand
-
- enabled = True
-except ImportError:
- enabled = False
-
-
-class Qubole(BaseQueryRunner):
- should_annotate_query = False
-
- @classmethod
- def configuration_schema(cls):
- return {
- "type": "object",
- "properties": {
- "query_type": {
- "type": "string",
- "title": "Query Type (quantum / presto / hive)",
- "default": "hive",
- },
- "endpoint": {
- "type": "string",
- "title": "API Endpoint",
- "default": "https://api.qubole.com",
- },
- "token": {"type": "string", "title": "Auth Token"},
- "cluster": {
- "type": "string",
- "title": "Cluster Label",
- "default": "default",
- },
- },
- "order": ["query_type", "endpoint", "token", "cluster"],
- "required": ["endpoint", "token"],
- "secret": ["token"],
- }
-
- @classmethod
- def type(cls):
- return "qubole"
-
- @classmethod
- def name(cls):
- return "Qubole"
-
- @classmethod
- def enabled(cls):
- return enabled
-
- def test_connection(self):
- headers = self._get_header()
- r = requests.head(
- "%s/api/latest/users" % self.configuration.get("endpoint"), headers=headers
- )
- r.status_code == 200
-
- def run_query(self, query, user):
- qbol.configure(
- api_token=self.configuration.get("token"),
- api_url="%s/api" % self.configuration.get("endpoint"),
- )
-
- try:
- query_type = self.configuration.get("query_type", "hive")
-
- if query_type == "quantum":
- cmd = SqlCommand.create(query=query)
- elif query_type == "hive":
- cmd = HiveCommand.create(
- query=query, label=self.configuration.get("cluster")
- )
- elif query_type == "presto":
- cmd = PrestoCommand.create(
- query=query, label=self.configuration.get("cluster")
- )
- else:
- raise Exception(
- "Invalid Query Type:%s.\
- It must be : hive / presto / quantum."
- % self.configuration.get("query_type")
- )
-
- logging.info(
- "Qubole command created with Id: %s and Status: %s", cmd.id, cmd.status
- )
-
- while not Command.is_done(cmd.status):
- time.sleep(qbol.poll_interval)
- cmd = Command.find(cmd.id)
- logging.info("Qubole command Id: %s and Status: %s", cmd.id, cmd.status)
-
- rows = []
- columns = []
- error = None
-
- if cmd.status == "done":
- fp = StringIO()
- cmd.get_results(
- fp=fp,
- inline=True,
- delim="\t",
- fetch=False,
- qlog=None,
- arguments=["true"],
- )
-
- results = fp.getvalue()
- fp.close()
-
- data = results.split("\r\n")
- columns = self.fetch_columns(
- [(i, TYPE_STRING) for i in data.pop(0).split("\t")]
- )
- rows = [
- dict(zip((column["name"] for column in columns), row.split("\t")))
- for row in data
- ]
-
- json_data = json_dumps({"columns": columns, "rows": rows})
- except (KeyboardInterrupt, JobTimeoutException):
- logging.info("Sending KILL signal to Qubole Command Id: %s", cmd.id)
- cmd.cancel()
- raise
-
- return json_data, error
-
- def get_schema(self, get_stats=False):
- schemas = {}
- try:
- headers = self._get_header()
- content = requests.get(
- "%s/api/latest/hive?describe=true&per_page=10000"
- % self.configuration.get("endpoint"),
- headers=headers,
- )
- data = content.json()
-
- for schema in data["schemas"]:
- tables = data["schemas"][schema]
- for table in tables:
- table_name = list(table.keys())[0]
- columns = [f["name"] for f in table[table_name]["columns"]]
-
- if schema != "default":
- table_name = "{}.{}".format(schema, table_name)
-
- schemas[table_name] = {"name": table_name, "columns": columns}
-
- except Exception as e:
- logging.error(
- "Failed to get schema information from Qubole. Error {}".format(str(e))
- )
-
- return list(schemas.values())
-
- def _get_header(self):
- return {
- "Content-type": "application/json",
- "Accept": "application/json",
- "X-AUTH-TOKEN": self.configuration.get("token"),
- }
-
-
-register(Qubole)
diff --git a/redash/query_runner/query_results.py b/redash/query_runner/query_results.py
index 7da4ff0afb..3fdc40c294 100644
--- a/redash/query_runner/query_results.py
+++ b/redash/query_runner/query_results.py
@@ -1,17 +1,21 @@
+import datetime
+import decimal
+import hashlib
import logging
import re
import sqlite3
+from urllib.parse import parse_qs
from redash import models
from redash.permissions import has_access, view_only
from redash.query_runner import (
- BaseQueryRunner,
TYPE_STRING,
+ BaseQueryRunner,
+ JobTimeoutException,
guess_type,
register,
- JobTimeoutException,
)
-from redash.utils import json_dumps, json_loads
+from redash.utils import json_dumps
logger = logging.getLogger(__name__)
@@ -24,6 +28,10 @@ class CreateTableError(Exception):
pass
+def extract_query_params(query):
+ return re.findall(r"(?:join|from)\s+param_query_(\d+)_{([^}]+)}", query, re.IGNORECASE)
+
+
def extract_query_ids(query):
queries = re.findall(r"(?:join|from)\s+query_(\d+)", query, re.IGNORECASE)
return [int(q) for q in queries]
@@ -48,7 +56,14 @@ def _load_query(user, query_id):
return query
-def get_query_results(user, query_id, bring_from_cache):
+def replace_query_parameters(query_text, params):
+ qs = parse_qs(params)
+ for key, value in qs.items():
+ query_text = query_text.replace("{{{{{my_key}}}}}".format(my_key=key), value[0])
+ return query_text
+
+
+def get_query_results(user, query_id, bring_from_cache, params=None):
query = _load_query(user, query_id)
if bring_from_cache:
if query.latest_query_data_id is not None:
@@ -56,23 +71,31 @@ def get_query_results(user, query_id, bring_from_cache):
else:
raise Exception("No cached result available for query {}.".format(query.id))
else:
- results, error = query.data_source.query_runner.run_query(
- query.query_text, user
- )
+ query_text = query.query_text
+ if params is not None:
+ query_text = replace_query_parameters(query_text, params)
+
+ results, error = query.data_source.query_runner.run_query(query_text, user)
if error:
raise Exception("Failed loading results for query id {}.".format(query.id))
- else:
- results = json_loads(results)
return results
-def create_tables_from_query_ids(user, connection, query_ids, cached_query_ids=[]):
+def create_tables_from_query_ids(user, connection, query_ids, query_params, cached_query_ids=[]):
for query_id in set(cached_query_ids):
results = get_query_results(user, query_id, True)
table_name = "cached_query_{query_id}".format(query_id=query_id)
create_table(connection, table_name, results)
+ for query in set(query_params):
+ results = get_query_results(user, query[0], False, query[1])
+ table_hash = hashlib.md5(
+ "query_{query}_{hash}".format(query=query[0], hash=query[1]).encode(), usedforsecurity=False
+ ).hexdigest()
+ table_name = "query_{query_id}_{param_hash}".format(query_id=query[0], param_hash=table_hash)
+ create_table(connection, table_name, results)
+
for query_id in set(query_ids):
results = get_query_results(user, query_id, False)
table_name = "query_{query_id}".format(query_id=query_id)
@@ -80,12 +103,16 @@ def create_tables_from_query_ids(user, connection, query_ids, cached_query_ids=[
def fix_column_name(name):
- return '"{}"'.format(re.sub('[:."\s]', "_", name, flags=re.UNICODE))
+ return '"{}"'.format(re.sub(r'[:."\s]', "_", name, flags=re.UNICODE))
def flatten(value):
if isinstance(value, (list, dict)):
return json_dumps(value)
+ elif isinstance(value, decimal.Decimal):
+ return float(value)
+ elif isinstance(value, datetime.timedelta):
+ return str(value)
else:
return value
@@ -102,9 +129,7 @@ def create_table(connection, table_name, query_results):
logger.debug("CREATE TABLE query: %s", create_table)
connection.execute(create_table)
except sqlite3.OperationalError as exc:
- raise CreateTableError(
- "Error creating table {}: {}".format(table_name, str(exc))
- )
+ raise CreateTableError("Error creating table {}: {}".format(table_name, str(exc)))
insert_template = "insert into {table_name} ({column_list}) values ({place_holders})".format(
table_name=table_name,
@@ -117,6 +142,17 @@ def create_table(connection, table_name, query_results):
connection.execute(insert_template, values)
+def prepare_parameterized_query(query, query_params):
+ for params in query_params:
+ table_hash = hashlib.md5(
+ "query_{query}_{hash}".format(query=params[0], hash=params[1]).encode(), usedforsecurity=False
+ ).hexdigest()
+ key = "param_query_{query_id}_{{{param_string}}}".format(query_id=params[0], param_string=params[1])
+ value = "query_{query_id}_{param_hash}".format(query_id=params[0], param_hash=table_hash)
+ query = query.replace(key, value)
+ return query
+
+
class Results(BaseQueryRunner):
should_annotate_query = False
noop_query = "SELECT 1"
@@ -133,11 +169,17 @@ def run_query(self, query, user):
connection = sqlite3.connect(":memory:")
query_ids = extract_query_ids(query)
+
+ query_params = extract_query_params(query)
+
cached_query_ids = extract_cached_query_ids(query)
- create_tables_from_query_ids(user, connection, query_ids, cached_query_ids)
+ create_tables_from_query_ids(user, connection, query_ids, query_params, cached_query_ids)
cursor = connection.cursor()
+ if query_params is not None:
+ query = prepare_parameterized_query(query, query_params)
+
try:
cursor.execute(query)
@@ -160,16 +202,15 @@ def run_query(self, query, user):
data = {"columns": columns, "rows": rows}
error = None
- json_data = json_dumps(data)
else:
error = "Query completed but it returned no data."
- json_data = None
+ data = None
except (KeyboardInterrupt, JobTimeoutException):
connection.cancel()
raise
finally:
connection.close()
- return json_data, error
+ return data, error
register(Results)
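
The Results runner now understands parameterized sub-queries: param_query_<id>_{...} references are collected by extract_query_params, the referenced query's {{param}} placeholders are filled from the query-string-style parameter list, and prepare_parameterized_query swaps the reference for the hashed temp table that holds the materialized rows. A sketch with a hypothetical query id 42 and parameter names, assuming the Redash package is importable:

    from redash.query_runner.query_results import (
        extract_query_params,
        prepare_parameterized_query,
        replace_query_parameters,
    )

    outer_sql = "SELECT count(*) FROM param_query_42_{org_id=7&status=active}"

    params = extract_query_params(outer_sql)
    print(params)  # [('42', 'org_id=7&status=active')]

    # Inside query 42, {{org_id}} / {{status}} placeholders receive these values:
    inner_sql = "SELECT * FROM events WHERE org_id = {{org_id}} AND status = '{{status}}'"
    print(replace_query_parameters(inner_sql, params[0][1]))

    # The outer query is rewritten to reference the hashed temp table name:
    print(prepare_parameterized_query(outer_sql, params))
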
diff --git a/redash/query_runner/risingwave.py b/redash/query_runner/risingwave.py
new file mode 100644
index 0000000000..2cc241d095
--- /dev/null
+++ b/redash/query_runner/risingwave.py
@@ -0,0 +1,45 @@
+from redash.query_runner import register
+from redash.query_runner.pg import PostgreSQL
+
+
+class RisingWave(PostgreSQL):
+ @classmethod
+ def type(cls):
+ return "risingwave"
+
+ @classmethod
+ def name(cls):
+ return "RisingWave"
+
+ def _get_tables(self, schema):
+ query = """
+ SELECT s.nspname as table_schema,
+ c.relname as table_name,
+ a.attname as column_name,
+ null as data_type
+ FROM pg_class c
+ JOIN pg_namespace s
+ ON c.relnamespace = s.oid
+ AND s.nspname NOT IN ('pg_catalog', 'information_schema', 'rw_catalog')
+ JOIN pg_attribute a
+ ON a.attrelid = c.oid
+ AND a.attnum > 0
+ AND NOT a.attisdropped
+ WHERE c.relkind IN ('m', 'f', 'p')
+
+ UNION
+
+ SELECT table_schema,
+ table_name,
+ column_name,
+ data_type
+ FROM information_schema.columns
+ WHERE table_schema NOT IN ('pg_catalog', 'information_schema', 'rw_catalog');
+ """
+
+ self._get_definitions(schema, query)
+
+ return list(schema.values())
+
+
+register(RisingWave)
diff --git a/redash/query_runner/rockset.py b/redash/query_runner/rockset.py
index a8e0f0eb6c..96910b8be9 100644
--- a/redash/query_runner/rockset.py
+++ b/redash/query_runner/rockset.py
@@ -1,6 +1,13 @@
import requests
-from redash.query_runner import *
-from redash.utils import json_dumps
+
+from redash.query_runner import (
+ TYPE_BOOLEAN,
+ TYPE_FLOAT,
+ TYPE_INTEGER,
+ TYPE_STRING,
+ BaseSQLQueryRunner,
+ register,
+)
def _get_type(value):
@@ -17,15 +24,15 @@ def _get_type(value):
# The following is here, because Rockset's PyPi package is Python 3 only.
# Should be removed once we move to Python 3.
-class RocksetAPI(object):
- def __init__(self, api_key, api_server):
+class RocksetAPI:
+ def __init__(self, api_key, api_server, vi_id):
self.api_key = api_key
self.api_server = api_server
+ self.vi_id = vi_id
- def _request(self, endpoint, method='GET', body=None):
- headers = {'Authorization': 'ApiKey {}'.format(self.api_key),
- 'User-Agent': 'rest:redash/1.0'}
- url = '{}/v1/orgs/self/{}'.format(self.api_server, endpoint)
+ def _request(self, endpoint, method="GET", body=None):
+ headers = {"Authorization": "ApiKey {}".format(self.api_key), "User-Agent": "rest:redash/1.0"}
+ url = "{}/v1/orgs/self/{}".format(self.api_server, endpoint)
if method == "GET":
r = requests.get(url, headers=headers)
@@ -37,19 +44,22 @@ def _request(self, endpoint, method='GET', body=None):
raise "Unknown method: {}".format(method)
def list_workspaces(self):
- response = self._request('ws')
- return [x['name'] for x in response['data'] if x['collection_count'] > 0]
+ response = self._request("ws")
+ return [x["name"] for x in response["data"] if x["collection_count"] > 0]
- def list_collections(self, workspace='commons'):
- response = self._request('ws/{}/collections'.format(workspace))
- return [x['name'] for x in response['data']]
+ def list_collections(self, workspace="commons"):
+ response = self._request("ws/{}/collections".format(workspace))
+ return [x["name"] for x in response["data"]]
def collection_columns(self, workspace, collection):
response = self.query('DESCRIBE "{}"."{}" OPTION(max_field_depth=1)'.format(workspace, collection))
- return sorted(set([x['field'][0] for x in response['results']]))
+ return sorted(set([x["field"][0] for x in response["results"]]))
def query(self, sql):
- return self._request("queries", "POST", {"sql": {"query": sql}})
+ query_path = "queries"
+ if self.vi_id is not None and self.vi_id != "":
+ query_path = f"virtualinstances/{self.vi_id}/queries"
+ return self._request(query_path, "POST", {"sql": {"query": sql}})
class Rockset(BaseSQLQueryRunner):
@@ -66,8 +76,9 @@ def configuration_schema(cls):
"default": "https://api.rs2.usw2.rockset.com",
},
"api_key": {"title": "API Key", "type": "string"},
+ "vi_id": {"title": "Virtual Instance ID", "type": "string"},
},
- "order": ["api_key", "api_server"],
+ "order": ["api_key", "api_server", "vi_id"],
"required": ["api_server", "api_key"],
"secret": ["api_key"],
}
@@ -80,18 +91,19 @@ def __init__(self, configuration):
super(Rockset, self).__init__(configuration)
self.api = RocksetAPI(
self.configuration.get("api_key"),
- self.configuration.get("api_server", "https://api.rs2.usw2.rockset.com"),
+ self.configuration.get("api_server", "https://api.usw2a1.rockset.com"),
+ self.configuration.get("vi_id"),
)
def _get_tables(self, schema):
for workspace in self.api.list_workspaces():
for collection in self.api.list_collections(workspace):
- table_name = collection if workspace == 'commons' else '{}.{}'.format(workspace, collection)
+ table_name = collection if workspace == "commons" else "{}.{}".format(workspace, collection)
schema[table_name] = {
- 'name': table_name,
- 'columns': self.api.collection_columns(workspace, collection)
+ "name": table_name,
+ "columns": self.api.collection_columns(workspace, collection),
}
- return sorted(schema.values(), key=lambda x: x['name'])
+ return sorted(schema.values(), key=lambda x: x["name"])
def run_query(self, query, user):
results = self.api.query(query)
@@ -107,10 +119,8 @@ def run_query(self, query, user):
if len(rows) > 0:
columns = []
for k in rows[0]:
- columns.append(
- {"name": k, "friendly_name": k, "type": _get_type(rows[0][k])}
- )
- data = json_dumps({"columns": columns, "rows": rows})
+ columns.append({"name": k, "friendly_name": k, "type": _get_type(rows[0][k])})
+ data = {"columns": columns, "rows": rows}
return data, None
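
RocksetAPI now takes a Virtual Instance ID and, when it is set, routes queries to virtualinstances/<vi_id>/queries instead of the shared /queries endpoint. A small sketch of the updated constructor with placeholder credentials (a real request requires network access to Rockset):

    from redash.query_runner.rockset import RocksetAPI

    api = RocksetAPI(
        api_key="YOUR_API_KEY",                       # placeholder
        api_server="https://api.usw2a1.rockset.com",  # fallback server introduced in this change
        vi_id="vi-abc123",                            # placeholder Virtual Instance ID
    )
    response = api.query("SELECT 1")  # POSTs to virtualinstances/vi-abc123/queries
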
diff --git a/redash/query_runner/salesforce.py b/redash/query_runner/salesforce.py
index fb0eeea9bb..5e0e018c2e 100644
--- a/redash/query_runner/salesforce.py
+++ b/redash/query_runner/salesforce.py
@@ -1,25 +1,27 @@
-import re
import logging
+import re
from collections import OrderedDict
-from redash.query_runner import BaseQueryRunner, register
+
from redash.query_runner import (
- TYPE_STRING,
+ TYPE_BOOLEAN,
TYPE_DATE,
TYPE_DATETIME,
- TYPE_INTEGER,
TYPE_FLOAT,
- TYPE_BOOLEAN,
+ TYPE_INTEGER,
+ TYPE_STRING,
+ BaseQueryRunner,
+ register,
)
-from redash.utils import json_dumps
logger = logging.getLogger(__name__)
try:
- from simple_salesforce import Salesforce as SimpleSalesforce, SalesforceError
+ from simple_salesforce import Salesforce as SimpleSalesforce
+ from simple_salesforce import SalesforceError
from simple_salesforce.api import DEFAULT_API_VERSION
enabled = True
-except ImportError as e:
+except ImportError:
enabled = False
# See https://developer.salesforce.com/docs/atlas.en-us.api.meta/api/field_types.htm
@@ -78,7 +80,7 @@ def configuration_schema(cls):
"default": DEFAULT_API_VERSION,
},
},
- "required": ["username", "password", "token"],
+ "required": ["username", "password"],
"secret": ["password", "token"],
}
@@ -163,11 +165,10 @@ def run_query(self, query, user):
columns = self.fetch_columns(cols)
error = None
data = {"columns": columns, "rows": rows}
- json_data = json_dumps(data)
except SalesforceError as err:
error = err.content
- json_data = None
- return json_data, error
+ data = None
+ return data, error
def get_schema(self, get_stats=False):
sf = self._get_sf()
diff --git a/redash/query_runner/script.py b/redash/query_runner/script.py
index 9adb1851c9..ae6300c82a 100644
--- a/redash/query_runner/script.py
+++ b/redash/query_runner/script.py
@@ -1,8 +1,7 @@
import os
import subprocess
-import sys
-from redash.query_runner import *
+from redash.query_runner import BaseQueryRunner, register
def query_to_script_path(path, query):
@@ -62,9 +61,7 @@ def __init__(self, configuration):
# Poor man's protection against running scripts from outside the scripts directory
if self.configuration["path"].find("../") > -1:
- raise ValueError(
- "Scripts can only be run from the configured scripts directory"
- )
+ raise ValueError("Scripts can only be run from the configured scripts directory")
def test_connection(self):
pass
diff --git a/redash/query_runner/snowflake.py b/redash/query_runner/snowflake.py
index 24c7481565..bb67c20d68 100644
--- a/redash/query_runner/snowflake.py
+++ b/redash/query_runner/snowflake.py
@@ -1,20 +1,22 @@
try:
import snowflake.connector
+
enabled = True
except ImportError:
enabled = False
-from redash.query_runner import BaseQueryRunner, register
+from redash import __version__
from redash.query_runner import (
- TYPE_STRING,
+ TYPE_BOOLEAN,
TYPE_DATE,
TYPE_DATETIME,
- TYPE_INTEGER,
TYPE_FLOAT,
- TYPE_BOOLEAN,
+ TYPE_INTEGER,
+ TYPE_STRING,
+ BaseSQLQueryRunner,
+ register,
)
-from redash.utils import json_dumps, json_loads
TYPES_MAP = {
0: TYPE_INTEGER,
@@ -30,7 +32,7 @@
}
-class Snowflake(BaseQueryRunner):
+class Snowflake(BaseSQLQueryRunner):
noop_query = "SELECT 1"
@classmethod
@@ -39,14 +41,27 @@ def configuration_schema(cls):
"type": "object",
"properties": {
"account": {"type": "string"},
- "region": {"type": "string", "default": "us-west"},
"user": {"type": "string"},
"password": {"type": "string"},
"warehouse": {"type": "string"},
"database": {"type": "string"},
+ "region": {"type": "string", "default": "us-west"},
+ "lower_case_columns": {
+ "type": "boolean",
+ "title": "Lower Case Column Names in Results",
+ "default": False,
+ },
"host": {"type": "string"},
},
- "order": ["account", "region", "user", "password", "warehouse", "database", "host"],
+ "order": [
+ "account",
+ "user",
+ "password",
+ "warehouse",
+ "database",
+ "region",
+ "host",
+ ],
"required": ["user", "password", "account", "database", "warehouse"],
"secret": ["password"],
"extra_options": [
@@ -81,24 +96,28 @@ def _get_connection(self):
else:
host = "{}.snowflakecomputing.com".format(account)
-
connection = snowflake.connector.connect(
- user = self.configuration["user"],
- password = self.configuration["password"],
- account = account,
- region = region,
- host = host
+ user=self.configuration["user"],
+ password=self.configuration["password"],
+ account=account,
+ region=region,
+ host=host,
+ application="Redash/{} (Snowflake)".format(__version__.split("-")[0]),
)
return connection
+ def _column_name(self, column_name):
+ if self.configuration.get("lower_case_columns", False):
+ return column_name.lower()
+
+ return column_name
+
def _parse_results(self, cursor):
columns = self.fetch_columns(
- [(i[0], self.determine_type(i[1], i[5])) for i in cursor.description]
+ [(self._column_name(i[0]), self.determine_type(i[1], i[5])) for i in cursor.description]
)
- rows = [
- dict(zip((column["name"] for column in columns), row)) for row in cursor
- ]
+ rows = [dict(zip((column["name"] for column in columns), row)) for row in cursor]
data = {"columns": columns, "rows": rows}
return data
@@ -115,12 +134,11 @@ def run_query(self, query, user):
data = self._parse_results(cursor)
error = None
- json_data = json_dumps(data)
finally:
cursor.close()
connection.close()
- return json_data, error
+ return data, error
def _run_query_without_warehouse(self, query):
connection = self._get_connection()
@@ -137,10 +155,10 @@ def _run_query_without_warehouse(self, query):
cursor.close()
connection.close()
- return data, error
-
+ return data, error
+
def _database_name_includes_schema(self):
- return '.' in self.configuration.get('database')
+ return "." in self.configuration.get("database")
def get_schema(self, get_stats=False):
if self._database_name_includes_schema():
@@ -151,7 +169,7 @@ def get_schema(self, get_stats=False):
results, error = self._run_query_without_warehouse(query)
if error is not None:
- raise Exception("Failed getting schema.")
+ self._handle_run_query_error(error)
schema = {}
for row in results["rows"]:
diff --git a/redash/query_runner/sparql_endpoint.py b/redash/query_runner/sparql_endpoint.py
index b9a00579ab..2f47c21db1 100644
--- a/redash/query_runner/sparql_endpoint.py
+++ b/redash/query_runner/sparql_endpoint.py
@@ -3,19 +3,18 @@
seeAlso: https://www.w3.org/TR/rdf-sparql-query/
"""
-import logging
import json
+import logging
from os import environ
-import re
from redash.query_runner import BaseQueryRunner
-from redash.utils import json_dumps, json_loads
+
from . import register
try:
import requests
from cmem.cmempy.queries import SparqlQuery
- from rdflib.plugins.sparql import prepareQuery
+ from rdflib.plugins.sparql import prepareQuery # noqa
enabled = True
except ImportError:
@@ -83,7 +82,7 @@ def _transform_sparql_results(results):
logger.info("results are: {}".format(results))
# Not sure why we do not use the json package here, but all the other
# query runners do it the same way :-)
- sparql_results = json_loads(results)
+ sparql_results = results
# transform all bindings to redash rows
rows = []
for sparql_row in sparql_results["results"]["bindings"]:
@@ -101,7 +100,7 @@ def _transform_sparql_results(results):
columns.append({"name": var, "friendly_name": var, "type": "string"})
# Not sure why we do not use the json package here, but all the other
# query runners do it the same way :-)
- return json_dumps({"columns": columns, "rows": rows})
+ return {"columns": columns, "rows": rows}
@classmethod
def name(cls):
@@ -125,9 +124,7 @@ def run_query(self, query, user):
query = SparqlQuery(query_text)
query_type = query.get_query_type()
if query_type not in ["SELECT", None]:
- raise ValueError(
- "Queries of type {} can not be processed by redash.".format(query_type)
- )
+ raise ValueError("Queries of type {} can not be processed by redash.".format(query_type))
self._setup_environment()
try:
diff --git a/redash/query_runner/sqlite.py b/redash/query_runner/sqlite.py
index d056dff98f..6891088492 100644
--- a/redash/query_runner/sqlite.py
+++ b/redash/query_runner/sqlite.py
@@ -1,8 +1,11 @@
import logging
import sqlite3
-from redash.query_runner import BaseSQLQueryRunner, register, JobTimeoutException
-from redash.utils import json_dumps, json_loads
+from redash.query_runner import (
+ BaseSQLQueryRunner,
+ JobTimeoutException,
+ register,
+)
logger = logging.getLogger(__name__)
@@ -36,16 +39,13 @@ def _get_tables(self, schema):
if error is not None:
raise Exception("Failed getting schema.")
- results = json_loads(results)
-
for row in results["rows"]:
table_name = row["tbl_name"]
schema[table_name] = {"name": table_name, "columns": []}
results_table, error = self.run_query(query_columns % (table_name,), None)
if error is not None:
- raise Exception("Failed getting schema.")
+ self._handle_run_query_error(error)
- results_table = json_loads(results_table)
for row_column in results_table["rows"]:
schema[table_name]["columns"].append(row_column["name"])
@@ -61,23 +61,19 @@ def run_query(self, query, user):
if cursor.description is not None:
columns = self.fetch_columns([(i[0], None) for i in cursor.description])
- rows = [
- dict(zip((column["name"] for column in columns), row))
- for row in cursor
- ]
+ rows = [dict(zip((column["name"] for column in columns), row)) for row in cursor]
data = {"columns": columns, "rows": rows}
error = None
- json_data = json_dumps(data)
else:
error = "Query completed but it returned no data."
- json_data = None
+ data = None
except (KeyboardInterrupt, JobTimeoutException):
connection.cancel()
raise
finally:
connection.close()
- return json_data, error
+ return data, error
register(Sqlite)
diff --git a/redash/query_runner/tinybird.py b/redash/query_runner/tinybird.py
new file mode 100644
index 0000000000..f29a45a8cb
--- /dev/null
+++ b/redash/query_runner/tinybird.py
@@ -0,0 +1,113 @@
+import logging
+
+import requests
+
+from redash.query_runner import register
+from redash.query_runner.clickhouse import ClickHouse
+
+logger = logging.getLogger(__name__)
+
+
+class Tinybird(ClickHouse):
+ noop_query = "SELECT count() FROM tinybird.pipe_stats LIMIT 1"
+
+ DEFAULT_URL = "https://api.tinybird.co"
+
+ SQL_ENDPOINT = "/v0/sql"
+ DATASOURCES_ENDPOINT = "/v0/datasources"
+ PIPES_ENDPOINT = "/v0/pipes"
+
+ @classmethod
+ def configuration_schema(cls):
+ return {
+ "type": "object",
+ "properties": {
+ "url": {"type": "string", "default": cls.DEFAULT_URL},
+ "token": {"type": "string", "title": "Auth Token"},
+ "timeout": {
+ "type": "number",
+ "title": "Request Timeout",
+ "default": 30,
+ },
+ "verify": {
+ "type": "boolean",
+ "title": "Verify SSL certificate",
+ "default": True,
+ },
+ },
+ "order": ["url", "token"],
+ "required": ["token"],
+ "extra_options": ["timeout", "verify"],
+ "secret": ["token"],
+ }
+
+ def _get_tables(self, schema):
+ self._collect_tinybird_schema(
+ schema,
+ self.DATASOURCES_ENDPOINT,
+ "datasources",
+ )
+
+ self._collect_tinybird_schema(
+ schema,
+ self.PIPES_ENDPOINT,
+ "pipes",
+ )
+
+ return list(schema.values())
+
+ def _send_query(self, data, session_id=None, session_check=None):
+ return self._get_from_tinybird(
+ self.SQL_ENDPOINT,
+ params={"q": data.encode("utf-8", "ignore")},
+ )
+
+ def _collect_tinybird_schema(self, schema, endpoint, resource_type):
+ response = self._get_from_tinybird(endpoint)
+ resources = response.get(resource_type, [])
+
+ for r in resources:
+ if r["name"] not in schema:
+ schema[r["name"]] = {"name": r["name"], "columns": []}
+
+ if resource_type == "pipes" and not r.get("endpoint"):
+ continue
+
+ query = f"SELECT * FROM {r['name']} LIMIT 1 FORMAT JSON"
+ try:
+ query_result = self._send_query(query)
+ except Exception:
+ logger.exception(f"error in schema {r['name']}")
+ continue
+
+ columns = [meta["name"] for meta in query_result["meta"]]
+ schema[r["name"]]["columns"].extend(columns)
+
+ return schema
+
+ def _get_from_tinybird(self, endpoint, params=None):
+ url = f"{self.configuration.get('url', self.DEFAULT_URL)}{endpoint}"
+ authorization = f"Bearer {self.configuration.get('token')}"
+
+ try:
+ response = requests.get(
+ url,
+ timeout=self.configuration.get("timeout", 30),
+ params=params,
+ headers={"Authorization": authorization},
+ verify=self.configuration.get("verify", True),
+ )
+ except requests.RequestException as e:
+ if e.response:
+ details = f"({e.__class__.__name__}, Status Code: {e.response.status_code})"
+ else:
+ details = f"({e.__class__.__name__})"
+ raise Exception(f"Connection error to: {url} {details}.")
+
+ if response.status_code >= 400:
+ raise Exception(response.text)
+
+ return response.json()
+
+
+register(Tinybird)
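
Reviewer note, illustrative only (not part of the diff): the new runner's _get_from_tinybird() boils down to a plain authenticated GET against Tinybird's HTTP API. A minimal sketch of the equivalent request for a schema probe, assuming the default API URL and a hypothetical data source named my_datasource:

import requests

response = requests.get(
    "https://api.tinybird.co/v0/sql",
    params={"q": "SELECT * FROM my_datasource LIMIT 1 FORMAT JSON"},
    headers={"Authorization": "Bearer <AUTH_TOKEN>"},
    timeout=30,
    verify=True,
)
response.raise_for_status()
# column names come back in the JSON "meta" block, which _collect_tinybird_schema reads
print([meta["name"] for meta in response.json()["meta"]])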
diff --git a/redash/query_runner/treasuredata.py b/redash/query_runner/treasuredata.py
index 3e53b136ce..ddd1d50e46 100644
--- a/redash/query_runner/treasuredata.py
+++ b/redash/query_runner/treasuredata.py
@@ -1,7 +1,14 @@
import logging
-from redash.query_runner import *
-from redash.utils import json_dumps
+from redash.query_runner import (
+ TYPE_BOOLEAN,
+ TYPE_DATETIME,
+ TYPE_FLOAT,
+ TYPE_INTEGER,
+ TYPE_STRING,
+ BaseQueryRunner,
+ register,
+)
logger = logging.getLogger(__name__)
@@ -69,17 +76,17 @@ def get_schema(self, get_stats=False):
schema = {}
if self.configuration.get("get_schema", False):
try:
- with tdclient.Client(self.configuration.get("apikey"),endpoint=self.configuration.get("endpoint")) as client:
+ with tdclient.Client(
+ self.configuration.get("apikey"), endpoint=self.configuration.get("endpoint")
+ ) as client:
for table in client.tables(self.configuration.get("db")):
- table_name = "{}.{}".format(
- self.configuration.get("db"), table.name
- )
+ table_name = "{}.{}".format(self.configuration.get("db"), table.name)
for table_schema in table.schema:
schema[table_name] = {
"name": table_name,
"columns": [column[0] for column in table.schema],
}
- except Exception as ex:
+ except Exception:
raise Exception("Failed getting schema")
return list(schema.values())
@@ -95,30 +102,23 @@ def run_query(self, query, user):
try:
cursor.execute(query)
columns_tuples = [
- (i[0], TD_TYPES_MAPPING.get(i[1], None))
- for i in cursor.show_job()["hive_result_schema"]
+ (i[0], TD_TYPES_MAPPING.get(i[1], None)) for i in cursor.show_job()["hive_result_schema"]
]
columns = self.fetch_columns(columns_tuples)
if cursor.rowcount == 0:
rows = []
else:
- rows = [
- dict(zip(([column["name"] for column in columns]), r))
- for r in cursor.fetchall()
- ]
+ rows = [dict(zip(([column["name"] for column in columns]), r)) for r in cursor.fetchall()]
data = {"columns": columns, "rows": rows}
- json_data = json_dumps(data)
error = None
except errors.InternalError as e:
- json_data = None
+ data = None
error = "%s: %s" % (
str(e),
- cursor.show_job()
- .get("debug", {})
- .get("stderr", "No stderr message in the response"),
+ cursor.show_job().get("debug", {}).get("stderr", "No stderr message in the response"),
)
- return json_data, error
+ return data, error
register(TreasureData)
diff --git a/redash/query_runner/trino.py b/redash/query_runner/trino.py
index bfcc1e294c..fbbfab9bd7 100644
--- a/redash/query_runner/trino.py
+++ b/redash/query_runner/trino.py
@@ -1,7 +1,17 @@
import logging
-from redash.query_runner import *
-from redash.utils import json_dumps, json_loads
+from redash.query_runner import (
+ TYPE_BOOLEAN,
+ TYPE_DATE,
+ TYPE_DATETIME,
+ TYPE_FLOAT,
+ TYPE_INTEGER,
+ TYPE_STRING,
+ BaseQueryRunner,
+ InterruptException,
+ JobTimeoutException,
+ register,
+)
logger = logging.getLogger(__name__)
@@ -15,24 +25,19 @@
TRINO_TYPES_MAPPING = {
"boolean": TYPE_BOOLEAN,
-
"tinyint": TYPE_INTEGER,
"smallint": TYPE_INTEGER,
"integer": TYPE_INTEGER,
"long": TYPE_INTEGER,
"bigint": TYPE_INTEGER,
-
"float": TYPE_FLOAT,
"real": TYPE_FLOAT,
"double": TYPE_FLOAT,
-
"decimal": TYPE_INTEGER,
-
"varchar": TYPE_STRING,
"char": TYPE_STRING,
"string": TYPE_STRING,
"json": TYPE_STRING,
-
"date": TYPE_DATE,
"timestamp": TYPE_DATETIME,
}
@@ -65,7 +70,7 @@ def configuration_schema(cls):
"schema",
],
"required": ["host", "username"],
- "secret": ["password"]
+ "secret": ["password"],
}
@classmethod
@@ -77,33 +82,55 @@ def type(cls):
return "trino"
def get_schema(self, get_stats=False):
+ if self.configuration.get("catalog"):
+ catalogs = [self.configuration.get("catalog")]
+ else:
+ catalogs = self._get_catalogs()
+
+ schema = {}
+ for catalog in catalogs:
+ query = f"""
+ SELECT table_schema, table_name, column_name, data_type
+ FROM {catalog}.information_schema.columns
+ WHERE table_schema NOT IN ('pg_catalog', 'information_schema')
+ """
+ results, error = self.run_query(query, None)
+
+ if error is not None:
+ self._handle_run_query_error(error)
+
+ for row in results["rows"]:
+ table_name = f'{catalog}.{row["table_schema"]}.{row["table_name"]}'
+
+ if table_name not in schema:
+ schema[table_name] = {"name": table_name, "columns": []}
+
+ column = {"name": row["column_name"], "type": row["data_type"]}
+ schema[table_name]["columns"].append(column)
+
+ return list(schema.values())
+
+ def _get_catalogs(self):
query = """
- SELECT table_schema, table_name, column_name
- FROM information_schema.columns
- WHERE table_schema NOT IN ('pg_catalog', 'information_schema')
+ SHOW CATALOGS
"""
results, error = self.run_query(query, None)
if error is not None:
- raise Exception("Failed getting schema.")
+ self._handle_run_query_error(error)
- results = json_loads(results)
- schema = {}
+ catalogs = []
for row in results["rows"]:
- table_name = f'{row["table_schema"]}.{row["table_name"]}'
-
- if table_name not in schema:
- schema[table_name] = {"name": table_name, "columns": []}
-
- schema[table_name]["columns"].append(row["column_name"])
-
- return list(schema.values())
+ catalog = row["Catalog"]
+ if "." in catalog:
+ catalog = f'"{catalog}"'
+ catalogs.append(catalog)
+ return catalogs
def run_query(self, query, user):
if self.configuration.get("password"):
auth = trino.auth.BasicAuthentication(
- username=self.configuration.get("username"),
- password=self.configuration.get("password")
+ username=self.configuration.get("username"), password=self.configuration.get("password")
)
else:
auth = trino.constants.DEFAULT_AUTH
@@ -111,10 +138,10 @@ def run_query(self, query, user):
http_scheme=self.configuration.get("protocol", "http"),
host=self.configuration.get("host", ""),
port=self.configuration.get("port", 8080),
- catalog=self.configuration.get("catalog", "hive"),
- schema=self.configuration.get("schema", "default"),
+ catalog=self.configuration.get("catalog", ""),
+ schema=self.configuration.get("schema", ""),
user=self.configuration.get("username"),
- auth=auth
+ auth=auth,
)
cursor = connection.cursor()
@@ -123,21 +150,12 @@ def run_query(self, query, user):
cursor.execute(query)
results = cursor.fetchall()
description = cursor.description
- columns = self.fetch_columns([
- (c[0], TRINO_TYPES_MAPPING.get(c[1], None)) for c in description
- ])
- rows = [
- dict(zip([c["name"] for c in columns], r))
- for r in results
- ]
- data = {
- "columns": columns,
- "rows": rows
- }
- json_data = json_dumps(data)
+ columns = self.fetch_columns([(c[0], TRINO_TYPES_MAPPING.get(c[1], None)) for c in description])
+ rows = [dict(zip([c["name"] for c in columns], r)) for r in results]
+ data = {"columns": columns, "rows": rows}
error = None
except DatabaseError as db:
- json_data = None
+ data = None
default_message = "Unspecified DatabaseError: {0}".format(str(db))
if isinstance(db.args[0], dict):
message = db.args[0].get("failureInfo", {"message", None}).get("message")
@@ -148,7 +166,7 @@ def run_query(self, query, user):
cursor.cancel()
raise
- return json_data, error
+ return data, error
register(Trino)
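
For context, a sketch of what the reworked get_schema now issues when no catalog is configured (illustrative only; "hive" is a hypothetical catalog name returned by SHOW CATALOGS):

catalog = "hive"  # one entry from SHOW CATALOGS; _get_catalogs() quotes it if it contains a dot
query = f"""
    SELECT table_schema, table_name, column_name, data_type
    FROM {catalog}.information_schema.columns
    WHERE table_schema NOT IN ('pg_catalog', 'information_schema')
"""
# each row is recorded under the fully qualified name "hive.<table_schema>.<table_name>",
# and columns now carry their data_type alongside the column name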
diff --git a/redash/query_runner/uptycs.py b/redash/query_runner/uptycs.py
index 15c9b30bc0..6e354181e5 100644
--- a/redash/query_runner/uptycs.py
+++ b/redash/query_runner/uptycs.py
@@ -1,10 +1,11 @@
-from redash.query_runner import *
-from redash.utils import json_dumps, json_loads
+import datetime
+import logging
import jwt
-import datetime
import requests
-import logging
+
+from redash.query_runner import BaseSQLQueryRunner, register
+from redash.utils import json_loads
logger = logging.getLogger(__name__)
@@ -57,14 +58,11 @@ def transformed_to_redash_json(self, data):
if "items" in data:
rows = data["items"]
- redash_json_data = {"columns": transformed_columns, "rows": rows}
- return redash_json_data
+ return {"columns": transformed_columns, "rows": rows}
def api_call(self, sql):
# JWT encoded header
- header = self.generate_header(
- self.configuration.get("key"), self.configuration.get("secret")
- )
+ header = self.generate_header(self.configuration.get("key"), self.configuration.get("secret"))
# URL form using API key file based on GLOBAL
url = "%s/public/api/customers/%s/query" % (
@@ -87,34 +85,29 @@ def api_call(self, sql):
else:
error = "status_code " + str(response.status_code) + "\n"
error = error + "failed to connect"
- json_data = {}
- return json_data, error
+ data = {}
+ return data, error
# if we get the right status code, call transformed_to_redash_json
- json_data = self.transformed_to_redash_json(response_output)
+ data = self.transformed_to_redash_json(response_output)
error = None
# if we got an error from Uptycs, include the error information
if "error" in response_output:
error = response_output["error"]["message"]["brief"]
error = error + "\n" + response_output["error"]["message"]["detail"]
- return json_data, error
+ return data, error
def run_query(self, query, user):
data, error = self.api_call(query)
- json_data = json_dumps(data)
- logger.debug("%s", json_data)
- return json_data, error
+ logger.debug("%s", data)
+ return data, error
def get_schema(self, get_stats=False):
- header = self.generate_header(
- self.configuration.get("key"), self.configuration.get("secret")
- )
+ header = self.generate_header(self.configuration.get("key"), self.configuration.get("secret"))
url = "%s/public/api/customers/%s/schema/global" % (
self.configuration.get("url"),
self.configuration.get("customer_id"),
)
- response = requests.get(
- url, headers=header, verify=self.configuration.get("verify_ssl", True)
- )
+ response = requests.get(url, headers=header, verify=self.configuration.get("verify_ssl", True))
redash_json = []
schema = json_loads(response.content)
for each_def in schema["tables"]:
diff --git a/redash/query_runner/vertica.py b/redash/query_runner/vertica.py
index a6989ee7dd..e178ed15f9 100644
--- a/redash/query_runner/vertica.py
+++ b/redash/query_runner/vertica.py
@@ -1,8 +1,15 @@
-import sys
import logging
-from redash.utils import json_loads, json_dumps
-from redash.query_runner import *
+from redash.query_runner import (
+ TYPE_BOOLEAN,
+ TYPE_DATE,
+ TYPE_DATETIME,
+ TYPE_FLOAT,
+ TYPE_INTEGER,
+ TYPE_STRING,
+ BaseSQLQueryRunner,
+ register,
+)
logger = logging.getLogger(__name__)
@@ -59,7 +66,7 @@ def configuration_schema(cls):
@classmethod
def enabled(cls):
try:
- import vertica_python
+ import vertica_python # noqa: F401
except ImportError:
return False
@@ -75,9 +82,7 @@ def _get_tables(self, schema):
results, error = self.run_query(query, None)
if error is not None:
- raise Exception("Failed getting schema.")
-
- results = json_loads(results)
+ self._handle_run_query_error(error)
for row in results["rows"]:
table_name = "{}.{}".format(row["table_schema"], row["table_name"])
@@ -93,9 +98,9 @@ def run_query(self, query, user):
import vertica_python
if query == "":
- json_data = None
+ data = None
error = "Query is empty"
- return json_data, error
+ return data, error
connection = None
try:
@@ -109,9 +114,7 @@ def run_query(self, query, user):
}
if self.configuration.get("connection_timeout"):
- conn_info["connection_timeout"] = self.configuration.get(
- "connection_timeout"
- )
+ conn_info["connection_timeout"] = self.configuration.get("connection_timeout")
connection = vertica_python.connect(**conn_info)
cursor = connection.cursor()
@@ -119,21 +122,15 @@ def run_query(self, query, user):
cursor.execute(query)
if cursor.description is not None:
- columns_data = [
- (i[0], types_map.get(i[1], None)) for i in cursor.description
- ]
+ columns_data = [(i[0], types_map.get(i[1], None)) for i in cursor.description]
columns = self.fetch_columns(columns_data)
- rows = [
- dict(zip(([c["name"] for c in columns]), r))
- for r in cursor.fetchall()
- ]
+ rows = [dict(zip(([c["name"] for c in columns]), r)) for r in cursor.fetchall()]
data = {"columns": columns, "rows": rows}
- json_data = json_dumps(data)
error = None
else:
- json_data = None
+ data = None
error = "No data was returned."
cursor.close()
@@ -141,7 +138,7 @@ def run_query(self, query, user):
if connection:
connection.close()
- return json_data, error
+ return data, error
register(Vertica)
diff --git a/redash/query_runner/yandex_disk.py b/redash/query_runner/yandex_disk.py
new file mode 100644
index 0000000000..5d305e23c8
--- /dev/null
+++ b/redash/query_runner/yandex_disk.py
@@ -0,0 +1,165 @@
+import logging
+from importlib.util import find_spec
+
+import requests
+import yaml
+
+from redash.query_runner import BaseSQLQueryRunner, register
+from redash.utils.pandas import pandas_installed
+
+openpyxl_installed = find_spec("openpyxl")
+
+if pandas_installed and openpyxl_installed:
+ import openpyxl # noqa: F401
+ import pandas as pd
+
+ from redash.utils.pandas import pandas_to_result
+
+ enabled = True
+
+ EXTENSIONS_READERS = {
+ "csv": pd.read_csv,
+ "tsv": pd.read_table,
+ "xls": pd.read_excel,
+ "xlsx": pd.read_excel,
+ }
+else:
+ enabled = False
+
+logger = logging.getLogger(__name__)
+
+
+class YandexDisk(BaseSQLQueryRunner):
+ should_annotate_query = False
+
+ @classmethod
+ def type(cls):
+ return "yandex_disk"
+
+ @classmethod
+ def name(cls):
+ return "Yandex Disk"
+
+ @classmethod
+ def configuration_schema(cls):
+ return {
+ "type": "object",
+ "properties": {
+ "token": {"type": "string", "title": "OAuth Token"},
+ },
+ "secret": ["token"],
+ "required": ["token"],
+ }
+
+ def __init__(self, configuration):
+ super(YandexDisk, self).__init__(configuration)
+ self.syntax = "yaml"
+ self.base_url = "https://cloud-api.yandex.net/v1/disk"
+ self.list_path = "counters"
+
+ def _get_tables(self, schema):
+ offset = 0
+ limit = 100
+
+ while True:
+ tmp_response = self._send_query(
+ "resources/public", media_type="spreadsheet,text", limit=limit, offset=offset
+ )
+
+ tmp_items = tmp_response["items"]
+
+ for file_info in tmp_items:
+ file_name = file_info["name"]
+ file_path = file_info["path"].replace("disk:", "")
+
+ file_extension = file_name.split(".")[-1].lower()
+ if file_extension not in EXTENSIONS_READERS:
+ continue
+
+ schema[file_name] = {"name": file_name, "columns": [file_path]}
+
+ if len(tmp_items) < limit:
+ break
+
+ offset += limit
+
+ return list(schema.values())
+
+ def test_connection(self):
+ self._send_query()
+
+ def _send_query(self, url_path="", **kwargs):
+ token = kwargs.pop("oauth_token", self.configuration["token"])
+ r = requests.get(
+ f"{self.base_url}/{url_path}",
+ headers={"Authorization": f"OAuth {token}"},
+ params=kwargs,
+ )
+
+ response_data = r.json()
+
+ if not r.ok:
+ error_message = f"Code: {r.status_code}, message: {r.text}"
+ raise Exception(error_message)
+ return response_data
+
+ def run_query(self, query, user):
+ logger.debug("Yandex Disk is about to execute query: %s", query)
+ data = None
+
+ if not query:
+ error = "Query is empty"
+ return data, error
+
+ try:
+ params = yaml.safe_load(query)
+ except (ValueError, AttributeError) as e:
+ logger.exception(e)
+ error = f"YAML read error: {str(e)}"
+ return data, error
+
+ if not isinstance(params, dict):
+ error = "The query format must be JSON or YAML"
+ return data, error
+
+ if "path" not in params:
+ error = "The query must contain path"
+ return data, error
+
+ file_extension = params["path"].split(".")[-1].lower()
+
+ read_params = {}
+ is_multiple_sheets = False
+
+ if file_extension not in EXTENSIONS_READERS:
+ error = f"Unsupported file extension: {file_extension}"
+ return data, error
+ elif file_extension in ("xls", "xlsx"):
+ read_params["sheet_name"] = params.get("sheet_name", 0)
+ if read_params["sheet_name"] is None:
+ is_multiple_sheets = True
+
+ file_url = self._send_query("resources/download", path=params["path"])["href"]
+
+ try:
+ df = EXTENSIONS_READERS[file_extension](file_url, **read_params)
+ except Exception as e:
+ logger.exception(e)
+ error = f"Read file error: {str(e)}"
+ return data, error
+
+ if is_multiple_sheets:
+ new_df = []
+ for sheet_name, sheet_df in df.items():
+ sheet_df["sheet_name"] = sheet_name
+ new_df.append(sheet_df)
+ new_df = pd.concat(new_df, ignore_index=True)
+ df = new_df.copy()
+
+ data = pandas_to_result(df)
+ error = None
+
+ return data, error
+
+
+register(YandexDisk)
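
For context, an illustrative query for the new Yandex Disk runner (not part of the diff; the file path is hypothetical). Queries are YAML documents with a required path; for xls/xlsx files an optional sheet_name may be given, and sheet_name: null reads every sheet and adds a sheet_name column to the concatenated result:

from redash.query_runner.yandex_disk import YandexDisk

runner = YandexDisk({"token": "<OAUTH_TOKEN>"})
query = """
path: /reports/sales.xlsx   # hypothetical file on the connected Yandex Disk
sheet_name: null            # null -> read all sheets and add a sheet_name column
"""
data, error = runner.run_query(query, None)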
diff --git a/redash/query_runner/yandex_metrica.py b/redash/query_runner/yandex_metrica.py
index 1802525e73..f8c7156ca8 100644
--- a/redash/query_runner/yandex_metrica.py
+++ b/redash/query_runner/yandex_metrica.py
@@ -1,11 +1,18 @@
import logging
-import yaml
from urllib.parse import parse_qs, urlparse
+import backoff
import requests
+import yaml
-from redash.query_runner import *
-from redash.utils import json_dumps
+from redash.query_runner import (
+ TYPE_DATE,
+ TYPE_DATETIME,
+ TYPE_FLOAT,
+ TYPE_STRING,
+ BaseSQLQueryRunner,
+ register,
+)
logger = logging.getLogger(__name__)
@@ -72,6 +79,10 @@ def parse_ym_response(response):
return {"columns": columns, "rows": rows}
+class QuotaException(Exception):
+ pass
+
+
class YandexMetrica(BaseSQLQueryRunner):
should_annotate_query = False
@@ -100,14 +111,11 @@ def __init__(self, configuration):
self.list_path = "counters"
def _get_tables(self, schema):
-
- counters = self._send_query("management/v1/{0}".format(self.list_path))
+ counters = self._send_query(f"management/v1/{self.list_path}")
for row in counters[self.list_path]:
owner = row.get("owner_login")
- counter = "{0} | {1}".format(
- row.get("name", "Unknown"), row.get("id", "Unknown")
- )
+ counter = f"{row.get('name', 'Unknown')} | {row.get('id', 'Unknown')}"
if owner not in schema:
schema[owner] = {"name": owner, "columns": []}
@@ -116,18 +124,26 @@ def _get_tables(self, schema):
return list(schema.values())
def test_connection(self):
- self._send_query("management/v1/{0}".format(self.list_path))
+ self._send_query(f"management/v1/{self.list_path}")
+ @backoff.on_exception(backoff.fibo, QuotaException, max_tries=10)
def _send_query(self, path="stat/v1/data", **kwargs):
token = kwargs.pop("oauth_token", self.configuration["token"])
r = requests.get(
- "{0}/{1}".format(self.url, path),
- headers={"Authorization": "OAuth {}".format(token)},
+ f"{self.url}/{path}",
+ headers={"Authorization": f"OAuth {token}"},
params=kwargs,
)
- if r.status_code != 200:
- raise Exception(r.text)
- return r.json()
+
+ response_data = r.json()
+
+ if not r.ok:
+ error_message = f"Code: {r.status_code}, message: {r.text}"
+ if r.status_code == 429:
+ logger.warning("Warning: 429 status code on Yandex Metrica query")
+ raise QuotaException(error_message)
+ raise Exception(error_message)
+ return response_data
def run_query(self, query, user):
logger.debug("Metrica is about to execute query: %s", query)
@@ -151,7 +167,7 @@ def run_query(self, query, user):
return data, error
try:
- data = json_dumps(parse_ym_response(self._send_query(**params)))
+ data = parse_ym_response(self._send_query(**params))
error = None
except Exception as e:
logging.exception(e)
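
A minimal sketch of the retry behaviour introduced above, assuming the backoff package's documented decorator API: only QuotaException (raised on HTTP 429) is retried, with Fibonacci back-off and at most 10 attempts, while any other failure still propagates immediately.

import backoff

class QuotaException(Exception):  # mirrors the class added in this diff
    pass

@backoff.on_exception(backoff.fibo, QuotaException, max_tries=10)
def send_query(do_request):
    response = do_request()  # any callable returning a requests-style response
    if response.status_code == 429:
        raise QuotaException(response.text)  # retried with Fibonacci delays
    if not response.ok:
        raise Exception(response.text)  # not retried
    return response.json()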
diff --git a/redash/security.py b/redash/security.py
index d5ebaee547..c123abbf13 100644
--- a/redash/security.py
+++ b/redash/security.py
@@ -1,13 +1,12 @@
import functools
-from flask import session, request
+
+from flask import session
from flask_login import current_user
from flask_talisman import talisman
from flask_wtf.csrf import CSRFProtect, generate_csrf
-
from redash import settings
-
talisman = talisman.Talisman()
csrf = CSRFProtect()
@@ -18,9 +17,7 @@ def decorated(*args, **kwargs):
return fn(*args, **kwargs)
embedable_csp = talisman.content_security_policy + "frame-ancestors *;"
- return talisman(content_security_policy=embedable_csp, frame_options=None)(
- decorated
- )
+ return talisman(content_security_policy=embedable_csp, frame_options=None)(decorated)
def init_app(app):
@@ -35,19 +32,9 @@ def inject_csrf_token(response):
return response
if settings.ENFORCE_CSRF:
+
@app.before_request
def check_csrf():
- # BEGIN workaround until https://github.com/lepture/flask-wtf/pull/419 is merged
- if request.blueprint in csrf._exempt_blueprints:
- return
-
- view = app.view_functions.get(request.endpoint)
- dest = f'{view.__module__}.{view.__name__}'
-
- if dest in csrf._exempt_views:
- return
- # END workaround
-
if not current_user.is_authenticated or "user_id" in session:
csrf.protect()
diff --git a/redash/serializers/__init__.py b/redash/serializers/__init__.py
index 6105364c49..41a370e43e 100644
--- a/redash/serializers/__init__.py
+++ b/redash/serializers/__init__.py
@@ -3,19 +3,16 @@
classes we have. This will ensure cleaner code and better
separation of concerns.
"""
-from funcy import project
from flask_login import current_user
+from funcy import project
from rq.job import JobStatus
from rq.timeouts import JobTimeoutException
from redash import models
-from redash.permissions import has_access, view_only
-from redash.utils import json_loads
from redash.models.parameterized_query import ParameterizedQuery
-
-
-from .query_result import (
+from redash.permissions import has_access, view_only
+from redash.serializers.query_result import (
serialize_query_result,
serialize_query_result_to_dsv,
serialize_query_result_to_xlsx,
@@ -26,7 +23,7 @@ def public_widget(widget):
res = {
"id": widget.id,
"width": widget.width,
- "options": json_loads(widget.options),
+ "options": widget.options,
"text": widget.text,
"updated_at": widget.updated_at,
"created_at": widget.created_at,
@@ -38,7 +35,7 @@ def public_widget(widget):
"type": v.type,
"name": v.name,
"description": v.description,
- "options": json_loads(v.options),
+ "options": v.options,
"updated_at": v.updated_at,
"created_at": v.created_at,
"query": {
@@ -68,7 +65,7 @@ def public_dashboard(dashboard):
return dashboard_dict
-class Serializer(object):
+class Serializer:
pass
@@ -80,21 +77,12 @@ def __init__(self, object_or_list, **kwargs):
def serialize(self):
if isinstance(self.object_or_list, models.Query):
result = serialize_query(self.object_or_list, **self.options)
- if (
- self.options.get("with_favorite_state", True)
- and not current_user.is_api_user()
- ):
- result["is_favorite"] = models.Favorite.is_favorite(
- current_user.id, self.object_or_list
- )
+ if self.options.get("with_favorite_state", True) and not current_user.is_api_user():
+ result["is_favorite"] = models.Favorite.is_favorite(current_user.id, self.object_or_list)
else:
- result = [
- serialize_query(query, **self.options) for query in self.object_or_list
- ]
+ result = [serialize_query(query, **self.options) for query in self.object_or_list]
if self.options.get("with_favorite_state", True):
- favorite_ids = models.Favorite.are_favorites(
- current_user.id, self.object_or_list
- )
+ favorite_ids = models.Favorite.are_favorites(current_user.id, self.object_or_list)
for query in result:
query["is_favorite"] = query["id"] in favorite_ids
@@ -134,11 +122,7 @@ def serialize_query(
d["user_id"] = query.user_id
if with_last_modified_by:
- d["last_modified_by"] = (
- query.last_modified_by.to_dict()
- if query.last_modified_by is not None
- else None
- )
+ d["last_modified_by"] = query.last_modified_by.to_dict() if query.last_modified_by is not None else None
else:
d["last_modified_by_id"] = query.last_modified_by_id
@@ -151,10 +135,7 @@ def serialize_query(
d["runtime"] = None
if with_visualizations:
- d["visualizations"] = [
- serialize_visualization(vis, with_query=False)
- for vis in query.visualizations
- ]
+ d["visualizations"] = [serialize_visualization(vis, with_query=False) for vis in query.visualizations]
return d
@@ -165,7 +146,7 @@ def serialize_visualization(object, with_query=True):
"type": object.type,
"name": object.name,
"description": object.description,
- "options": json_loads(object.options),
+ "options": object.options,
"updated_at": object.updated_at,
"created_at": object.created_at,
}
@@ -180,7 +161,7 @@ def serialize_widget(object):
d = {
"id": object.id,
"width": object.width,
- "options": json_loads(object.options),
+ "options": object.options,
"dashboard_id": object.dashboard_id,
"text": object.text,
"updated_at": object.updated_at,
@@ -216,7 +197,7 @@ def serialize_alert(alert, full=True):
def serialize_dashboard(obj, with_widgets=False, user=None, with_favorite_state=True):
- layout = json_loads(obj.layout)
+ layout = obj.layout
widgets = []
@@ -277,21 +258,12 @@ def __init__(self, object_or_list, **kwargs):
def serialize(self):
if isinstance(self.object_or_list, models.Dashboard):
result = serialize_dashboard(self.object_or_list, **self.options)
- if (
- self.options.get("with_favorite_state", True)
- and not current_user.is_api_user()
- ):
- result["is_favorite"] = models.Favorite.is_favorite(
- current_user.id, self.object_or_list
- )
+ if self.options.get("with_favorite_state", True) and not current_user.is_api_user():
+ result["is_favorite"] = models.Favorite.is_favorite(current_user.id, self.object_or_list)
else:
- result = [
- serialize_dashboard(obj, **self.options) for obj in self.object_or_list
- ]
+ result = [serialize_dashboard(obj, **self.options) for obj in self.object_or_list]
if self.options.get("with_favorite_state", True):
- favorite_ids = models.Favorite.are_favorites(
- current_user.id, self.object_or_list
- )
+ favorite_ids = models.Favorite.are_favorites(current_user.id, self.object_or_list)
for obj in result:
obj["is_favorite"] = obj["id"] in favorite_ids
@@ -305,6 +277,9 @@ def serialize_job(job):
JobStatus.STARTED: 2,
JobStatus.FINISHED: 3,
JobStatus.FAILED: 4,
+ JobStatus.CANCELED: 5,
+ JobStatus.DEFERRED: 6,
+ JobStatus.SCHEDULED: 7,
}
job_status = job.get_status()
diff --git a/redash/serializers/query_result.py b/redash/serializers/query_result.py
index 9eab2a1a42..1944d4cf8f 100644
--- a/redash/serializers/query_result.py
+++ b/redash/serializers/query_result.py
@@ -1,11 +1,12 @@
-import io
import csv
+import io
+
import xlsxwriter
-from funcy import rpartial, project
from dateutil.parser import isoparse as parse_date
-from redash.utils import json_loads, UnicodeWriter
-from redash.query_runner import TYPE_BOOLEAN, TYPE_DATE, TYPE_DATETIME
+from funcy import project, rpartial
+
from redash.authentication.org_resolving import current_org
+from redash.query_runner import TYPE_BOOLEAN, TYPE_DATE, TYPE_DATETIME
def _convert_format(fmt):
diff --git a/redash/settings/__init__.py b/redash/settings/__init__.py
index 0986a6de53..a0033c7d48 100644
--- a/redash/settings/__init__.py
+++ b/redash/settings/__init__.py
@@ -1,24 +1,23 @@
-import os
import importlib
+import os
import ssl
-from funcy import distinct, remove
+
from flask_talisman import talisman
+from funcy import distinct, remove
-from .helpers import (
- fix_assets_path,
+from redash.settings.helpers import (
+ add_decode_responses_to_redis_url,
array_from_string,
- parse_boolean,
+ cast_int_or_default,
+ fix_assets_path,
int_or_none,
+ parse_boolean,
set_from_string,
- add_decode_responses_to_redis_url,
- cast_int_or_default
)
-from .organization import DATE_FORMAT, TIME_FORMAT # noqa
+from redash.settings.organization import DATE_FORMAT, TIME_FORMAT # noqa
# _REDIS_URL is the unchanged REDIS_URL we get from env vars, to be used later with RQ
-_REDIS_URL = os.environ.get(
- "REDASH_REDIS_URL", os.environ.get("REDIS_URL", "redis://localhost:6379/0")
-)
+_REDIS_URL = os.environ.get("REDASH_REDIS_URL", os.environ.get("REDIS_URL", "redis://localhost:6379/0"))
# This is the one to use for Redash' own connection:
REDIS_URL = add_decode_responses_to_redis_url(_REDIS_URL)
PROXIES_COUNT = int(os.environ.get("REDASH_PROXIES_COUNT", "1"))
@@ -39,34 +38,27 @@
SQLALCHEMY_MAX_OVERFLOW = int_or_none(os.environ.get("SQLALCHEMY_MAX_OVERFLOW"))
SQLALCHEMY_POOL_SIZE = int_or_none(os.environ.get("SQLALCHEMY_POOL_SIZE"))
-SQLALCHEMY_DISABLE_POOL = parse_boolean(
- os.environ.get("SQLALCHEMY_DISABLE_POOL", "false")
-)
-SQLALCHEMY_ENABLE_POOL_PRE_PING = parse_boolean(
- os.environ.get("SQLALCHEMY_ENABLE_POOL_PRE_PING", "false")
-)
+SQLALCHEMY_DISABLE_POOL = parse_boolean(os.environ.get("SQLALCHEMY_DISABLE_POOL", "false"))
+SQLALCHEMY_ENABLE_POOL_PRE_PING = parse_boolean(os.environ.get("SQLALCHEMY_ENABLE_POOL_PRE_PING", "false"))
SQLALCHEMY_TRACK_MODIFICATIONS = False
SQLALCHEMY_ECHO = False
RQ_REDIS_URL = os.environ.get("RQ_REDIS_URL", _REDIS_URL)
# The following enables a periodic job (every 5 minutes) that removes unused query results.
-QUERY_RESULTS_CLEANUP_ENABLED = parse_boolean(
- os.environ.get("REDASH_QUERY_RESULTS_CLEANUP_ENABLED", "true")
-)
-QUERY_RESULTS_CLEANUP_COUNT = int(
- os.environ.get("REDASH_QUERY_RESULTS_CLEANUP_COUNT", "100")
-)
-QUERY_RESULTS_CLEANUP_MAX_AGE = int(
- os.environ.get("REDASH_QUERY_RESULTS_CLEANUP_MAX_AGE", "7")
-)
+QUERY_RESULTS_CLEANUP_ENABLED = parse_boolean(os.environ.get("REDASH_QUERY_RESULTS_CLEANUP_ENABLED", "true"))
+QUERY_RESULTS_CLEANUP_COUNT = int(os.environ.get("REDASH_QUERY_RESULTS_CLEANUP_COUNT", "100"))
+QUERY_RESULTS_CLEANUP_MAX_AGE = int(os.environ.get("REDASH_QUERY_RESULTS_CLEANUP_MAX_AGE", "7"))
+
+QUERY_RESULTS_EXPIRED_TTL_ENABLED = parse_boolean(os.environ.get("REDASH_QUERY_RESULTS_EXPIRED_TTL_ENABLED", "false"))
+# Default TTL for expired query results: 86400 seconds (one day)
+QUERY_RESULTS_EXPIRED_TTL = int(os.environ.get("REDASH_QUERY_RESULTS_EXPIRED_TTL", "86400"))
SCHEMAS_REFRESH_SCHEDULE = int(os.environ.get("REDASH_SCHEMAS_REFRESH_SCHEDULE", 30))
+SCHEMAS_REFRESH_TIMEOUT = int(os.environ.get("REDASH_SCHEMAS_REFRESH_TIMEOUT", 300))
AUTH_TYPE = os.environ.get("REDASH_AUTH_TYPE", "api_key")
-INVITATION_TOKEN_MAX_AGE = int(
- os.environ.get("REDASH_INVITATION_TOKEN_MAX_AGE", 60 * 60 * 24 * 7)
-)
+INVITATION_TOKEN_MAX_AGE = int(os.environ.get("REDASH_INVITATION_TOKEN_MAX_AGE", 60 * 60 * 24 * 7))
# The secret key to use in the Flask app for various cryptographic features
SECRET_KEY = os.environ.get("REDASH_COOKIE_SECRET")
@@ -79,43 +71,27 @@
# Whether and how to redirect non-HTTP requests to HTTPS. Disabled by default.
ENFORCE_HTTPS = parse_boolean(os.environ.get("REDASH_ENFORCE_HTTPS", "false"))
-ENFORCE_HTTPS_PERMANENT = parse_boolean(
- os.environ.get("REDASH_ENFORCE_HTTPS_PERMANENT", "false")
-)
+ENFORCE_HTTPS_PERMANENT = parse_boolean(os.environ.get("REDASH_ENFORCE_HTTPS_PERMANENT", "false"))
# Whether file downloads are enforced or not.
ENFORCE_FILE_SAVE = parse_boolean(os.environ.get("REDASH_ENFORCE_FILE_SAVE", "true"))
# Whether api calls using the json query runner will block private addresses
-ENFORCE_PRIVATE_ADDRESS_BLOCK = parse_boolean(
- os.environ.get("REDASH_ENFORCE_PRIVATE_IP_BLOCK", "true")
-)
+ENFORCE_PRIVATE_ADDRESS_BLOCK = parse_boolean(os.environ.get("REDASH_ENFORCE_PRIVATE_IP_BLOCK", "true"))
# Whether to use secure cookies by default.
-COOKIES_SECURE = parse_boolean(
- os.environ.get("REDASH_COOKIES_SECURE", str(ENFORCE_HTTPS))
-)
+COOKIES_SECURE = parse_boolean(os.environ.get("REDASH_COOKIES_SECURE", str(ENFORCE_HTTPS)))
# Whether the session cookie is set to secure.
-SESSION_COOKIE_SECURE = parse_boolean(
- os.environ.get("REDASH_SESSION_COOKIE_SECURE") or str(COOKIES_SECURE)
-)
+SESSION_COOKIE_SECURE = parse_boolean(os.environ.get("REDASH_SESSION_COOKIE_SECURE") or str(COOKIES_SECURE))
# Whether the session cookie is set HttpOnly.
-SESSION_COOKIE_HTTPONLY = parse_boolean(
- os.environ.get("REDASH_SESSION_COOKIE_HTTPONLY", "true")
-)
+SESSION_COOKIE_HTTPONLY = parse_boolean(os.environ.get("REDASH_SESSION_COOKIE_HTTPONLY", "true"))
SESSION_EXPIRY_TIME = int(os.environ.get("REDASH_SESSION_EXPIRY_TIME", 60 * 60 * 6))
# Whether the session cookie is set to secure.
-REMEMBER_COOKIE_SECURE = parse_boolean(
- os.environ.get("REDASH_REMEMBER_COOKIE_SECURE") or str(COOKIES_SECURE)
-)
+REMEMBER_COOKIE_SECURE = parse_boolean(os.environ.get("REDASH_REMEMBER_COOKIE_SECURE") or str(COOKIES_SECURE))
# Whether the remember cookie is set HttpOnly.
-REMEMBER_COOKIE_HTTPONLY = parse_boolean(
- os.environ.get("REDASH_REMEMBER_COOKIE_HTTPONLY", "true")
-)
+REMEMBER_COOKIE_HTTPONLY = parse_boolean(os.environ.get("REDASH_REMEMBER_COOKIE_HTTPONLY", "true"))
# The amount of time before the remember cookie expires.
-REMEMBER_COOKIE_DURATION = int(
- os.environ.get("REDASH_REMEMBER_COOKIE_DURATION", 60 * 60 * 24 * 31)
-)
+REMEMBER_COOKIE_DURATION = int(os.environ.get("REDASH_REMEMBER_COOKIE_DURATION", 60 * 60 * 24 * 31))
# Doesn't set X-Frame-Options by default since it's highly dependent
# on the specific deployment.
@@ -127,14 +103,10 @@
# Whether and how to send Strict-Transport-Security response headers.
# See https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/Strict-Transport-Security
# for more information.
-HSTS_ENABLED = parse_boolean(
- os.environ.get("REDASH_HSTS_ENABLED") or str(ENFORCE_HTTPS)
-)
+HSTS_ENABLED = parse_boolean(os.environ.get("REDASH_HSTS_ENABLED") or str(ENFORCE_HTTPS))
HSTS_PRELOAD = parse_boolean(os.environ.get("REDASH_HSTS_PRELOAD", "false"))
HSTS_MAX_AGE = int(os.environ.get("REDASH_HSTS_MAX_AGE", talisman.ONE_YEAR_IN_SECS))
-HSTS_INCLUDE_SUBDOMAINS = parse_boolean(
- os.environ.get("REDASH_HSTS_INCLUDE_SUBDOMAINS", "false")
-)
+HSTS_INCLUDE_SUBDOMAINS = parse_boolean(os.environ.get("REDASH_HSTS_INCLUDE_SUBDOMAINS", "false"))
# Whether and how to send Content-Security-Policy response headers.
# See https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/Content-Security-Policy
@@ -147,28 +119,22 @@
"REDASH_CONTENT_SECURITY_POLICY",
"default-src 'self'; style-src 'self' 'unsafe-inline'; script-src 'self' 'unsafe-eval'; font-src 'self' data:; img-src 'self' http: https: data: blob:; object-src 'none'; frame-ancestors 'none'; frame-src redash.io;",
)
-CONTENT_SECURITY_POLICY_REPORT_URI = os.environ.get(
- "REDASH_CONTENT_SECURITY_POLICY_REPORT_URI", ""
-)
+CONTENT_SECURITY_POLICY_REPORT_URI = os.environ.get("REDASH_CONTENT_SECURITY_POLICY_REPORT_URI", "")
CONTENT_SECURITY_POLICY_REPORT_ONLY = parse_boolean(
os.environ.get("REDASH_CONTENT_SECURITY_POLICY_REPORT_ONLY", "false")
)
-CONTENT_SECURITY_POLICY_NONCE_IN = array_from_string(
- os.environ.get("REDASH_CONTENT_SECURITY_POLICY_NONCE_IN", "")
-)
+CONTENT_SECURITY_POLICY_NONCE_IN = array_from_string(os.environ.get("REDASH_CONTENT_SECURITY_POLICY_NONCE_IN", ""))
# Whether and how to send Referrer-Policy response headers. Defaults to
# 'strict-origin-when-cross-origin'.
# See https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/Referrer-Policy
# for more information.
-REFERRER_POLICY = os.environ.get(
- "REDASH_REFERRER_POLICY", "strict-origin-when-cross-origin"
-)
+REFERRER_POLICY = os.environ.get("REDASH_REFERRER_POLICY", "strict-origin-when-cross-origin")
# Whether and how to send Feature-Policy response headers. Defaults to
# an empty value.
# See https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/Feature-Policy
# for more information.
-FEATURE_POLICY = os.environ.get("REDASH_REFERRER_POLICY", "")
+FEATURE_POLICY = os.environ.get("REDASH_FEATURE_POLICY", "")
MULTI_ORG = parse_boolean(os.environ.get("REDASH_MULTI_ORG", "false"))
@@ -210,12 +176,8 @@
# If you also set the organization setting auth_password_login_enabled to false,
# then your authentication will be seamless. Otherwise a link will be presented
# on the login page to trigger remote user auth.
-REMOTE_USER_LOGIN_ENABLED = parse_boolean(
- os.environ.get("REDASH_REMOTE_USER_LOGIN_ENABLED", "false")
-)
-REMOTE_USER_HEADER = os.environ.get(
- "REDASH_REMOTE_USER_HEADER", "X-Forwarded-Remote-User"
-)
+REMOTE_USER_LOGIN_ENABLED = parse_boolean(os.environ.get("REDASH_REMOTE_USER_LOGIN_ENABLED", "false"))
+REMOTE_USER_HEADER = os.environ.get("REDASH_REMOTE_USER_HEADER", "X-Forwarded-Remote-User")
# If the organization setting auth_password_login_enabled is not false, then users will still be
# able to login through Redash instead of the LDAP server
@@ -234,36 +196,22 @@
LDAP_DISPLAY_NAME_KEY = os.environ.get("REDASH_LDAP_DISPLAY_NAME_KEY", "displayName")
LDAP_EMAIL_KEY = os.environ.get("REDASH_LDAP_EMAIL_KEY", "mail")
# Prompt that should be shown above username/email field.
-LDAP_CUSTOM_USERNAME_PROMPT = os.environ.get(
- "REDASH_LDAP_CUSTOM_USERNAME_PROMPT", "LDAP/AD/SSO username:"
-)
+LDAP_CUSTOM_USERNAME_PROMPT = os.environ.get("REDASH_LDAP_CUSTOM_USERNAME_PROMPT", "LDAP/AD/SSO username:")
# LDAP Search DN TEMPLATE (for AD this should be "(sAMAccountName=%(username)s)"")
-LDAP_SEARCH_TEMPLATE = os.environ.get(
- "REDASH_LDAP_SEARCH_TEMPLATE", "(cn=%(username)s)"
-)
+LDAP_SEARCH_TEMPLATE = os.environ.get("REDASH_LDAP_SEARCH_TEMPLATE", "(cn=%(username)s)")
# The schema to bind to (ex. cn=users,dc=ORG,dc=local)
-LDAP_SEARCH_DN = os.environ.get(
- "REDASH_LDAP_SEARCH_DN", os.environ.get("REDASH_SEARCH_DN")
-)
+LDAP_SEARCH_DN = os.environ.get("REDASH_LDAP_SEARCH_DN", os.environ.get("REDASH_SEARCH_DN"))
-STATIC_ASSETS_PATH = fix_assets_path(
- os.environ.get("REDASH_STATIC_ASSETS_PATH", "../client/dist/")
-)
-FLASK_TEMPLATE_PATH = fix_assets_path(
- os.environ.get("REDASH_FLASK_TEMPLATE_PATH", STATIC_ASSETS_PATH)
-)
+STATIC_ASSETS_PATH = fix_assets_path(os.environ.get("REDASH_STATIC_ASSETS_PATH", "../client/dist/"))
+FLASK_TEMPLATE_PATH = fix_assets_path(os.environ.get("REDASH_FLASK_TEMPLATE_PATH", STATIC_ASSETS_PATH))
# Time limit (in seconds) for scheduled queries. Set this to -1 to execute without a time limit.
-SCHEDULED_QUERY_TIME_LIMIT = int(
- os.environ.get("REDASH_SCHEDULED_QUERY_TIME_LIMIT", -1)
-)
+SCHEDULED_QUERY_TIME_LIMIT = int(os.environ.get("REDASH_SCHEDULED_QUERY_TIME_LIMIT", -1))
# Time limit (in seconds) for adhoc queries. Set this to -1 to execute without a time limit.
ADHOC_QUERY_TIME_LIMIT = int(os.environ.get("REDASH_ADHOC_QUERY_TIME_LIMIT", -1))
JOB_EXPIRY_TIME = int(os.environ.get("REDASH_JOB_EXPIRY_TIME", 3600 * 12))
-JOB_DEFAULT_FAILURE_TTL = int(
- os.environ.get("REDASH_JOB_DEFAULT_FAILURE_TTL", 7 * 24 * 60 * 60)
-)
+JOB_DEFAULT_FAILURE_TTL = int(os.environ.get("REDASH_JOB_DEFAULT_FAILURE_TTL", 7 * 24 * 60 * 60))
LOG_LEVEL = os.environ.get("REDASH_LOG_LEVEL", "INFO")
LOG_STDOUT = parse_boolean(os.environ.get("REDASH_LOG_STDOUT", "false"))
@@ -290,9 +238,7 @@
MAIL_PASSWORD = os.environ.get("REDASH_MAIL_PASSWORD", None)
MAIL_DEFAULT_SENDER = os.environ.get("REDASH_MAIL_DEFAULT_SENDER", None)
MAIL_MAX_EMAILS = os.environ.get("REDASH_MAIL_MAX_EMAILS", None)
-MAIL_ASCII_ATTACHMENTS = parse_boolean(
- os.environ.get("REDASH_MAIL_ASCII_ATTACHMENTS", "false")
-)
+MAIL_ASCII_ATTACHMENTS = parse_boolean(os.environ.get("REDASH_MAIL_ASCII_ATTACHMENTS", "false"))
def email_server_is_configured():
@@ -301,15 +247,15 @@ def email_server_is_configured():
HOST = os.environ.get("REDASH_HOST", "")
-SEND_FAILURE_EMAIL_INTERVAL = int(
- os.environ.get("REDASH_SEND_FAILURE_EMAIL_INTERVAL", 60)
-)
-MAX_FAILURE_REPORTS_PER_QUERY = int(
- os.environ.get("REDASH_MAX_FAILURE_REPORTS_PER_QUERY", 100)
-)
+SEND_FAILURE_EMAIL_INTERVAL = int(os.environ.get("REDASH_SEND_FAILURE_EMAIL_INTERVAL", 60))
+MAX_FAILURE_REPORTS_PER_QUERY = int(os.environ.get("REDASH_MAX_FAILURE_REPORTS_PER_QUERY", 100))
ALERTS_DEFAULT_MAIL_SUBJECT_TEMPLATE = os.environ.get(
- "REDASH_ALERTS_DEFAULT_MAIL_SUBJECT_TEMPLATE", "({state}) {alert_name}"
+ "REDASH_ALERTS_DEFAULT_MAIL_SUBJECT_TEMPLATE", "Alert: {alert_name} changed status to {state}"
+)
+
+REDASH_ALERTS_DEFAULT_MAIL_BODY_TEMPLATE_FILE = os.environ.get(
+ "REDASH_ALERTS_DEFAULT_MAIL_BODY_TEMPLATE_FILE", fix_assets_path("templates/emails/alert.html")
)
# How many requests are allowed per IP to the login page before
@@ -324,18 +270,12 @@ def email_server_is_configured():
# CORS settings for the Query Result API (and possibly future external APIs).
# In most cases all you need to do is set REDASH_CORS_ACCESS_CONTROL_ALLOW_ORIGIN
# to the calling domain (or domains in a comma separated list).
-ACCESS_CONTROL_ALLOW_ORIGIN = set_from_string(
- os.environ.get("REDASH_CORS_ACCESS_CONTROL_ALLOW_ORIGIN", "")
-)
+ACCESS_CONTROL_ALLOW_ORIGIN = set_from_string(os.environ.get("REDASH_CORS_ACCESS_CONTROL_ALLOW_ORIGIN", ""))
ACCESS_CONTROL_ALLOW_CREDENTIALS = parse_boolean(
os.environ.get("REDASH_CORS_ACCESS_CONTROL_ALLOW_CREDENTIALS", "false")
)
-ACCESS_CONTROL_REQUEST_METHOD = os.environ.get(
- "REDASH_CORS_ACCESS_CONTROL_REQUEST_METHOD", "GET, POST, PUT"
-)
-ACCESS_CONTROL_ALLOW_HEADERS = os.environ.get(
- "REDASH_CORS_ACCESS_CONTROL_ALLOW_HEADERS", "Content-Type"
-)
+ACCESS_CONTROL_REQUEST_METHOD = os.environ.get("REDASH_CORS_ACCESS_CONTROL_REQUEST_METHOD", "GET, POST, PUT")
+ACCESS_CONTROL_ALLOW_HEADERS = os.environ.get("REDASH_CORS_ACCESS_CONTROL_ALLOW_HEADERS", "Content-Type")
# Query Runners
default_query_runners = [
@@ -349,31 +289,33 @@ def email_server_is_configured():
"redash.query_runner.pg",
"redash.query_runner.url",
"redash.query_runner.influx_db",
+ "redash.query_runner.influx_db_v2",
"redash.query_runner.elasticsearch",
+ "redash.query_runner.elasticsearch2",
"redash.query_runner.amazon_elasticsearch",
"redash.query_runner.trino",
"redash.query_runner.presto",
+ "redash.query_runner.pinot",
"redash.query_runner.databricks",
"redash.query_runner.hive_ds",
"redash.query_runner.impala_ds",
"redash.query_runner.vertica",
"redash.query_runner.clickhouse",
+ "redash.query_runner.tinybird",
"redash.query_runner.yandex_metrica",
+ "redash.query_runner.yandex_disk",
"redash.query_runner.rockset",
"redash.query_runner.treasuredata",
"redash.query_runner.sqlite",
- "redash.query_runner.dynamodb_sql",
"redash.query_runner.mssql",
"redash.query_runner.mssql_odbc",
"redash.query_runner.memsql_ds",
- "redash.query_runner.mapd",
"redash.query_runner.jql",
"redash.query_runner.google_analytics",
"redash.query_runner.axibase_tsd",
"redash.query_runner.salesforce",
"redash.query_runner.query_results",
"redash.query_runner.prometheus",
- "redash.query_runner.qubole",
"redash.query_runner.db2",
"redash.query_runner.druid",
"redash.query_runner.kylin",
@@ -398,12 +340,8 @@ def email_server_is_configured():
enabled_query_runners = array_from_string(
os.environ.get("REDASH_ENABLED_QUERY_RUNNERS", ",".join(default_query_runners))
)
-additional_query_runners = array_from_string(
- os.environ.get("REDASH_ADDITIONAL_QUERY_RUNNERS", "")
-)
-disabled_query_runners = array_from_string(
- os.environ.get("REDASH_DISABLED_QUERY_RUNNERS", "")
-)
+additional_query_runners = array_from_string(os.environ.get("REDASH_ADDITIONAL_QUERY_RUNNERS", ""))
+disabled_query_runners = array_from_string(os.environ.get("REDASH_DISABLED_QUERY_RUNNERS", ""))
QUERY_RUNNERS = remove(
set(disabled_query_runners),
@@ -419,42 +357,34 @@ def email_server_is_configured():
"redash.destinations.email",
"redash.destinations.slack",
"redash.destinations.webhook",
- "redash.destinations.hipchat",
+ "redash.destinations.discord",
"redash.destinations.mattermost",
"redash.destinations.chatwork",
"redash.destinations.pagerduty",
"redash.destinations.hangoutschat",
+ "redash.destinations.microsoft_teams_webhook",
+ "redash.destinations.asana",
+ "redash.destinations.webex",
+ "redash.destinations.datadog",
]
-enabled_destinations = array_from_string(
- os.environ.get("REDASH_ENABLED_DESTINATIONS", ",".join(default_destinations))
-)
-additional_destinations = array_from_string(
- os.environ.get("REDASH_ADDITIONAL_DESTINATIONS", "")
-)
+enabled_destinations = array_from_string(os.environ.get("REDASH_ENABLED_DESTINATIONS", ",".join(default_destinations)))
+additional_destinations = array_from_string(os.environ.get("REDASH_ADDITIONAL_DESTINATIONS", ""))
DESTINATIONS = distinct(enabled_destinations + additional_destinations)
-EVENT_REPORTING_WEBHOOKS = array_from_string(
- os.environ.get("REDASH_EVENT_REPORTING_WEBHOOKS", "")
-)
+EVENT_REPORTING_WEBHOOKS = array_from_string(os.environ.get("REDASH_EVENT_REPORTING_WEBHOOKS", ""))
# Support for Sentry (https://getsentry.com/). Just set your Sentry DSN to enable it:
SENTRY_DSN = os.environ.get("REDASH_SENTRY_DSN", "")
SENTRY_ENVIRONMENT = os.environ.get("REDASH_SENTRY_ENVIRONMENT")
# Client side toggles:
-ALLOW_SCRIPTS_IN_USER_INPUT = parse_boolean(
- os.environ.get("REDASH_ALLOW_SCRIPTS_IN_USER_INPUT", "false")
-)
+ALLOW_SCRIPTS_IN_USER_INPUT = parse_boolean(os.environ.get("REDASH_ALLOW_SCRIPTS_IN_USER_INPUT", "false"))
DASHBOARD_REFRESH_INTERVALS = list(
map(
int,
- array_from_string(
- os.environ.get(
- "REDASH_DASHBOARD_REFRESH_INTERVALS", "60,300,600,1800,3600,43200,86400"
- )
- ),
+ array_from_string(os.environ.get("REDASH_DASHBOARD_REFRESH_INTERVALS", "60,300,600,1800,3600,43200,86400")),
)
)
QUERY_REFRESH_INTERVALS = list(
@@ -478,22 +408,13 @@ def email_server_is_configured():
TABLE_CELL_MAX_JSON_SIZE = int(os.environ.get("REDASH_TABLE_CELL_MAX_JSON_SIZE", 50000))
# Features:
-VERSION_CHECK = parse_boolean(os.environ.get("REDASH_VERSION_CHECK", "true"))
-FEATURE_DISABLE_REFRESH_QUERIES = parse_boolean(
- os.environ.get("REDASH_FEATURE_DISABLE_REFRESH_QUERIES", "false")
-)
-FEATURE_SHOW_QUERY_RESULTS_COUNT = parse_boolean(
- os.environ.get("REDASH_FEATURE_SHOW_QUERY_RESULTS_COUNT", "true")
-)
+FEATURE_DISABLE_REFRESH_QUERIES = parse_boolean(os.environ.get("REDASH_FEATURE_DISABLE_REFRESH_QUERIES", "false"))
+FEATURE_SHOW_QUERY_RESULTS_COUNT = parse_boolean(os.environ.get("REDASH_FEATURE_SHOW_QUERY_RESULTS_COUNT", "true"))
FEATURE_ALLOW_CUSTOM_JS_VISUALIZATIONS = parse_boolean(
- os.environ.get("REDASH_FEATURE_ALLOW_CUSTOM_JS_VISUALIZATIONS", "false")
-)
-FEATURE_AUTO_PUBLISH_NAMED_QUERIES = parse_boolean(
- os.environ.get("REDASH_FEATURE_AUTO_PUBLISH_NAMED_QUERIES", "true")
-)
-FEATURE_EXTENDED_ALERT_OPTIONS = parse_boolean(
- os.environ.get("REDASH_FEATURE_EXTENDED_ALERT_OPTIONS", "false")
+ os.environ.get("REDASH_FEATURE_ALLOW_CUSTOM_JS_VISUALIZATIONS", "true")
)
+FEATURE_AUTO_PUBLISH_NAMED_QUERIES = parse_boolean(os.environ.get("REDASH_FEATURE_AUTO_PUBLISH_NAMED_QUERIES", "true"))
+FEATURE_EXTENDED_ALERT_OPTIONS = parse_boolean(os.environ.get("REDASH_FEATURE_EXTENDED_ALERT_OPTIONS", "false"))
# BigQuery
BIGQUERY_HTTP_TIMEOUT = int(os.environ.get("REDASH_BIGQUERY_HTTP_TIMEOUT", "600"))
@@ -501,9 +422,7 @@ def email_server_is_configured():
# Allow Parameters in Embeds
# WARNING: Deprecated!
# See https://discuss.redash.io/t/support-for-parameters-in-embedded-visualizations/3337 for more details.
-ALLOW_PARAMETERS_IN_EMBEDS = parse_boolean(
- os.environ.get("REDASH_ALLOW_PARAMETERS_IN_EMBEDS", "false")
-)
+ALLOW_PARAMETERS_IN_EMBEDS = parse_boolean(os.environ.get("REDASH_ALLOW_PARAMETERS_IN_EMBEDS", "false"))
# Enhance schema fetching
SCHEMA_RUN_TABLE_SIZE_CALCULATIONS = parse_boolean(
@@ -513,9 +432,7 @@ def email_server_is_configured():
# kylin
KYLIN_OFFSET = int(os.environ.get("REDASH_KYLIN_OFFSET", 0))
KYLIN_LIMIT = int(os.environ.get("REDASH_KYLIN_LIMIT", 50000))
-KYLIN_ACCEPT_PARTIAL = parse_boolean(
- os.environ.get("REDASH_KYLIN_ACCEPT_PARTIAL", "false")
-)
+KYLIN_ACCEPT_PARTIAL = parse_boolean(os.environ.get("REDASH_KYLIN_ACCEPT_PARTIAL", "false"))
# sqlparse
SQLPARSE_FORMAT_OPTIONS = {
@@ -524,15 +441,11 @@ def email_server_is_configured():
}
# requests
-REQUESTS_ALLOW_REDIRECTS = parse_boolean(
- os.environ.get("REDASH_REQUESTS_ALLOW_REDIRECTS", "false")
-)
+REQUESTS_ALLOW_REDIRECTS = parse_boolean(os.environ.get("REDASH_REQUESTS_ALLOW_REDIRECTS", "false"))
# Enforces CSRF token validation on API requests.
# This is turned off by default to avoid breaking any existing deployments but it is highly recommended to turn this toggle on to prevent CSRF attacks.
-ENFORCE_CSRF = parse_boolean(
- os.environ.get("REDASH_ENFORCE_CSRF", "false")
-)
+ENFORCE_CSRF = parse_boolean(os.environ.get("REDASH_ENFORCE_CSRF", "false"))
# Databricks
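Note: the boolean settings in this file all follow one pattern — read an environment variable with a string default and coerce it once at import time. A minimal standalone sketch of that pattern (the accepted strings are illustrative; the real helper lives in redash/settings/helpers.py):

import os

def parse_boolean(s):
    # Standalone stand-in for redash.settings.helpers.parse_boolean; the exact
    # set of accepted strings in the real helper may differ.
    value = s.strip().lower()
    if value in ("true", "yes", "on", "1"):
        return True
    if value in ("false", "no", "off", "0"):
        return False
    raise ValueError("Invalid boolean value %r" % s)

# Same shape as the settings above: env var name plus a string default,
# evaluated once when the module is imported.
ENFORCE_CSRF = parse_boolean(os.environ.get("REDASH_ENFORCE_CSRF", "false"))
print(ENFORCE_CSRF)  # False unless REDASH_ENFORCE_CSRF is set to a truthy string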
diff --git a/redash/settings/dynamic_settings.py b/redash/settings/dynamic_settings.py
index 1f15712238..706cf17531 100644
--- a/redash/settings/dynamic_settings.py
+++ b/redash/settings/dynamic_settings.py
@@ -1,5 +1,6 @@
from collections import defaultdict
+
# Replace this method with your own implementation in case you want to limit the time limit on certain queries or users.
def query_time_limit(is_scheduled, user_id, org_id):
from redash import settings
@@ -58,6 +59,7 @@ def database_key_definitions(default):
return definitions
-# Since you can define custom primary key types using `database_key_definitions`, you may want to load certain extensions when creating the database.
+
+# Since you can define custom primary key types using `database_key_definitions`, you may want to load certain extensions when creating the database.
# To do so, simply add the name of the extension you'd like to load to this list.
-database_extensions = []
\ No newline at end of file
+database_extensions = []
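Note: the restored comment above describes the two customization hooks visible in this file, query_time_limit and database_extensions. A hedged sketch of what an override might look like (the limits and extension name below are illustrative, not Redash defaults):

# Hypothetical overrides dropped into redash/settings/dynamic_settings.py.
def query_time_limit(is_scheduled, user_id, org_id):
    # Give scheduled refreshes a longer budget than ad-hoc queries.
    if is_scheduled:
        return 3600  # seconds; illustrative value
    return 300       # seconds; illustrative value


# Extensions loaded when the database is created, as the comment describes.
database_extensions = ["citext"]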
diff --git a/redash/settings/helpers.py b/redash/settings/helpers.py
index 3fe95eecaa..1b5a5693c9 100644
--- a/redash/settings/helpers.py
+++ b/redash/settings/helpers.py
@@ -29,12 +29,14 @@ def parse_boolean(s):
else:
raise ValueError("Invalid boolean value %r" % s)
+
def cast_int_or_default(val, default=None):
try:
return int(val)
except (ValueError, TypeError):
return default
+
def int_or_none(value):
if value is None:
return value
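Note: the two helpers touched here differ only in how they treat bad input — cast_int_or_default swallows the failure and returns a default, while int_or_none only passes None through. A small usage sketch (int_or_none's final cast is not shown in the hunk and is assumed from its name):

def cast_int_or_default(val, default=None):
    # Returns the int value, or `default` when the cast fails.
    try:
        return int(val)
    except (ValueError, TypeError):
        return default


def int_or_none(value):
    # Passes None through untouched; otherwise casts (and may raise).
    if value is None:
        return value
    return int(value)


assert cast_int_or_default("42") == 42
assert cast_int_or_default("not a number", default=0) == 0
assert int_or_none(None) is None
assert int_or_none("7") == 7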
diff --git a/redash/settings/organization.py b/redash/settings/organization.py
index eb82a94c6b..947bc31ea5 100644
--- a/redash/settings/organization.py
+++ b/redash/settings/organization.py
@@ -1,18 +1,15 @@
import os
+
from .helpers import parse_boolean
if os.environ.get("REDASH_SAML_LOCAL_METADATA_PATH") is not None:
print("DEPRECATION NOTICE:\n")
- print(
- "SAML_LOCAL_METADATA_PATH is no longer supported. Only URL metadata is supported now, please update"
- )
+ print("SAML_LOCAL_METADATA_PATH is no longer supported. Only URL metadata is supported now, please update")
print("your configuration and reload.")
raise SystemExit(1)
-PASSWORD_LOGIN_ENABLED = parse_boolean(
- os.environ.get("REDASH_PASSWORD_LOGIN_ENABLED", "true")
-)
+PASSWORD_LOGIN_ENABLED = parse_boolean(os.environ.get("REDASH_PASSWORD_LOGIN_ENABLED", "true"))
SAML_LOGIN_TYPE = os.environ.get("REDASH_SAML_AUTH_TYPE", "")
SAML_METADATA_URL = os.environ.get("REDASH_SAML_METADATA_URL", "")
@@ -20,39 +17,34 @@
SAML_NAMEID_FORMAT = os.environ.get("REDASH_SAML_NAMEID_FORMAT", "")
SAML_SSO_URL = os.environ.get("REDASH_SAML_SSO_URL", "")
SAML_X509_CERT = os.environ.get("REDASH_SAML_X509_CERT", "")
-SAML_LOGIN_ENABLED = SAML_SSO_URL != "" and SAML_METADATA_URL != ""
+SAML_SP_SETTINGS = os.environ.get("REDASH_SAML_SP_SETTINGS", "")
+if SAML_LOGIN_TYPE == "static":
+ SAML_LOGIN_ENABLED = SAML_SSO_URL != "" and SAML_METADATA_URL != ""
+else:
+ SAML_LOGIN_ENABLED = SAML_METADATA_URL != ""
DATE_FORMAT = os.environ.get("REDASH_DATE_FORMAT", "DD/MM/YY")
TIME_FORMAT = os.environ.get("REDASH_TIME_FORMAT", "HH:mm")
INTEGER_FORMAT = os.environ.get("REDASH_INTEGER_FORMAT", "0,0")
FLOAT_FORMAT = os.environ.get("REDASH_FLOAT_FORMAT", "0,0.00")
-MULTI_BYTE_SEARCH_ENABLED = parse_boolean(
- os.environ.get("MULTI_BYTE_SEARCH_ENABLED", "false")
-)
+MULTI_BYTE_SEARCH_ENABLED = parse_boolean(os.environ.get("MULTI_BYTE_SEARCH_ENABLED", "false"))
JWT_LOGIN_ENABLED = parse_boolean(os.environ.get("REDASH_JWT_LOGIN_ENABLED", "false"))
JWT_AUTH_ISSUER = os.environ.get("REDASH_JWT_AUTH_ISSUER", "")
JWT_AUTH_PUBLIC_CERTS_URL = os.environ.get("REDASH_JWT_AUTH_PUBLIC_CERTS_URL", "")
JWT_AUTH_AUDIENCE = os.environ.get("REDASH_JWT_AUTH_AUDIENCE", "")
-JWT_AUTH_ALGORITHMS = os.environ.get(
- "REDASH_JWT_AUTH_ALGORITHMS", "HS256,RS256,ES256"
-).split(",")
+JWT_AUTH_ALGORITHMS = os.environ.get("REDASH_JWT_AUTH_ALGORITHMS", "HS256,RS256,ES256").split(",")
JWT_AUTH_COOKIE_NAME = os.environ.get("REDASH_JWT_AUTH_COOKIE_NAME", "")
JWT_AUTH_HEADER_NAME = os.environ.get("REDASH_JWT_AUTH_HEADER_NAME", "")
-FEATURE_SHOW_PERMISSIONS_CONTROL = parse_boolean(
- os.environ.get("REDASH_FEATURE_SHOW_PERMISSIONS_CONTROL", "false")
-)
+FEATURE_SHOW_PERMISSIONS_CONTROL = parse_boolean(os.environ.get("REDASH_FEATURE_SHOW_PERMISSIONS_CONTROL", "false"))
SEND_EMAIL_ON_FAILED_SCHEDULED_QUERIES = parse_boolean(
os.environ.get("REDASH_SEND_EMAIL_ON_FAILED_SCHEDULED_QUERIES", "false")
)
HIDE_PLOTLY_MODE_BAR = parse_boolean(os.environ.get("HIDE_PLOTLY_MODE_BAR", "false"))
-DISABLE_PUBLIC_URLS = parse_boolean(
- os.environ.get("REDASH_DISABLE_PUBLIC_URLS", "false")
-)
+DISABLE_PUBLIC_URLS = parse_boolean(os.environ.get("REDASH_DISABLE_PUBLIC_URLS", "false"))
settings = {
- "beacon_consent": None,
"auth_password_login_enabled": PASSWORD_LOGIN_ENABLED,
"auth_saml_enabled": SAML_LOGIN_ENABLED,
"auth_saml_type": SAML_LOGIN_TYPE,
@@ -61,6 +53,7 @@
"auth_saml_nameid_format": SAML_NAMEID_FORMAT,
"auth_saml_sso_url": SAML_SSO_URL,
"auth_saml_x509_cert": SAML_X509_CERT,
+ "auth_saml_sp_settings": SAML_SP_SETTINGS,
"date_format": DATE_FORMAT,
"time_format": TIME_FORMAT,
"integer_format": INTEGER_FORMAT,
diff --git a/redash/tasks/__init__.py b/redash/tasks/__init__.py
index 4f68fe0ef9..d7cb5623d5 100644
--- a/redash/tasks/__init__.py
+++ b/redash/tasks/__init__.py
@@ -1,28 +1,30 @@
-from .general import (
+from rq.connections import pop_connection, push_connection
+
+from redash import rq_redis_connection
+from redash.tasks.alerts import check_alerts_for_query
+from redash.tasks.failure_report import send_aggregated_errors
+from redash.tasks.general import (
record_event,
- version_check,
send_mail,
sync_user_details,
)
-from .queries import (
+from redash.tasks.queries import (
+ cleanup_query_results,
+ empty_schedules,
enqueue_query,
execute_query,
refresh_queries,
refresh_schemas,
- cleanup_query_results,
- empty_schedules,
remove_ghost_locks,
)
-from .alerts import check_alerts_for_query
-from .failure_report import send_aggregated_errors
-from .worker import Worker, Queue, Job
-from .schedule import rq_scheduler, schedule_periodic_jobs, periodic_job_definitions
-
-from redash import rq_redis_connection
-from rq.connections import push_connection, pop_connection
+from redash.tasks.schedule import (
+ periodic_job_definitions,
+ rq_scheduler,
+ schedule_periodic_jobs,
+)
+from redash.tasks.worker import Job, Queue, Worker
def init_app(app):
app.before_request(lambda: push_connection(rq_redis_connection))
app.teardown_request(lambda _: pop_connection())
-
diff --git a/redash/tasks/alerts.py b/redash/tasks/alerts.py
index 9fea0e4e22..2e8093a0ac 100644
--- a/redash/tasks/alerts.py
+++ b/redash/tasks/alerts.py
@@ -1,38 +1,32 @@
-from flask import current_app
import datetime
-from redash.worker import job, get_job_logger
-from redash import models, utils
+from flask import current_app
+
+from redash import models, utils
+from redash.worker import get_job_logger, job
logger = get_job_logger(__name__)
-def notify_subscriptions(alert, new_state):
+def notify_subscriptions(alert, new_state, metadata):
host = utils.base_url(alert.query_rel.org)
for subscription in alert.subscriptions:
try:
- subscription.notify(
- alert, alert.query_rel, subscription.user, new_state, current_app, host
- )
- except Exception as e:
+ subscription.notify(alert, alert.query_rel, subscription.user, new_state, current_app, host, metadata)
+ except Exception:
logger.exception("Error with processing destination")
def should_notify(alert, new_state):
passed_rearm_threshold = False
if alert.rearm and alert.last_triggered_at:
- passed_rearm_threshold = (
- alert.last_triggered_at + datetime.timedelta(seconds=alert.rearm)
- < utils.utcnow()
- )
+ passed_rearm_threshold = alert.last_triggered_at + datetime.timedelta(seconds=alert.rearm) < utils.utcnow()
- return new_state != alert.state or (
- alert.state == models.Alert.TRIGGERED_STATE and passed_rearm_threshold
- )
+ return new_state != alert.state or (alert.state == models.Alert.TRIGGERED_STATE and passed_rearm_threshold)
@job("default", timeout=300)
-def check_alerts_for_query(query_id):
+def check_alerts_for_query(query_id, metadata):
logger.debug("Checking query %d for alerts", query_id)
query = models.Query.query.get(query_id)
@@ -49,17 +43,12 @@ def check_alerts_for_query(query_id):
alert.last_triggered_at = utils.utcnow()
models.db.session.commit()
- if (
- old_state == models.Alert.UNKNOWN_STATE
- and new_state == models.Alert.OK_STATE
- ):
- logger.debug(
- "Skipping notification (previous state was unknown and now it's ok)."
- )
+ if old_state == models.Alert.UNKNOWN_STATE and new_state == models.Alert.OK_STATE:
+ logger.debug("Skipping notification (previous state was unknown and now it's ok).")
continue
if alert.muted:
logger.debug("Skipping notification (alert muted).")
continue
- notify_subscriptions(alert, new_state)
+ notify_subscriptions(alert, new_state, metadata)
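Note: should_notify, consolidated above, fires when the state changes or when a still-triggered alert has passed its rearm window. A self-contained sketch with concrete values (a now parameter stands in for utils.utcnow() so the example is deterministic, and FakeAlert is a stand-in for the model):

import datetime

TRIGGERED_STATE = "triggered"  # stand-in for models.Alert.TRIGGERED_STATE


class FakeAlert:
    def __init__(self, state, rearm, last_triggered_at):
        self.state = state
        self.rearm = rearm                      # seconds; falsy disables re-notification
        self.last_triggered_at = last_triggered_at


def should_notify(alert, new_state, now):
    passed_rearm_threshold = False
    if alert.rearm and alert.last_triggered_at:
        passed_rearm_threshold = alert.last_triggered_at + datetime.timedelta(seconds=alert.rearm) < now
    return new_state != alert.state or (alert.state == TRIGGERED_STATE and passed_rearm_threshold)


now = datetime.datetime(2024, 1, 1, 12, 0)
stale = FakeAlert(TRIGGERED_STATE, rearm=300, last_triggered_at=now - datetime.timedelta(minutes=10))
fresh = FakeAlert(TRIGGERED_STATE, rearm=300, last_triggered_at=now - datetime.timedelta(minutes=1))

assert should_notify(stale, TRIGGERED_STATE, now)      # still triggered, rearm window passed
assert not should_notify(fresh, TRIGGERED_STATE, now)  # still triggered, but within the rearm window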
diff --git a/redash/tasks/databricks.py b/redash/tasks/databricks.py
index 63849d9766..a9051a3825 100644
--- a/redash/tasks/databricks.py
+++ b/redash/tasks/databricks.py
@@ -1,9 +1,7 @@
-from rq.registry import FailedJobRegistry
from redash import models, redis_connection
-from redash.worker import job
from redash.tasks.worker import Queue
from redash.utils import json_dumps
-
+from redash.worker import job
DATABRICKS_REDIS_EXPIRATION_TIME = 3600
@@ -24,14 +22,13 @@ def get_databricks_databases(data_source_id, redis_key):
def get_database_tables_with_columns(data_source_id, database_name, redis_key):
try:
data_source = models.DataSource.get_by_id(data_source_id)
- tables = data_source.query_runner.get_database_tables_with_columns(
- database_name
- )
+ tables = data_source.query_runner.get_database_tables_with_columns(database_name)
# check for tables since it doesn't return an error when the requested database doesn't exist
if tables or redis_connection.exists(redis_key):
redis_connection.set(redis_key, json_dumps(tables))
redis_connection.expire(
- redis_key, DATABRICKS_REDIS_EXPIRATION_TIME,
+ redis_key,
+ DATABRICKS_REDIS_EXPIRATION_TIME,
)
return {"schema": tables, "has_columns": True}
except Exception:
@@ -42,9 +39,7 @@ def get_database_tables_with_columns(data_source_id, database_name, redis_key):
def get_databricks_tables(data_source_id, database_name):
try:
data_source = models.DataSource.get_by_id(data_source_id)
- tables = data_source.query_runner.get_database_tables_with_columns(
- database_name
- )
+ tables = data_source.query_runner.get_database_tables_with_columns(database_name)
return {"schema": tables, "has_columns": False}
except Exception:
return {"error": {"code": 2, "message": "Error retrieving schema."}}
diff --git a/redash/tasks/failure_report.py b/redash/tasks/failure_report.py
index 0339f3107e..4b8de625d8 100644
--- a/redash/tasks/failure_report.py
+++ b/redash/tasks/failure_report.py
@@ -1,9 +1,10 @@
import datetime
import re
from collections import Counter
+
+from redash import models, redis_connection, settings
from redash.tasks.general import send_mail
-from redash import redis_connection, settings, models
-from redash.utils import json_dumps, json_loads, base_url, render_template
+from redash.utils import base_url, json_dumps, json_loads, render_template
from redash.worker import get_job_logger
logger = get_job_logger(__name__)
@@ -53,13 +54,11 @@ def send_failure_report(user_id):
"base_url": base_url(user.org),
}
- subject = "Redash failed to execute {} of your scheduled queries".format(
- len(unique_errors.keys())
- )
+ subject = f"Redash failed to execute {len(unique_errors.keys())} of your scheduled queries"
html, text = [
render_template("emails/failures.{}".format(f), context)
for f in ["html", "txt"]
- ]
+ ] # fmt: skip
send_mail.delay([user.email], subject, html, text)
@@ -68,9 +67,7 @@ def send_failure_report(user_id):
def notify_of_failure(message, query):
subscribed = query.org.get_setting("send_email_on_failed_scheduled_queries")
- exceeded_threshold = (
- query.schedule_failures >= settings.MAX_FAILURE_REPORTS_PER_QUERY
- )
+ exceeded_threshold = query.schedule_failures >= settings.MAX_FAILURE_REPORTS_PER_QUERY
if subscribed and not query.user.is_disabled and not exceeded_threshold:
redis_connection.lpush(
@@ -81,9 +78,7 @@ def notify_of_failure(message, query):
"name": query.name,
"message": message,
"schedule_failures": query.schedule_failures,
- "failed_at": datetime.datetime.utcnow().strftime(
- "%B %d, %Y %I:%M%p UTC"
- ),
+ "failed_at": datetime.datetime.utcnow().strftime("%B %d, %Y %I:%M%p UTC"),
}
),
)
diff --git a/redash/tasks/general.py b/redash/tasks/general.py
index c482d1f450..532cbd1175 100644
--- a/redash/tasks/general.py
+++ b/redash/tasks/general.py
@@ -1,13 +1,11 @@
import requests
-from datetime import datetime
-
from flask_mail import Message
+
from redash import mail, models, settings
from redash.models import users
-from redash.version_check import run_version_check
-from redash.worker import job, get_job_logger
-from redash.tasks.worker import Queue
from redash.query_runner import NotSupported
+from redash.tasks.worker import Queue
+from redash.worker import get_job_logger, job
logger = get_job_logger(__name__)
@@ -31,27 +29,6 @@ def record_event(raw_event):
logger.exception("Failed posting to %s", hook)
-def version_check():
- run_version_check()
-
-
-@job("default")
-def subscribe(form):
- logger.info(
- "Subscribing to: [security notifications=%s], [newsletter=%s]",
- form["security_notifications"],
- form["newsletter"],
- )
- data = {
- "admin_name": form["name"],
- "admin_email": form["email"],
- "org_name": form["org_name"],
- "security_notifications": form["security_notifications"],
- "newsletter": form["newsletter"],
- }
- requests.post("https://beacon.redash.io/subscribe", json=data)
-
-
@job("emails")
def send_mail(to, subject, html, text):
try:
@@ -73,7 +50,7 @@ def test_connection(data_source_id):
return True
-@job("schemas", queue_class=Queue, at_front=True, timeout=300, ttl=90)
+@job("schemas", queue_class=Queue, at_front=True, timeout=settings.SCHEMAS_REFRESH_TIMEOUT, ttl=90)
def get_schema(data_source_id, refresh):
try:
data_source = models.DataSource.get_by_id(data_source_id)
@@ -85,8 +62,8 @@ def get_schema(data_source_id, refresh):
"message": "Data source type does not support retrieving schema",
}
}
- except Exception:
- return {"error": {"code": 2, "message": "Error retrieving schema."}}
+ except Exception as e:
+ return {"error": {"code": 2, "message": "Error retrieving schema", "details": str(e)}}
def sync_user_details():
diff --git a/redash/tasks/queries/__init__.py b/redash/tasks/queries/__init__.py
index dcdc3bc8d9..6ca0132db2 100644
--- a/redash/tasks/queries/__init__.py
+++ b/redash/tasks/queries/__init__.py
@@ -1,8 +1,8 @@
+from .execution import enqueue_query, execute_query
from .maintenance import (
- refresh_queries,
- refresh_schemas,
cleanup_query_results,
empty_schedules,
+ refresh_queries,
+ refresh_schemas,
remove_ghost_locks,
)
-from .execution import execute_query, enqueue_query
diff --git a/redash/tasks/queries/execution.py b/redash/tasks/queries/execution.py
index 2fe592801f..a863903cdb 100644
--- a/redash/tasks/queries/execution.py
+++ b/redash/tasks/queries/execution.py
@@ -1,18 +1,20 @@
import signal
+import sys
import time
-import redis
+from collections import deque
+import redis
from rq import get_current_job
+from rq.exceptions import NoSuchJobError
from rq.job import JobStatus
from rq.timeouts import JobTimeoutException
-from rq.exceptions import NoSuchJobError
from redash import models, redis_connection, settings
from redash.query_runner import InterruptException
-from redash.tasks.worker import Queue, Job
from redash.tasks.alerts import check_alerts_for_query
from redash.tasks.failure_report import track_failure
-from redash.utils import gen_query_hash, json_dumps, utcnow
+from redash.tasks.worker import Job, Queue
+from redash.utils import gen_query_hash, utcnow
from redash.worker import get_job_logger
logger = get_job_logger(__name__)
@@ -27,9 +29,7 @@ def _unlock(query_hash, data_source_id):
redis_connection.delete(_job_lock_id(query_hash, data_source_id))
-def enqueue_query(
- query, data_source, user_id, is_api_key=False, scheduled_query=None, metadata={}
-):
+def enqueue_query(query, data_source, user_id, is_api_key=False, scheduled_query=None, metadata={}):
query_hash = gen_query_hash(query)
logger.info("Inserting job for %s with metadata=%s", query_hash, metadata)
try_count = 0
@@ -57,7 +57,7 @@ def enqueue_query(
if job_complete:
message = "job found is complete (%s)" % status
elif job_cancelled:
- message = "job found has ben cancelled"
+ message = "job found has been cancelled"
except NoSuchJobError:
message = "job found has expired"
job_exists = False
@@ -79,9 +79,7 @@ def enqueue_query(
queue_name = data_source.queue_name
scheduled_query_id = None
- time_limit = settings.dynamic_settings.query_time_limit(
- scheduled_query, user_id, data_source.org_id
- )
+ time_limit = settings.dynamic_settings.query_time_limit(scheduled_query, user_id, data_source.org_id)
metadata["Queue"] = queue_name
queue = Queue(queue_name)
@@ -103,9 +101,7 @@ def enqueue_query(
if not scheduled_query:
enqueue_kwargs["result_ttl"] = settings.JOB_EXPIRY_TIME
- job = queue.enqueue(
- execute_query, query, data_source.id, metadata, **enqueue_kwargs
- )
+ job = queue.enqueue(execute_query, query, data_source.id, metadata, **enqueue_kwargs)
logger.info("[%s] Created new job: %s", query_hash, job.id)
pipe.set(
@@ -118,6 +114,8 @@ def enqueue_query(
except redis.WatchError:
continue
+ finally:
+ pipe.reset()
if not job:
logger.error("[Manager][%s] Failed adding job for query.", query_hash)
@@ -149,10 +147,32 @@ def _resolve_user(user_id, is_api_key, query_id):
return None
-class QueryExecutor(object):
- def __init__(
- self, query, data_source_id, user_id, is_api_key, metadata, is_scheduled_query
- ):
+def _get_size_iterative(dict_obj):
+ """Iteratively finds size of objects in bytes"""
+ seen = set()
+ size = 0
+ objects = deque([dict_obj])
+
+ while objects:
+ current = objects.popleft()
+ if id(current) in seen:
+ continue
+ seen.add(id(current))
+ size += sys.getsizeof(current)
+
+ if isinstance(current, dict):
+ objects.extend(current.keys())
+ objects.extend(current.values())
+ elif hasattr(current, "__dict__"):
+ objects.append(current.__dict__)
+ elif hasattr(current, "__iter__") and not isinstance(current, (str, bytes, bytearray)):
+ objects.extend(current)
+
+ return size
+
+
+class QueryExecutor:
+ def __init__(self, query, data_source_id, user_id, is_api_key, metadata, is_scheduled_query):
self.job = get_current_job()
self.query = query
self.data_source_id = data_source_id
@@ -164,7 +184,7 @@ def __init__(
models.Query.query.get(self.query_id)
if self.query_id and self.query_id != "adhoc"
else None
- )
+ ) # fmt: skip
# Close DB connection to prevent holding a connection for a long time while the query is executing.
models.db.session.close()
@@ -201,7 +221,7 @@ def run(self):
"job=execute_query query_hash=%s ds_id=%d data_length=%s error=[%s]",
self.query_hash,
self.data_source_id,
- data and len(data),
+ data and _get_size_iterative(data),
error,
)
@@ -235,7 +255,7 @@ def run(self):
models.db.session.commit() # make sure that alert sees the latest query result
self._log_progress("checking_alerts")
for query_id in updated_query_ids:
- check_alerts_for_query.delay(query_id)
+ check_alerts_for_query.delay(query_id, self.metadata)
self._log_progress("finished")
result = query_result.id
@@ -252,7 +272,7 @@ def _annotate_query(self, query_runner):
def _log_progress(self, state):
logger.info(
"job=execute_query state=%s query_hash=%s type=%s ds_id=%d "
- "job_id=%s queue=%s query_id=%s username=%s",
+ "job_id=%s queue=%s query_id=%s username=%s", # fmt: skip
state,
self.query_hash,
self.data_source.type,
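Note: the new _get_size_iterative helper replaces len(data) in the execution log with an approximate deep size in bytes. Re-stating the traversal from the hunk so the snippet runs on its own:

import sys
from collections import deque


def _get_size_iterative(dict_obj):
    """Approximate deep size in bytes, walking dicts, iterables and __dict__ attributes."""
    seen = set()
    size = 0
    objects = deque([dict_obj])
    while objects:
        current = objects.popleft()
        if id(current) in seen:
            continue
        seen.add(id(current))
        size += sys.getsizeof(current)
        if isinstance(current, dict):
            objects.extend(current.keys())
            objects.extend(current.values())
        elif hasattr(current, "__dict__"):
            objects.append(current.__dict__)
        elif hasattr(current, "__iter__") and not isinstance(current, (str, bytes, bytearray)):
            objects.extend(current)
    return size


rows = {"rows": [{"value": i, "label": "row %d" % i} for i in range(100)]}
# The deep size counts every nested key, value and container, so it is much
# larger than the size of the outer dict object alone.
assert _get_size_iterative(rows) > sys.getsizeof(rows)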
diff --git a/redash/tasks/queries/maintenance.py b/redash/tasks/queries/maintenance.py
index cd039296e7..bca3168c38 100644
--- a/redash/tasks/queries/maintenance.py
+++ b/redash/tasks/queries/maintenance.py
@@ -2,15 +2,16 @@
import time
from rq.timeouts import JobTimeoutException
+
from redash import models, redis_connection, settings, statsd_client
from redash.models.parameterized_query import (
InvalidParameterError,
QueryDetachedFromDataSourceError,
)
+from redash.monitor import rq_job_ids
from redash.tasks.failure_report import track_failure
from redash.utils import json_dumps, sentry
-from redash.worker import job, get_job_logger
-from redash.monitor import rq_job_ids
+from redash.worker import get_job_logger, job
from .execution import enqueue_query
@@ -56,16 +57,14 @@ def _apply_default_parameters(query):
try:
return query.parameterized.apply(parameters).query
except InvalidParameterError as e:
- error = u"Skipping refresh of {} because of invalid parameters: {}".format(
- query.id, str(e)
- )
+ error = f"Skipping refresh of {query.id} because of invalid parameters: {str(e)}"
track_failure(query, error)
raise
except QueryDetachedFromDataSourceError as e:
error = (
- "Skipping refresh of {} because a related dropdown "
- "query ({}) is unattached to any datasource."
- ).format(query.id, e.query_id)
+ f"Skipping refresh of {query.id} because a related dropdown "
+ f"query ({e.query_id}) is unattached to any datasource."
+ )
track_failure(query, error)
raise
else:
@@ -78,12 +77,11 @@ class RefreshQueriesError(Exception):
def _apply_auto_limit(query_text, query):
should_apply_auto_limit = query.options.get("apply_auto_limit", False)
- return query.data_source.query_runner.apply_auto_limit(
- query_text, should_apply_auto_limit
- )
+ return query.data_source.query_runner.apply_auto_limit(query_text, should_apply_auto_limit)
def refresh_queries():
+ started_at = time.time()
logger.info("Refreshing queries...")
enqueued = []
for query in models.Query.outdated_queries():
@@ -98,7 +96,7 @@ def refresh_queries():
query.data_source,
query.user_id,
scheduled_query=query,
- metadata={"query_id": query.id, "Username": "Scheduled"},
+ metadata={"query_id": query.id, "Username": query.user.get_actual_user()},
)
enqueued.append(query)
except Exception as e:
@@ -108,12 +106,13 @@ def refresh_queries():
sentry.capture_exception(error)
status = {
+ "started_at": started_at,
"outdated_queries_count": len(enqueued),
"last_refresh_at": time.time(),
"query_ids": json_dumps([q.id for q in enqueued]),
}
- redis_connection.hmset("redash:status", status)
+ redis_connection.hset("redash:status", mapping=status)
logger.info("Done refreshing queries: %s" % status)
@@ -132,13 +131,9 @@ def cleanup_query_results():
settings.QUERY_RESULTS_CLEANUP_MAX_AGE,
)
- unused_query_results = models.QueryResult.unused(
- settings.QUERY_RESULTS_CLEANUP_MAX_AGE
- )
+ unused_query_results = models.QueryResult.unused(settings.QUERY_RESULTS_CLEANUP_MAX_AGE)
deleted_count = models.QueryResult.query.filter(
- models.QueryResult.id.in_(
- unused_query_results.limit(settings.QUERY_RESULTS_CLEANUP_COUNT).subquery()
- )
+ models.QueryResult.id.in_(unused_query_results.limit(settings.QUERY_RESULTS_CLEANUP_COUNT).subquery())
).delete(synchronize_session=False)
models.db.session.commit()
logger.info("Deleted %d unused query results.", deleted_count)
@@ -162,33 +157,31 @@ def remove_ghost_locks():
logger.info("Locks found: {}, Locks removed: {}".format(len(locks), count))
-@job("schemas")
+@job("schemas", timeout=settings.SCHEMAS_REFRESH_TIMEOUT)
def refresh_schema(data_source_id):
ds = models.DataSource.get_by_id(data_source_id)
- logger.info(u"task=refresh_schema state=start ds_id=%s", ds.id)
+ logger.info("task=refresh_schema state=start ds_id=%s", ds.id)
start_time = time.time()
try:
ds.get_schema(refresh=True)
logger.info(
- u"task=refresh_schema state=finished ds_id=%s runtime=%.2f",
+ "task=refresh_schema state=finished ds_id=%s runtime=%.2f",
ds.id,
time.time() - start_time,
)
statsd_client.incr("refresh_schema.success")
except JobTimeoutException:
logger.info(
- u"task=refresh_schema state=timeout ds_id=%s runtime=%.2f",
+ "task=refresh_schema state=timeout ds_id=%s runtime=%.2f",
ds.id,
time.time() - start_time,
)
statsd_client.incr("refresh_schema.timeout")
except Exception:
- logger.warning(
- u"Failed refreshing schema for the data source: %s", ds.name, exc_info=1
- )
+ logger.warning("Failed refreshing schema for the data source: %s", ds.name, exc_info=1)
statsd_client.incr("refresh_schema.error")
logger.info(
- u"task=refresh_schema state=failed ds_id=%s runtime=%.2f",
+ "task=refresh_schema state=failed ds_id=%s runtime=%.2f",
ds.id,
time.time() - start_time,
)
@@ -198,34 +191,26 @@ def refresh_schemas():
"""
Refreshes the data sources schemas.
"""
- blacklist = [
- int(ds_id)
- for ds_id in redis_connection.smembers("data_sources:schema:blacklist")
- if ds_id
- ]
+ blacklist = [int(ds_id) for ds_id in redis_connection.smembers("data_sources:schema:blacklist") if ds_id]
global_start_time = time.time()
- logger.info(u"task=refresh_schemas state=start")
+ logger.info("task=refresh_schemas state=start")
for ds in models.DataSource.query:
if ds.paused:
logger.info(
- u"task=refresh_schema state=skip ds_id=%s reason=paused(%s)",
+ "task=refresh_schema state=skip ds_id=%s reason=paused(%s)",
ds.id,
ds.pause_reason,
)
elif ds.id in blacklist:
- logger.info(
- u"task=refresh_schema state=skip ds_id=%s reason=blacklist", ds.id
- )
+ logger.info("task=refresh_schema state=skip ds_id=%s reason=blacklist", ds.id)
elif ds.org.is_disabled:
- logger.info(
- u"task=refresh_schema state=skip ds_id=%s reason=org_disabled", ds.id
- )
+ logger.info("task=refresh_schema state=skip ds_id=%s reason=org_disabled", ds.id)
else:
refresh_schema.delay(ds.id)
logger.info(
- u"task=refresh_schemas state=finish total_runtime=%.2f",
+ "task=refresh_schemas state=finish total_runtime=%.2f",
time.time() - global_start_time,
)
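Note: refresh_queries now writes its status hash with hset(name, mapping=...) instead of the deprecated hmset. A sketch of the call shape against redis-py (assumes redis-py 3.5+ and a reachable local Redis instance; the values are illustrative):

import json
import time

import redis  # redis-py >= 3.5, where hset() accepts mapping= and hmset() is deprecated

r = redis.Redis()

status = {
    "started_at": time.time(),
    "outdated_queries_count": 2,
    "last_refresh_at": time.time(),
    "query_ids": json.dumps([17, 42]),  # illustrative query ids
}

# One HSET call sets all fields of the hash, replacing the old hmset() usage.
r.hset("redash:status", mapping=status)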
diff --git a/redash/tasks/schedule.py b/redash/tasks/schedule.py
index 5ed4b580e7..fd23a7500b 100644
--- a/redash/tasks/schedule.py
+++ b/redash/tasks/schedule.py
@@ -1,24 +1,22 @@
-from __future__ import absolute_import
-import logging
import hashlib
import json
+import logging
from datetime import datetime, timedelta
from rq.job import Job
from rq_scheduler import Scheduler
-from redash import extensions, settings, rq_redis_connection, statsd_client
-from redash.tasks import (
- sync_user_details,
- refresh_queries,
- remove_ghost_locks,
+from redash import rq_redis_connection, settings
+from redash.tasks.failure_report import send_aggregated_errors
+from redash.tasks.general import sync_user_details
+from redash.tasks.queries import (
+ cleanup_query_results,
empty_schedules,
+ refresh_queries,
refresh_schemas,
- cleanup_query_results,
- version_check,
- send_aggregated_errors,
- Queue,
+ remove_ghost_locks,
)
+from redash.tasks.worker import Queue
logger = logging.getLogger(__name__)
@@ -31,9 +29,7 @@ class StatsdRecordingScheduler(Scheduler):
queue_class = Queue
-rq_scheduler = StatsdRecordingScheduler(
- connection=rq_redis_connection, queue_name="periodic", interval=5
-)
+rq_scheduler = StatsdRecordingScheduler(connection=rq_redis_connection, queue_name="periodic", interval=5)
def job_id(kwargs):
@@ -70,7 +66,7 @@ def periodic_job_definitions():
{
"func": refresh_schemas,
"interval": timedelta(minutes=settings.SCHEMAS_REFRESH_SCHEDULE),
- },
+ },
{
"func": sync_user_details,
"timeout": 60,
@@ -83,19 +79,12 @@ def periodic_job_definitions():
},
]
- if settings.VERSION_CHECK:
- jobs.append({"func": version_check, "interval": timedelta(days=1)})
-
if settings.QUERY_RESULTS_CLEANUP_ENABLED:
jobs.append({"func": cleanup_query_results, "interval": timedelta(minutes=5)})
# Add your own custom periodic jobs in your dynamic_settings module.
jobs.extend(settings.dynamic_settings.periodic_jobs() or [])
- # Add periodic jobs that are shipped as part of Redash extensions
- extensions.load_periodic_jobs(logger)
- jobs.extend(list(extensions.periodic_jobs.values()))
-
return jobs
@@ -103,14 +92,11 @@ def schedule_periodic_jobs(jobs):
job_definitions = [prep(job) for job in jobs]
jobs_to_clean_up = Job.fetch_many(
- set([job.id for job in rq_scheduler.get_jobs()])
- - set([job_id(job) for job in job_definitions]),
+ set([job.id for job in rq_scheduler.get_jobs()]) - set([job_id(job) for job in job_definitions]),
rq_redis_connection,
)
- jobs_to_schedule = [
- job for job in job_definitions if job_id(job) not in rq_scheduler
- ]
+ jobs_to_schedule = [job for job in job_definitions if job_id(job) not in rq_scheduler]
for job in jobs_to_clean_up:
logger.info("Removing %s (%s) from schedule.", job.id, job.func_name)
diff --git a/redash/tasks/worker.py b/redash/tasks/worker.py
index 6a7d19379a..1983168d3d 100644
--- a/redash/tasks/worker.py
+++ b/redash/tasks/worker.py
@@ -1,13 +1,25 @@
import errno
import os
import signal
-import time
-from redash import statsd_client
-from rq import Queue as BaseQueue, get_current_job
-from rq.worker import HerokuWorker # HerokuWorker implements graceful shutdown on SIGTERM
+import sys
+
+from rq import Queue as BaseQueue
+from rq.job import Job as BaseJob
+from rq.job import JobStatus
+from rq.timeouts import HorseMonitorTimeoutException, UnixSignalDeathPenalty
from rq.utils import utcnow
-from rq.timeouts import UnixSignalDeathPenalty, HorseMonitorTimeoutException
-from rq.job import Job as BaseJob, JobStatus
+from rq.worker import (
+ HerokuWorker, # HerokuWorker implements graceful shutdown on SIGTERM
+ Worker,
+)
+
+from redash import statsd_client
+
+# HerokuWorker does not work in OSX https://github.com/getredash/redash/issues/5413
+if sys.platform == "darwin":
+ BaseWorker = Worker
+else:
+ BaseWorker = HerokuWorker
class CancellableJob(BaseJob):
@@ -41,7 +53,7 @@ class RedashQueue(StatsdRecordingQueue, CancellableQueue):
pass
-class StatsdRecordingWorker(HerokuWorker):
+class StatsdRecordingWorker(BaseWorker):
"""
RQ Worker Mixin that overrides `execute_job` to increment/modify metrics via Statsd
"""
@@ -59,7 +71,7 @@ def execute_job(self, job, queue):
statsd_client.incr("rq.jobs.failed.{}".format(queue.name))
-class HardLimitingWorker(HerokuWorker):
+class HardLimitingWorker(BaseWorker):
"""
RQ's work horses enforce time limits by setting a timed alarm and stopping jobs
when they reach their time limits. However, the work horse may be entirely blocked
@@ -110,9 +122,7 @@ def monitor_work_horse(self, job, queue):
job.started_at = utcnow()
while True:
try:
- with UnixSignalDeathPenalty(
- self.job_monitoring_interval, HorseMonitorTimeoutException
- ):
+ with UnixSignalDeathPenalty(self.job_monitoring_interval, HorseMonitorTimeoutException):
retpid, ret_val = os.waitpid(self._horse_pid, 0)
break
except HorseMonitorTimeoutException:
@@ -145,7 +155,6 @@ def monitor_work_horse(self, job, queue):
if job_status is None: # Job completed and its ttl has expired
return
if job_status not in [JobStatus.FINISHED, JobStatus.FAILED]:
-
if not job.ended_at:
job.ended_at = utcnow()
@@ -153,7 +162,7 @@ def monitor_work_horse(self, job, queue):
self.log.warning(
(
"Moving job to FailedJobRegistry "
- "(work-horse terminated unexpectedly; waitpid returned {})"
+ "(work-horse terminated unexpectedly; waitpid returned {})" # fmt: skip
).format(ret_val)
)
@@ -161,7 +170,7 @@ def monitor_work_horse(self, job, queue):
job,
queue=queue,
exc_string="Work-horse process was terminated unexpectedly "
- "(waitpid returned %s)" % ret_val,
+ "(waitpid returned %s)" % ret_val, # fmt: skip
)
diff --git a/redash/templates/emails/alert.html b/redash/templates/emails/alert.html
new file mode 100644
index 0000000000..a3e1ebd19f
--- /dev/null
+++ b/redash/templates/emails/alert.html
@@ -0,0 +1,92 @@
+<html>
+  <body>
+    <p>
+      STATUS: {{ALERT_STATUS}}
+    </p>
+    <p>
+      CONDITION:<br/>
+      {{QUERY_RESULT_VALUE}} {{ALERT_CONDITION}} {{ALERT_THRESHOLD}}
+    </p>
+    <p>
+      QUERY:
+      {{QUERY_NAME}}
+    </p>
+    <table>
+      <thead>
+        <tr>
+          {{#QUERY_RESULT_COLS}}
+          <th>{{friendly_name}}</th>
+          {{/QUERY_RESULT_COLS}}
+        </tr>
+      </thead>
+      <tbody>
+        {{#QUERY_RESULT_TABLE}}
+        <tr>
+          {{#.}}
+          <td>{{.}}</td>
+          {{/.}}
+        </tr>
+        {{/QUERY_RESULT_TABLE}}
+      </tbody>
+    </table>
+  </body>
+</html>