From 22eefae3da38abf81da5c266cad7926ec79af326 Mon Sep 17 00:00:00 2001 From: cccs-Dustin <96579982+cccs-Dustin@users.noreply.github.com> Date: Wed, 14 Sep 2022 13:42:44 -0400 Subject: [PATCH 1/5] Ran pre-commit hook on the front-end --- cccs-build/superset/Dockerfile | 1 - .../analytical-platform-requirements.txt | 2 +- cccs-build/superset/requirements.txt | 2 +- requirements/local.txt | 1 - superset-frontend/README.md | 20 ++- superset-frontend/src/cccs-viz/README.md | 25 ++- .../src/ApplicationLinks.tsx | 4 +- .../src/plugin/controlPanel.ts | 152 +++++++++--------- .../plugin-chart-at-a-glance-dns/README.md | 4 +- .../test/plugin/transformProps.test.ts | 2 +- .../plugin-chart-at-a-glance-ip/README.md | 6 +- .../src/plugin/controlPanel.ts | 4 +- .../README.md | 6 +- .../src/plugin/controlPanel.ts | 4 +- .../plugins/plugin-chart-cccs-grid/README.md | 4 +- .../plugin-chart-cccs-grid/src/bignumber.mjs | 70 -------- .../src/plugin/buildQuery.ts | 26 +-- .../src/plugin/controlPanel.tsx | 7 +- .../plugin-chart-gwwk-charts/README.md | 5 +- superset/cccs/datasets.sql | 51 +++--- superset/charts/post_processing.py | 28 +++- superset/common/query_context.py | 2 +- superset/common/query_context_factory.py | 2 +- superset/connectors/sqla/models.py | 12 +- superset/initialization/__init__.py | 2 +- superset/models/core.py | 1 - superset/proxy/api.py | 35 ++-- superset/utils/cache_manager.py | 1 + superset/utils/date_parser.py | 2 +- .../importexport/f1410ed7ec95_tests.py | 10 +- tests/jinja_context_addons_tests.py | 144 +++++++++-------- 31 files changed, 291 insertions(+), 344 deletions(-) diff --git a/cccs-build/superset/Dockerfile b/cccs-build/superset/Dockerfile index 1f61ece92288..27640b957bc5 100644 --- a/cccs-build/superset/Dockerfile +++ b/cccs-build/superset/Dockerfile @@ -25,4 +25,3 @@ ENV BUILD_NUMBER_VAR=${BUILD_NUMBER} USER superset - diff --git a/cccs-build/superset/analytical-platform-requirements.txt b/cccs-build/superset/analytical-platform-requirements.txt index 3a2a63e02a8c..eedd0e8cd462 100644 --- a/cccs-build/superset/analytical-platform-requirements.txt +++ b/cccs-build/superset/analytical-platform-requirements.txt @@ -1 +1 @@ -hogwarts-auth~=1.8.0.7820 \ No newline at end of file +hogwarts-auth~=1.8.0.7820 diff --git a/cccs-build/superset/requirements.txt b/cccs-build/superset/requirements.txt index 9e122fa76396..5574d9e0a701 100644 --- a/cccs-build/superset/requirements.txt +++ b/cccs-build/superset/requirements.txt @@ -9,4 +9,4 @@ trino>=0.313.0 mysql-connector-python==8.0.26 elasticsearch-dbapi==0.2.4 cachetools~=5.0.0 -typing-extensions<4,>=3.10 \ No newline at end of file +typing-extensions<4,>=3.10 diff --git a/requirements/local.txt b/requirements/local.txt index 5edda1dacede..c4bd3cd599b3 100644 --- a/requirements/local.txt +++ b/requirements/local.txt @@ -13,4 +13,3 @@ # The following packages are considered to be unsafe in a requirements file: # setuptools - diff --git a/superset-frontend/README.md b/superset-frontend/README.md index 99506680c39f..704de1c66219 100644 --- a/superset-frontend/README.md +++ b/superset-frontend/README.md @@ -1,32 +1,42 @@ To pull our own superset-ui packages from our Artifacts Feed when building locally, the steps below should be followed. # How to build locally + ## Setup credentials + ### Step 1 + Copy the code below to your [user .npmrc](https://docs.microsoft.com/en-us/azure/devops/artifacts/npm/npmrc?view=azure-devops). 
+ ``` -; begin auth token -//pkgs.dev.azure.com/cccs-analytical-platform/99130e50-b4e3-4d7d-873e-2a947f564b87/_packaging/analytical-platform/npm/registry/:username=cccs-analytical-platform -//pkgs.dev.azure.com/cccs-analytical-platform/99130e50-b4e3-4d7d-873e-2a947f564b87/_packaging/analytical-platform/npm/registry/:_password=[BASE64_ENCODED_PERSONAL_ACCESS_TOKEN] +; begin auth token +//pkgs.dev.azure.com/cccs-analytical-platform/99130e50-b4e3-4d7d-873e-2a947f564b87/_packaging/analytical-platform/npm/registry/:username=cccs-analytical-platform +//pkgs.dev.azure.com/cccs-analytical-platform/99130e50-b4e3-4d7d-873e-2a947f564b87/_packaging/analytical-platform/npm/registry/:_password=[BASE64_ENCODED_PERSONAL_ACCESS_TOKEN] //pkgs.dev.azure.com/cccs-analytical-platform/99130e50-b4e3-4d7d-873e-2a947f564b87/_packaging/analytical-platform/npm/registry/:email=npm requires email to be set but doesn't use the value -//pkgs.dev.azure.com/cccs-analytical-platform/99130e50-b4e3-4d7d-873e-2a947f564b87/_packaging/analytical-platform/npm/:username=cccs-analytical-platform -//pkgs.dev.azure.com/cccs-analytical-platform/99130e50-b4e3-4d7d-873e-2a947f564b87/_packaging/analytical-platform/npm/:_password=[BASE64_ENCODED_PERSONAL_ACCESS_TOKEN] +//pkgs.dev.azure.com/cccs-analytical-platform/99130e50-b4e3-4d7d-873e-2a947f564b87/_packaging/analytical-platform/npm/:username=cccs-analytical-platform +//pkgs.dev.azure.com/cccs-analytical-platform/99130e50-b4e3-4d7d-873e-2a947f564b87/_packaging/analytical-platform/npm/:_password=[BASE64_ENCODED_PERSONAL_ACCESS_TOKEN] //pkgs.dev.azure.com/cccs-analytical-platform/99130e50-b4e3-4d7d-873e-2a947f564b87/_packaging/analytical-platform/npm/:email=npm requires email to be set but doesn't use the value ; end auth token ``` ### Step 2 + Generate a [Personal Access Token](https://dev.azure.com/cccs-analytical-platform/_usersSettings/tokens) with Packaging read & write scopes. ### Step 3 + Base64 encode the personal access token from Step 2. One safe and secure method of Base64 encoding a string is to: + 1. From a command/shell prompt run: + ``` node -e "require('readline') .createInterface({input:process.stdin,output:process.stdout,historySize:0}) .question('PAT> ',p => { b64=Buffer.from(p.trim()).toString('base64');console.log(b64);process.exit(); })" ``` + 2. Paste your personal access token value and press Enter/Return 3. Copy the Base64 encoded value ### Step 4 + Replace both [BASE64_ENCODED_PERSONAL_ACCESS_TOKEN] values in your user .npmrc file with your personal access token from Step 3. diff --git a/superset-frontend/src/cccs-viz/README.md b/superset-frontend/src/cccs-viz/README.md index a42ac96306d8..d1901d3a45b8 100644 --- a/superset-frontend/src/cccs-viz/README.md +++ b/superset-frontend/src/cccs-viz/README.md @@ -17,14 +17,12 @@ specific language governing permissions and limitations under the License. --> -CCCS-VIZ for Superset -===================== +# CCCS-VIZ for Superset

-Creating a custom viz -===================== +# Creating a custom viz Modified instructions from https://superset.apache.org/docs/installation/building-custom-viz-plugins @@ -36,8 +34,8 @@ cd plugin-chart-data-grid yo @superset-ui/superset ``` - Edit `superset/superset-frontend/src/visualizations/presets/MainPreset.js` + ```diff + import { DataGridChartPlugin } from 'src/cccs-viz/plugins/'; @@ -53,6 +51,7 @@ Edit `superset/superset-frontend/src/visualizations/presets/MainPreset.js` ``` Run dev-server + ``` cd superset/superset-frontend @@ -66,13 +65,10 @@ Develop the custom viz. You can connect to port 9000 to test your modifications.

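A minimal sketch of starting the dev server once the plugin is wired into `MainPreset.js`, assuming the standard `dev-server` npm script from upstream `superset-frontend` (the script name and port flags may differ in this fork):

```bash
# Install workspace dependencies, then start the webpack dev server.
# The upstream dev server listens on port 9000 and proxies API calls
# to a running Superset backend.
cd superset/superset-frontend
npm ci
npm run dev-server
```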
- - -Test your code changes inside the docker container -================== - +# Test your code changes inside the docker container Now ready to test docker build + ```bash cd superset @@ -81,6 +77,7 @@ docker build -t 'apache/superset:latest-dev' . ``` You can test your docker image using docker-compose, the docker-compose.yaml uses the image we just built. + ```bash cd superset @@ -90,20 +87,16 @@ docker-compose up You can connect to superset on port 8088 to test the superset server running inside the docker container. - -


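As a quick smoke test once `docker-compose up` settles, you can probe the standard Superset health endpoint; this sketch assumes the default 8088 port mapping mentioned above:

```bash
# Expect an HTTP 200 response with the body "OK" when the app container is serving requests.
curl -i http://localhost:8088/health
```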
- - -Commiting your code -================== +# Commiting your code When it works locally from your own docker container you can commit your changes Add custom viz files and the config files that reference it + ```bash cd superset diff --git a/superset-frontend/src/cccs-viz/plugins/plugin-chart-application-links/src/ApplicationLinks.tsx b/superset-frontend/src/cccs-viz/plugins/plugin-chart-application-links/src/ApplicationLinks.tsx index 9c22627aeeb0..5ce8abcd4d88 100644 --- a/superset-frontend/src/cccs-viz/plugins/plugin-chart-application-links/src/ApplicationLinks.tsx +++ b/superset-frontend/src/cccs-viz/plugins/plugin-chart-application-links/src/ApplicationLinks.tsx @@ -108,7 +108,7 @@ export default function ApplicationLinks(props: ApplicationsProps) {

Alfred has seen {appVal.length > 1 ? 'these' : 'this'}{' '} {infoType} - {appVal.length > 1 ? "s" : ''} {alfredCount} time + {appVal.length > 1 ? 's' : ''} {alfredCount} time {alfredCount > 1 ? 's' : ''}. Search the{' '} Alfred @@ -119,7 +119,7 @@ export default function ApplicationLinks(props: ApplicationsProps) {

Alfred has not seen {appVal.length > 1 ? 'these' : 'this'}{' '} {infoType} - {appVal.length > 1 ? "s" : ''}. Search the{' '} + {appVal.length > 1 ? 's' : ''}. Search the{' '} Alfred {' '} diff --git a/superset-frontend/src/cccs-viz/plugins/plugin-chart-application-links/src/plugin/controlPanel.ts b/superset-frontend/src/cccs-viz/plugins/plugin-chart-application-links/src/plugin/controlPanel.ts index f9d27f76efcf..d95cba46d5da 100644 --- a/superset-frontend/src/cccs-viz/plugins/plugin-chart-application-links/src/plugin/controlPanel.ts +++ b/superset-frontend/src/cccs-viz/plugins/plugin-chart-application-links/src/plugin/controlPanel.ts @@ -16,26 +16,26 @@ * specific language governing permissions and limitations * under the License. */ - import { ensureIsArray, t, validateNonEmpty } from '@superset-ui/core'; - import { - ControlPanelConfig, - ControlPanelState, - ControlState, - ControlStateMapping, - sharedControls, - } from '@superset-ui/chart-controls'; - - const validateAggControlValues = ( - controls: ControlStateMapping, - values: any[], - ) => { - const areControlsEmpty = values.every(val => ensureIsArray(val).length === 0); - // @ts-ignore - return areControlsEmpty ? [t('Metrics must have a value')] : []; - }; - - const config: ControlPanelConfig = { - /** +import { ensureIsArray, t, validateNonEmpty } from '@superset-ui/core'; +import { + ControlPanelConfig, + ControlPanelState, + ControlState, + ControlStateMapping, + sharedControls, +} from '@superset-ui/chart-controls'; + +const validateAggControlValues = ( + controls: ControlStateMapping, + values: any[], +) => { + const areControlsEmpty = values.every(val => ensureIsArray(val).length === 0); + // @ts-ignore + return areControlsEmpty ? [t('Metrics must have a value')] : []; +}; + +const config: ControlPanelConfig = { + /** * The control panel is split into two tabs: "Query" and * "Chart Options". The controls that define the inputs to * the chart data request, such as columns and metrics, usually @@ -108,59 +108,59 @@ * - validateInteger: must be an integer value * - validateNumber: must be an intger or decimal value */ - - // For control input types, see: superset-frontend/src/explore/components/controls/index.js - controlPanelSections: [ - { - label: t('Query'), - expanded: true, - controlSetRows: [ - ['adhoc_filters'], - [ - { - name: 'metrics', - override: { - // visibility: () => true, - validators: [], - mapStateToProps: ( - state: ControlPanelState, - controlState: ControlState, - ) => { - const { controls } = state; - const originalMapStateToProps = - sharedControls?.metrics?.mapStateToProps; - const newState = - originalMapStateToProps?.(state, controlState) ?? 
{}; - newState.externalValidationErrors = validateAggControlValues( - controls, - [controlState.value], - ); - return newState; - }, - }, - }, - ], - [ - { - name: 'row_limit', - override: { - default: 1, - }, - }, - ], - ], - }, - ], - - controlOverrides: { - series: { - validators: [validateNonEmpty], - clearable: false, - }, - row_limit: { - default: 1, - }, - }, - }; - - export default config; + + // For control input types, see: superset-frontend/src/explore/components/controls/index.js + controlPanelSections: [ + { + label: t('Query'), + expanded: true, + controlSetRows: [ + ['adhoc_filters'], + [ + { + name: 'metrics', + override: { + // visibility: () => true, + validators: [], + mapStateToProps: ( + state: ControlPanelState, + controlState: ControlState, + ) => { + const { controls } = state; + const originalMapStateToProps = + sharedControls?.metrics?.mapStateToProps; + const newState = + originalMapStateToProps?.(state, controlState) ?? {}; + newState.externalValidationErrors = validateAggControlValues( + controls, + [controlState.value], + ); + return newState; + }, + }, + }, + ], + [ + { + name: 'row_limit', + override: { + default: 1, + }, + }, + ], + ], + }, + ], + + controlOverrides: { + series: { + validators: [validateNonEmpty], + clearable: false, + }, + row_limit: { + default: 1, + }, + }, +}; + +export default config; diff --git a/superset-frontend/src/cccs-viz/plugins/plugin-chart-at-a-glance-dns/README.md b/superset-frontend/src/cccs-viz/plugins/plugin-chart-at-a-glance-dns/README.md index 07f3dad33f1f..ff1c4526c813 100644 --- a/superset-frontend/src/cccs-viz/plugins/plugin-chart-at-a-glance-dns/README.md +++ b/superset-frontend/src/cccs-viz/plugins/plugin-chart-at-a-glance-dns/README.md @@ -8,9 +8,7 @@ Configure `key`, which can be any `string`, and register the plugin. This `key` ```js import AtAGlanceChartPlugin from '@superset-ui/plugin-chart-at-a-glance-dns'; -new AtAGlanceChartPlugin() - .configure({ key: 'at_a_glance_dns' }) - .register(); +new AtAGlanceChartPlugin().configure({ key: 'at_a_glance_dns' }).register(); ``` Then use it via `SuperChart`. See [storybook](https://apache-superset.github.io/superset-ui/?selectedKind=plugin-chart-at-a-glance) for more details. diff --git a/superset-frontend/src/cccs-viz/plugins/plugin-chart-at-a-glance-dns/test/plugin/transformProps.test.ts b/superset-frontend/src/cccs-viz/plugins/plugin-chart-at-a-glance-dns/test/plugin/transformProps.test.ts index ef3768fbd85f..6062437712c7 100644 --- a/superset-frontend/src/cccs-viz/plugins/plugin-chart-at-a-glance-dns/test/plugin/transformProps.test.ts +++ b/superset-frontend/src/cccs-viz/plugins/plugin-chart-at-a-glance-dns/test/plugin/transformProps.test.ts @@ -21,7 +21,7 @@ describe('AtAGlance tranformProps', () => { data: [{ name: 'Hulk', sum__num: 1 }], }, ], - theme: supersetTheme + theme: supersetTheme, }); it('should tranform chart props for viz', () => { diff --git a/superset-frontend/src/cccs-viz/plugins/plugin-chart-at-a-glance-ip/README.md b/superset-frontend/src/cccs-viz/plugins/plugin-chart-at-a-glance-ip/README.md index e2390c0eb249..e3e74ae5f802 100644 --- a/superset-frontend/src/cccs-viz/plugins/plugin-chart-at-a-glance-ip/README.md +++ b/superset-frontend/src/cccs-viz/plugins/plugin-chart-at-a-glance-ip/README.md @@ -1,7 +1,5 @@ ## @superset-ui/plugin-chart-at-a-glance-ip - - This plugin provides At A Glance for Superset. ### Usage @@ -10,9 +8,7 @@ Configure `key`, which can be any `string`, and register the plugin. 
This `key` ```js import AtAGlanceChartPlugin from '@superset-ui/plugin-chart-at-a-glance-ip'; -new AtAGlanceChartPlugin() - .configure({ key: 'at_a_glance_ip' }) - .register(); +new AtAGlanceChartPlugin().configure({ key: 'at_a_glance_ip' }).register(); ``` Then use it via `SuperChart`. See [storybook](https://apache-superset.github.io/superset-ui/?selectedKind=plugin-chart-at-a-glance) for more details. diff --git a/superset-frontend/src/cccs-viz/plugins/plugin-chart-at-a-glance-user-id-sas/src/plugin/controlPanel.ts b/superset-frontend/src/cccs-viz/plugins/plugin-chart-at-a-glance-user-id-sas/src/plugin/controlPanel.ts index be4baf2c932c..28254fd0b4c8 100644 --- a/superset-frontend/src/cccs-viz/plugins/plugin-chart-at-a-glance-user-id-sas/src/plugin/controlPanel.ts +++ b/superset-frontend/src/cccs-viz/plugins/plugin-chart-at-a-glance-user-id-sas/src/plugin/controlPanel.ts @@ -85,13 +85,13 @@ const validateAggControlValues = ( const columnChoices = (datasource: any) => { if (datasource?.columns) { return datasource.columns - .map((col : any) => [col.column_name, col.verbose_name || col.column_name]) + .map((col: any) => [col.column_name, col.verbose_name || col.column_name]) .sort((opt1: any, opt2: any) => opt1[1].toLowerCase() > opt2[1].toLowerCase() ? 1 : -1, ); } return []; -} +}; const config: ControlPanelConfig = { // For control input types, see: superset-frontend/src/explore/components/controls/index.js controlPanelSections: [ diff --git a/superset-frontend/src/cccs-viz/plugins/plugin-chart-at-a-glance-user-id/README.md b/superset-frontend/src/cccs-viz/plugins/plugin-chart-at-a-glance-user-id/README.md index 0d64a8d94d3d..0dae3c8bfed6 100644 --- a/superset-frontend/src/cccs-viz/plugins/plugin-chart-at-a-glance-user-id/README.md +++ b/superset-frontend/src/cccs-viz/plugins/plugin-chart-at-a-glance-user-id/README.md @@ -1,7 +1,5 @@ ## @superset-ui/plugin-chart-at-a-glance-user-id - - This plugin provides At A Glance User ID for Superset. ### Usage @@ -10,9 +8,7 @@ Configure `key`, which can be any `string`, and register the plugin. This `key` ```js import AtAGlanceChartPlugin from '@superset-ui/plugin-chart-at-a-glance-user-id'; -new AtAGlanceChartPlugin() - .configure({ key: 'at_a_glance_user_id' }) - .register(); +new AtAGlanceChartPlugin().configure({ key: 'at_a_glance_user_id' }).register(); ``` Then use it via `SuperChart`. See [storybook](https://apache-superset.github.io/superset-ui/?selectedKind=plugin-chart-at-a-glance-user-id) for more details. diff --git a/superset-frontend/src/cccs-viz/plugins/plugin-chart-at-a-glance-user-id/src/plugin/controlPanel.ts b/superset-frontend/src/cccs-viz/plugins/plugin-chart-at-a-glance-user-id/src/plugin/controlPanel.ts index bc590c126f82..6d9635df568c 100644 --- a/superset-frontend/src/cccs-viz/plugins/plugin-chart-at-a-glance-user-id/src/plugin/controlPanel.ts +++ b/superset-frontend/src/cccs-viz/plugins/plugin-chart-at-a-glance-user-id/src/plugin/controlPanel.ts @@ -62,13 +62,13 @@ const isRawMode = isQueryMode(QueryMode.raw); const columnChoices = (datasource: any) => { if (datasource?.columns) { return datasource.columns - .map((col : any) => [col.column_name, col.verbose_name || col.column_name]) + .map((col: any) => [col.column_name, col.verbose_name || col.column_name]) .sort((opt1: any, opt2: any) => opt1[1].toLowerCase() > opt2[1].toLowerCase() ? 
1 : -1, ); } return []; -} +}; const queryMode: ControlConfig<'RadioButtonControl'> = { type: 'RadioButtonControl', label: t('Query mode'), diff --git a/superset-frontend/src/cccs-viz/plugins/plugin-chart-cccs-grid/README.md b/superset-frontend/src/cccs-viz/plugins/plugin-chart-cccs-grid/README.md index 1bf756924523..1c82e203ecdf 100644 --- a/superset-frontend/src/cccs-viz/plugins/plugin-chart-cccs-grid/README.md +++ b/superset-frontend/src/cccs-viz/plugins/plugin-chart-cccs-grid/README.md @@ -11,9 +11,7 @@ Configure `key`, which can be any `string`, and register the plugin. This `key` ```js import CccsGridChartPlugin from '@superset-ui/plugin-chart-cccs-grid'; -new CccsGridChartPlugin() - .configure({ key: 'cccs-grid' }) - .register(); +new CccsGridChartPlugin().configure({ key: 'cccs-grid' }).register(); ``` Then use it via `SuperChart`. See [storybook](https://apache-superset.github.io/superset-ui/?selectedKind=plugin-chart-cccs-grid) for more details. diff --git a/superset-frontend/src/cccs-viz/plugins/plugin-chart-cccs-grid/src/bignumber.mjs b/superset-frontend/src/cccs-viz/plugins/plugin-chart-cccs-grid/src/bignumber.mjs index 128f5f27ebd8..81618cfff891 100644 --- a/superset-frontend/src/cccs-viz/plugins/plugin-chart-cccs-grid/src/bignumber.mjs +++ b/superset-frontend/src/cccs-viz/plugins/plugin-chart-cccs-grid/src/bignumber.mjs @@ -45,7 +45,6 @@ // * // */ - // var // isNumeric = /^-?(?:\d+(?:\.\d*)?|\.\d+)(?:e[+-]?\d+)?$/i, @@ -67,7 +66,6 @@ // // the arguments to toExponential, toFixed, toFormat, and toPrecision. // MAX = 1E9; // 0 to MAX_INT32 - // /* // * Create and return a BigNumber constructor. // */ @@ -76,10 +74,8 @@ // P = BigNumber.prototype = { constructor: BigNumber, toString: null, valueOf: null }, // ONE = new BigNumber(1), - // //----------------------------- EDITABLE CONFIG DEFAULTS ------------------------------- - // // The default values below must be integers within the inclusive ranges stated. // // The values can also be changed at run-time using BigNumber.set. @@ -162,13 +158,10 @@ // // '0123456789abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ$_' // ALPHABET = '0123456789abcdefghijklmnopqrstuvwxyz'; - // //------------------------------------------------------------------------------------------ - // // CONSTRUCTOR - // /* // * The BigNumber constructor and exported function. // * Create and return a new instance of a BigNumber object. @@ -372,10 +365,8 @@ // } // } - // // CONSTRUCTOR PROPERTIES - // BigNumber.clone = clone; // BigNumber.ROUND_UP = 0; @@ -389,7 +380,6 @@ // BigNumber.ROUND_HALF_FLOOR = 8; // BigNumber.EUCLID = 9; - // /* // * Configure infrequently-changing library-wide settings. // * @@ -570,7 +560,6 @@ // }; // }; - // /* // * Return true if v is a BigNumber instance, otherwise return false. // * @@ -626,7 +615,6 @@ // (bignumberError + 'Invalid BigNumber: ' + v); // }; - // /* // * Return a new BigNumber whose value is the maximum of the arguments. // * @@ -636,7 +624,6 @@ // return maxOrMin(arguments, P.lt); // }; - // /* // * Return a new BigNumber whose value is the minimum of the arguments. // * @@ -646,7 +633,6 @@ // return maxOrMin(arguments, P.gt); // }; - // /* // * Return a new BigNumber with a random value equal to or greater than 0 and less than 1, // * and with dp, or DECIMAL_PLACES if dp is omitted, decimal places (or less if trailing @@ -790,7 +776,6 @@ // }; // })(); - // /* // * Return a BigNumber whose value is the sum of the arguments. 
// * @@ -804,10 +789,8 @@ // return sum; // }; - // // PRIVATE FUNCTIONS - // // Called by BigNumber and BigNumber.prototype.toString. // convertBase = (function () { // var decimal = '0123456789'; @@ -958,7 +941,6 @@ // }; // })(); - // // Perform division in the specified base. Called by div and convertBase. // div = (function () { @@ -1218,7 +1200,6 @@ // }; // })(); - // /* // * Return a string representing the value of BigNumber n in fixed-point or exponential // * notation rounded to the specified decimal places or significant digits. @@ -1285,7 +1266,6 @@ // return n.s < 0 && c0 ? '-' + str : str; // } - // // Handle BigNumber.max and BigNumber.min. // function maxOrMin(args, method) { // var n, @@ -1307,7 +1287,6 @@ // return m; // } - // /* // * Strip trailing zeros, calculate base 10 exponent and check against MIN_EXP and MAX_EXP. // * Called by minus, plus and times. @@ -1341,7 +1320,6 @@ // return n; // } - // // Handle values that fail the validity test in BigNumber. // parseNumeric = (function () { // var basePrefix = /^(-?)0([xbo])(?=\w[\w.]*$)/i, @@ -1391,7 +1369,6 @@ // } // })(); - // /* // * Round x to sd significant digits using rounding mode rm. Check for over/under-flow. // * If r is truthy, it is known that there are more digits after the rounding digit. @@ -1553,7 +1530,6 @@ // return x; // } - // function valueOf(n) { // var str, // e = n.e; @@ -1569,10 +1545,8 @@ // return n.s < 0 ? '-' + str : str; // } - // // PROTOTYPE/INSTANCE METHODS - // /* // * Return a new BigNumber whose value is the absolute value of this BigNumber. // */ @@ -1582,7 +1556,6 @@ // return x; // }; - // /* // * Return // * 1 if the value of this BigNumber is greater than the value of BigNumber(y, b), @@ -1594,7 +1567,6 @@ // return compare(this, new BigNumber(y, b)); // }; - // /* // * If dp is undefined or null or true or false, return the number of decimal places of the // * value of this BigNumber, or null if the value of this BigNumber is ±Infinity or NaN. @@ -1630,7 +1602,6 @@ // return n; // }; - // /* // * n / 0 = I // * n / N = N @@ -1655,7 +1626,6 @@ // return div(this, new BigNumber(y, b), DECIMAL_PLACES, ROUNDING_MODE); // }; - // /* // * Return a new BigNumber whose value is the integer part of dividing the value of this // * BigNumber by the value of BigNumber(y, b). @@ -1664,7 +1634,6 @@ // return div(this, new BigNumber(y, b), 0, 1); // }; - // /* // * Return a BigNumber whose value is the value of this BigNumber exponentiated by n. // * @@ -1799,7 +1768,6 @@ // return m ? y.mod(m) : k ? round(y, POW_PRECISION, ROUNDING_MODE, more) : y; // }; - // /* // * Return a new BigNumber whose value is the value of this BigNumber rounded to an integer // * using rounding mode rm, or ROUNDING_MODE if rm is omitted. @@ -1815,7 +1783,6 @@ // return round(n, n.e + 1, rm); // }; - // /* // * Return true if the value of this BigNumber is equal to the value of BigNumber(y, b), // * otherwise return false. @@ -1824,7 +1791,6 @@ // return compare(this, new BigNumber(y, b)) === 0; // }; - // /* // * Return true if the value of this BigNumber is a finite number, otherwise return false. // */ @@ -1832,7 +1798,6 @@ // return !!this.c; // }; - // /* // * Return true if the value of this BigNumber is greater than the value of BigNumber(y, b), // * otherwise return false. @@ -1841,7 +1806,6 @@ // return compare(this, new BigNumber(y, b)) > 0; // }; - // /* // * Return true if the value of this BigNumber is greater than or equal to the value of // * BigNumber(y, b), otherwise return false. 
@@ -1851,7 +1815,6 @@ // }; - // /* // * Return true if the value of this BigNumber is an integer, otherwise return false. // */ @@ -1859,7 +1822,6 @@ // return !!this.c && bitFloor(this.e / LOG_BASE) > this.c.length - 2; // }; - // /* // * Return true if the value of this BigNumber is less than the value of BigNumber(y, b), // * otherwise return false. @@ -1868,7 +1830,6 @@ // return compare(this, new BigNumber(y, b)) < 0; // }; - // /* // * Return true if the value of this BigNumber is less than or equal to the value of // * BigNumber(y, b), otherwise return false. @@ -1877,7 +1838,6 @@ // return (b = compare(this, new BigNumber(y, b))) === -1 || b === 0; // }; - // /* // * Return true if the value of this BigNumber is NaN, otherwise return false. // */ @@ -1885,7 +1845,6 @@ // return !this.s; // }; - // /* // * Return true if the value of this BigNumber is negative, otherwise return false. // */ @@ -1893,7 +1852,6 @@ // return this.s < 0; // }; - // /* // * Return true if the value of this BigNumber is positive, otherwise return false. // */ @@ -1901,7 +1859,6 @@ // return this.s > 0; // }; - // /* // * Return true if the value of this BigNumber is 0 or -0, otherwise return false. // */ @@ -1909,7 +1866,6 @@ // return !!this.c && this.c[0] == 0; // }; - // /* // * n - 0 = n // * n - N = N @@ -2042,7 +1998,6 @@ // return normalise(y, xc, ye); // }; - // /* // * n % 0 = N // * n % N = N @@ -2100,7 +2055,6 @@ // return y; // }; - // /* // * n * 0 = 0 // * n * N = N @@ -2191,7 +2145,6 @@ // return normalise(y, zc, e); // }; - // /* // * Return a new BigNumber whose value is the value of this BigNumber negated, // * i.e. multiplied by -1. @@ -2202,7 +2155,6 @@ // return x; // }; - // /* // * n + 0 = n // * n + N = N @@ -2296,7 +2248,6 @@ // return normalise(y, xc, ye); // }; - // /* // * If sd is undefined or null or true or false, return the number of significant digits of // * the value of this BigNumber, or null if the value of this BigNumber is ±Infinity or NaN. @@ -2342,7 +2293,6 @@ // return n; // }; - // /* // * Return a new BigNumber whose value is the value of this BigNumber shifted by k places // * (powers of 10). Shift to the right if n > 0, and to the left if n < 0. @@ -2356,7 +2306,6 @@ // return this.times('1e' + k); // }; - // /* // * sqrt(-n) = N // * sqrt(N) = N @@ -2466,7 +2415,6 @@ // return round(r, r.e + DECIMAL_PLACES + 1, ROUNDING_MODE, m); // }; - // /* // * Return a string representing the value of this BigNumber in exponential notation and // * rounded using ROUNDING_MODE to dp fixed decimal places. @@ -2484,7 +2432,6 @@ // return format(this, dp, rm, 1); // }; - // /* // * Return a string representing the value of this BigNumber in fixed-point notation rounding // * to dp fixed decimal places using rounding mode rm, or ROUNDING_MODE if rm is omitted. @@ -2505,7 +2452,6 @@ // return format(this, dp, rm); // }; - // /* // * Return a string representing the value of this BigNumber in fixed-point notation rounded // * using rm or ROUNDING_MODE to dp decimal places, and formatted according to the properties @@ -2585,7 +2531,6 @@ // return (format.prefix || '') + str + (format.suffix || ''); // }; - // /* // * Return an array of two BigNumbers representing the value of this BigNumber as a simple // * fraction with an integer numerator and an integer denominator. @@ -2660,7 +2605,6 @@ // return r; // }; - // /* // * Return the value of this BigNumber converted to a number primitive. 
// */ @@ -2668,7 +2612,6 @@ // return +valueOf(this); // }; - // /* // * Return a string representing the value of this BigNumber rounded to sd significant digits // * using rounding mode rm or ROUNDING_MODE. If sd is less than the number of digits @@ -2685,7 +2628,6 @@ // return format(this, sd, rm, 2); // }; - // /* // * Return a string representing the value of this BigNumber in base b, or base 10 if b is // * omitted. If a base is specified, including base 10, round according to DECIMAL_PLACES and @@ -2730,7 +2672,6 @@ // return str; // }; - // /* // * Return as toString, but do not accept a base argument, and include the minus sign for // * negative zero. @@ -2739,7 +2680,6 @@ // return valueOf(this); // }; - // P._isBigNumber = true; // P[Symbol.toStringTag] = 'BigNumber'; @@ -2752,19 +2692,16 @@ // return BigNumber; // } - // // PRIVATE HELPER FUNCTIONS // // These functions don't need access to variables, // // e.g. DECIMAL_PLACES, in the scope of the `clone` function above. - // function bitFloor(n) { // var i = n | 0; // return n > 0 || n === i ? i : i - 1; // } - // // Return a coefficient array as a string of base 10 digits. // function coeffToString(a) { // var s, z, @@ -2785,7 +2722,6 @@ // return r.slice(0, j + 1 || 1); // } - // // Compare the value of BigNumbers x and y. // function compare(x, y) { // var a, b, @@ -2826,7 +2762,6 @@ // return k == l ? 0 : k > l ^ a ? 1 : -1; // } - // /* // * Check that n is a primitive number, an integer, and in range, otherwise throw. // */ @@ -2839,20 +2774,17 @@ // } // } - // // Assumes finite n. // function isOdd(n) { // var k = n.c.length - 1; // return bitFloor(n.e / LOG_BASE) == k && n.c[k] % 2 != 0; // } - // function toExponential(str, e) { // return (str.length > 1 ? str.charAt(0) + '.' + str.slice(1) : str) + // (e < 0 ? 
'e' : 'e+') + e; // } - // function toFixedPoint(str, e, z) { // var len, zs; @@ -2879,10 +2811,8 @@ // return str; // } - // // EXPORT - // export var BigNumber = clone(); // export default BigNumber; diff --git a/superset-frontend/src/cccs-viz/plugins/plugin-chart-cccs-grid/src/plugin/buildQuery.ts b/superset-frontend/src/cccs-viz/plugins/plugin-chart-cccs-grid/src/plugin/buildQuery.ts index 2374de369f5a..b1a0fd98c2f2 100644 --- a/superset-frontend/src/cccs-viz/plugins/plugin-chart-cccs-grid/src/plugin/buildQuery.ts +++ b/superset-frontend/src/cccs-viz/plugins/plugin-chart-cccs-grid/src/plugin/buildQuery.ts @@ -62,7 +62,8 @@ const buildQuery: BuildQuery = ( ...formData, ...DEFAULT_FORM_DATA, }; - const { percent_metrics: percentMetrics, order_desc: orderDesc = false } = formData; + const { percent_metrics: percentMetrics, order_desc: orderDesc = false } = + formData; // never include time in raw records mode if (queryMode === QueryMode.raw) { formDataCopy = { @@ -86,17 +87,20 @@ const buildQuery: BuildQuery = ( } // add postprocessing for percent metrics only when in aggregation mode if (percentMetrics && percentMetrics.length > 0) { - const percentMetricLabels = removeDuplicates(percentMetrics.map(getMetricLabel)); - metrics = removeDuplicates(metrics.concat(percentMetrics), getMetricLabel); - postProcessing.push( - { - operation: 'contribution', - options: { - columns: percentMetricLabels as string[], - rename_columns: percentMetricLabels.map(x => `%${x}`), - }, - }, + const percentMetricLabels = removeDuplicates( + percentMetrics.map(getMetricLabel), + ); + metrics = removeDuplicates( + metrics.concat(percentMetrics), + getMetricLabel, ); + postProcessing.push({ + operation: 'contribution', + options: { + columns: percentMetricLabels as string[], + rename_columns: percentMetricLabels.map(x => `%${x}`), + }, + }); } } diff --git a/superset-frontend/src/cccs-viz/plugins/plugin-chart-cccs-grid/src/plugin/controlPanel.tsx b/superset-frontend/src/cccs-viz/plugins/plugin-chart-cccs-grid/src/plugin/controlPanel.tsx index 45d3a1a73374..3ac3c2d453df 100644 --- a/superset-frontend/src/cccs-viz/plugins/plugin-chart-cccs-grid/src/plugin/controlPanel.tsx +++ b/superset-frontend/src/cccs-viz/plugins/plugin-chart-cccs-grid/src/plugin/controlPanel.tsx @@ -100,8 +100,6 @@ const validateAggControlValues = ( : []; }; - - const validateAggColumnValues = ( controls: ControlStateMapping, values: any[], @@ -191,10 +189,7 @@ const validateAggColumnValues = ( // return false; // } - -const defineSavedMetrics = ( - datasource: Dataset | QueryResponse | null, -) => +const defineSavedMetrics = (datasource: Dataset | QueryResponse | null) => datasource?.hasOwnProperty('metrics') ? (datasource as Dataset)?.metrics || [] : DEFAULT_METRICS; diff --git a/superset-frontend/src/cccs-viz/plugins/plugin-chart-gwwk-charts/README.md b/superset-frontend/src/cccs-viz/plugins/plugin-chart-gwwk-charts/README.md index 829305c71b21..a7894330050e 100644 --- a/superset-frontend/src/cccs-viz/plugins/plugin-chart-gwwk-charts/README.md +++ b/superset-frontend/src/cccs-viz/plugins/plugin-chart-gwwk-charts/README.md @@ -11,9 +11,7 @@ Configure `key`, which can be any `string`, and register the plugin. This `key` ```js import GWWKChartPlugin from '@superset-ui/plugin-chart-gwwk-charts'; -new GWWKChartPlugin() - .configure({ key: 'gwwk-charts' }) - .register(); +new GWWKChartPlugin().configure({ key: 'gwwk-charts' }).register(); ``` Then use it via `SuperChart`. 
See [storybook](https://apache-superset.github.io/superset-ui/?selectedKind=plugin-chart-gwwk-charts) for more details. @@ -29,4 +27,3 @@ Then use it via `SuperChart`. See [storybook](https://apache-superset.github.io/ }]} /> ``` - diff --git a/superset/cccs/datasets.sql b/superset/cccs/datasets.sql index d56827510eed..9ea492c2c66c 100644 --- a/superset/cccs/datasets.sql +++ b/superset/cccs/datasets.sql @@ -98,7 +98,7 @@ select MOD(end_ip_num/256, 256), '.', MOD(end_ip_num, 256) ) as end_ip_string, - case + case when MOD(end_ip_num, 256) % 5 = 0 then 'Canada' when MOD(end_ip_num, 256) % 5 = 1 then 'Austria' when MOD(end_ip_num, 256) % 5 = 2 then 'Belarus' @@ -123,11 +123,11 @@ select - - - - - + + + + + CREATE TABLE employees ( @@ -188,15 +188,15 @@ VALUES (3, 'Failed Login'), (4, 'Wrong Password'); - -create table cccs_aad as ( + +create table cccs_aad as ( select - event_time, - ip_num, - domain, - action_name, + event_time, + ip_num, + domain, + action_name, full_name, CONCAT( MOD(ip_num/16777216, 256) , '.', @@ -225,15 +225,15 @@ where create table cccs_virus_total as ( -select +select src_ip_string as ip_string, src_ip_num as ip_num, src_ip_num % 5 as malicious_level from ( - select distinct + select distinct src_ip_string, - src_ip_num - from cccs_flow + src_ip_num + from cccs_flow ) t ); @@ -358,17 +358,17 @@ VALUES create table cccs_domains_lookup as ( - select + select f.src_ip_string as ip_string, f.src_ip_num as ip_num, - d.domain_name as domain_name + d.domain_name as domain_name from (select distinct src_ip_string, src_ip_num from cccs_flow) f inner join domains d on (f.src_ip_num % 100) = d.id ); create table cccs_blocked_domains as ( - select + select domain_name, random() > 0.5 as quad_nine_block, random() > 0.5 as google_block, @@ -377,16 +377,3 @@ create table cccs_blocked_domains as ( random() > 0.5 as cira_block from (select distinct domain_name from cccs_domains_lookup) t ); - - - - - - - - - - - - - diff --git a/superset/charts/post_processing.py b/superset/charts/post_processing.py index d78370a11d78..db957ca94e14 100644 --- a/superset/charts/post_processing.py +++ b/superset/charts/post_processing.py @@ -31,15 +31,16 @@ from typing import Any, Dict, List, Optional, Tuple, TYPE_CHECKING import pandas as pd + from superset import app from superset.common.chart_data import ChartDataResultFormat +from superset.common.query_context import QueryContext from superset.utils.core import ( DTTM_ALIAS, extract_dataframe_dtypes, get_column_names, get_metric_names, ) -from superset.common.query_context import QueryContext if TYPE_CHECKING: from superset.connectors.base.models import BaseDatasource @@ -47,6 +48,7 @@ config = app.config logger = logging.getLogger(__name__) + def get_column_key(label: Tuple[str, ...], metrics: List[str]) -> Tuple[Any, ...]: """ Sort columns when combining metrics. @@ -318,7 +320,10 @@ def AgGrid(result: Dict[Any, Any], form_data: Dict[str, Any]) -> Dict[Any, Any]: return result -def AtAGlanceUserIDCore(result: Dict[Any, Any], form_data: Dict[str, Any]) -> Dict[Any, Any]: + +def AtAGlanceUserIDCore( + result: Dict[Any, Any], form_data: Dict[str, Any] +) -> Dict[Any, Any]: """ AAG User ID. 
""" @@ -338,10 +343,17 @@ def AtAGlanceUserIDCore(result: Dict[Any, Any], form_data: Dict[str, Any]) -> Di "at_a_glance_user_id": AgGrid, "at_a_glance_ip": AgGrid, "at_a_glance_dns": AgGrid, - "at_a_glance_user_id_sas": AgGrid + "at_a_glance_user_id_sas": AgGrid, } -rawPostProcess = ["cccs_grid", "at_a_glance_user_id", "at_a_glance_ip", "at_a_glance_dns", "at_a_glance_user_id_sas"] +rawPostProcess = [ + "cccs_grid", + "at_a_glance_user_id", + "at_a_glance_ip", + "at_a_glance_dns", + "at_a_glance_user_id_sas", +] + def apply_post_process( result: Dict[Any, Any], @@ -352,14 +364,18 @@ def apply_post_process( viz_type = form_data.get("viz_type", "") if not viz_type: - viz_type = result.get("query_context", QueryContext).viz_type if result.get("query_context", QueryContext).viz_type else "" + viz_type = ( + result.get("query_context", QueryContext).viz_type + if result.get("query_context", QueryContext).viz_type + else "" + ) if viz_type not in post_processors: return result post_processor = post_processors[viz_type] - if (result["query_context"].result_format == ChartDataResultFormat.CSV): + if result["query_context"].result_format == ChartDataResultFormat.CSV: for query in result["queries"]: df = pd.read_csv(StringIO(query["data"])) processed_df = post_processor(df, form_data) diff --git a/superset/common/query_context.py b/superset/common/query_context.py index 7d1e8e75190c..d8c3645f200a 100644 --- a/superset/common/query_context.py +++ b/superset/common/query_context.py @@ -71,7 +71,7 @@ def __init__( force: bool = False, custom_cache_timeout: Optional[int] = None, cache_values: Dict[str, Any], - viz_type: Optional[str] + viz_type: Optional[str], ) -> None: self.datasource = datasource self.result_type = result_type diff --git a/superset/common/query_context_factory.py b/superset/common/query_context_factory.py index 35ae9d07cd22..3bd38c501c36 100644 --- a/superset/common/query_context_factory.py +++ b/superset/common/query_context_factory.py @@ -51,7 +51,7 @@ def create( result_format: Optional[ChartDataResultFormat] = None, force: bool = False, custom_cache_timeout: Optional[int] = None, - viz_type: Optional[str] = None + viz_type: Optional[str] = None, ) -> QueryContext: datasource_model_instance = None if datasource: diff --git a/superset/connectors/sqla/models.py b/superset/connectors/sqla/models.py index c99a269a55f3..9cef18121774 100644 --- a/superset/connectors/sqla/models.py +++ b/superset/connectors/sqla/models.py @@ -1519,8 +1519,14 @@ def get_sqla_query( # pylint: disable=too-many-arguments,too-many-locals,too-ma utils.FilterOperator.NOT_IN.value, ) - col_advanced_data_type: str = col_obj.advanced_data_type if col_obj else "" - col_advanced_data_type = col_advanced_data_type.lower() if col_advanced_data_type else col_advanced_data_type + col_advanced_data_type: str = ( + col_obj.advanced_data_type if col_obj else "" + ) + col_advanced_data_type = ( + col_advanced_data_type.lower() + if col_advanced_data_type + else col_advanced_data_type + ) if col_spec and not col_advanced_data_type: target_generic_type = col_spec.generic_type else: @@ -2483,7 +2489,7 @@ def write_shadow_dataset( ) session.add(new_dataset) - + sa.event.listen(SqlaTable, "before_update", SqlaTable.before_update) sa.event.listen(SqlaTable, "after_insert", SqlaTable.after_insert) sa.event.listen(SqlaTable, "after_delete", SqlaTable.after_delete) diff --git a/superset/initialization/__init__.py b/superset/initialization/__init__.py index 0acc6d0d0072..3c5a43751834 100644 --- a/superset/initialization/__init__.py 
+++ b/superset/initialization/__init__.py @@ -132,7 +132,6 @@ def init_views(self) -> None: from superset.dashboards.filter_state.api import DashboardFilterStateRestApi from superset.dashboards.permalink.api import DashboardPermalinkRestApi from superset.databases.api import DatabaseRestApi - from superset.proxy.api import ProxyRestAPI from superset.datasets.api import DatasetRestApi from superset.datasets.columns.api import DatasetColumnsRestApi from superset.datasets.metrics.api import DatasetMetricRestApi @@ -141,6 +140,7 @@ def init_views(self) -> None: from superset.explore.form_data.api import ExploreFormDataRestApi from superset.explore.permalink.api import ExplorePermalinkRestApi from superset.importexport.api import ImportExportRestApi + from superset.proxy.api import ProxyRestAPI from superset.queries.api import QueryRestApi from superset.queries.saved_queries.api import SavedQueryRestApi from superset.reports.api import ReportScheduleRestApi diff --git a/superset/models/core.py b/superset/models/core.py index 4973ca87f73a..d21ac56dad5a 100755 --- a/superset/models/core.py +++ b/superset/models/core.py @@ -210,7 +210,6 @@ def allows_virtual_table_explore(self) -> bool: return bool(extra.get("allows_virtual_table_explore", True)) - @property def explore_database_id(self) -> int: return self.get_extra().get("explore_database_id", self.id) diff --git a/superset/proxy/api.py b/superset/proxy/api.py index c64b29f4a95e..3b0d3ae09e6c 100644 --- a/superset/proxy/api.py +++ b/superset/proxy/api.py @@ -96,7 +96,7 @@ def error_obtaining_response( and exception that was caught when trying to get a response from an application :param token_name: String value representing which app we are linking to - :param raised_exception: Exception value representing the error that occurred + :param raised_exception: Exception value representing the error that occurred :returns: Response generated from passing values to the attach_url function """ logger.error("Error obtaining %s response: %s", token_name, raised_exception) @@ -115,18 +115,19 @@ def make_alfred_connection(self, url: str) -> Response: :param url: String value representing the URL we will send an API call to :returns: Response generated from passing values to the attach_url function """ - if (self.ALFRED_SCOPE is None and self.ALFRED_URL is None): - return self.error_obtaining_response("Alfred", "No Alfred Scope and No Alfred URL") - if (self.ALFRED_SCOPE is None): + if self.ALFRED_SCOPE is None and self.ALFRED_URL is None: + return self.error_obtaining_response( + "Alfred", "No Alfred Scope and No Alfred URL" + ) + if self.ALFRED_SCOPE is None: return self.error_obtaining_response("Alfred", "No Alfred Scope") - if (self.ALFRED_URL is None): + if self.ALFRED_URL is None: return self.error_obtaining_response("Alfred", "No Alfred URL") try: - alfred_token = security_manager.get_on_behalf_of_access_token_with_cache(current_user.username, - self.ALFRED_SCOPE, - 'alfred', - cache_result=True) + alfred_token = security_manager.get_on_behalf_of_access_token_with_cache( + current_user.username, self.ALFRED_SCOPE, "alfred", cache_result=True + ) if not alfred_token: raise Exception("Unable to fetch Alfred token") except (requests.exceptions.HTTPError, Exception) as err: @@ -163,7 +164,7 @@ def get_userid(self, user_id: str, **_kwargs: Any) -> Response: :returns: Response generated from passing values to the make_alfred_connection function """ user_emails = user_id.split(",") - user_email_string = '' + user_email_string = "" if len(user_emails) > 0: 
user_email_string = user_emails[0] @@ -171,11 +172,11 @@ def get_userid(self, user_id: str, **_kwargs: Any) -> Response: user_email_string += "%22%2C%20%22" + user_emails[index] url = ( - self.ALFRED_URL - + "/rest/search/cypher?expression=MATCH%20(email%3AEMAIL_ADDRESS)%20WHERE%20email.value%20IN%20%5B%22" - + user_email_string - + "%22%5D%20RETURN%20email.value%2C%20email.maliciousness%2C%20email.uri" - ) + self.ALFRED_URL + + "/rest/search/cypher?expression=MATCH%20(email%3AEMAIL_ADDRESS)%20WHERE%20email.value%20IN%20%5B%22" + + user_email_string + + "%22%5D%20RETURN%20email.value%2C%20email.maliciousness%2C%20email.uri" + ) return self.make_alfred_connection(url) @@ -195,13 +196,13 @@ def get_ipstring(self, ip_string: str, **_kwargs: Any) -> Response: :returns: Response generated from passing values to the make_alfred_connection function """ user_ips = ip_string.split(",") - user_ip_string = '' + user_ip_string = "" if len(user_ips) > 0: user_ip_string = user_ips[0] for index in range(1, len(user_ips)): user_ip_string += "%22%2C%20%22" + user_ips[index] - + url = ( self.ALFRED_URL + "/rest/search/cypher?expression=MATCH%20(ip%3AIP_ADDRESS)%20WHERE%20ip.value%20IN%20%5B%22" diff --git a/superset/utils/cache_manager.py b/superset/utils/cache_manager.py index 7c40c32785a2..c307504a1b9c 100644 --- a/superset/utils/cache_manager.py +++ b/superset/utils/cache_manager.py @@ -48,6 +48,7 @@ def get(self, *args: Any, **kwargs: Any) -> Optional[Union[str, Markup]]: return cache + logger = logging.getLogger(__name__) CACHE_IMPORT_PATH = "superset.extensions.metastore_cache.SupersetMetastoreCache" diff --git a/superset/utils/date_parser.py b/superset/utils/date_parser.py index 54ac2ffdce10..ec35390aa1e2 100644 --- a/superset/utils/date_parser.py +++ b/superset/utils/date_parser.py @@ -40,6 +40,7 @@ Suppress, ) +from superset import app from superset.charts.commands.exceptions import ( TimeDeltaAmbiguousError, TimeRangeAmbiguousError, @@ -47,7 +48,6 @@ ) from superset.utils.core import NO_TIME_RANGE from superset.utils.memoized import memoized -from superset import app ParserElement.enablePackrat() diff --git a/tests/integration_tests/importexport/f1410ed7ec95_tests.py b/tests/integration_tests/importexport/f1410ed7ec95_tests.py index 2b48b56762b0..c60d0a74beb5 100644 --- a/tests/integration_tests/importexport/f1410ed7ec95_tests.py +++ b/tests/integration_tests/importexport/f1410ed7ec95_tests.py @@ -48,7 +48,11 @@ { "filterType": "filter_select", "cascadingFilters": True, - "defaultDataMask": {"filterState": {"value": ["Albania", "Algeria"],},}, + "defaultDataMask": { + "filterState": { + "value": ["Albania", "Algeria"], + }, + }, } ], "filter_sets_configuration": [ @@ -58,7 +62,9 @@ "filterType": "filter_select", "cascadingFilters": True, "defaultDataMask": { - "filterState": {"value": ["Albania", "Algeria"],}, + "filterState": { + "value": ["Albania", "Algeria"], + }, }, }, }, diff --git a/tests/jinja_context_addons_tests.py b/tests/jinja_context_addons_tests.py index 376f43641235..eab0b9cb5d20 100644 --- a/tests/jinja_context_addons_tests.py +++ b/tests/jinja_context_addons_tests.py @@ -14,134 +14,150 @@ # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. 
-from ipaddress import NetmaskValueError, AddressValueError +from ipaddress import AddressValueError, NetmaskValueError + from jinja_context_addons import * -# Imports Jinja Context Addons from PYTHONPATH env variable, make sure to point the env var to the dir containing it from tests.base_tests import SupersetTestCase +# Imports Jinja Context Addons from PYTHONPATH env variable, make sure to point the env var to the dir containing it + + class Jinja2ContextAddonsTest(SupersetTestCase): maxDiff = None # Test for Correctness def test_ipv4str_to_number_template(self) -> None: - rendered = ipv4str_to_number('192.168.0.0') + rendered = ipv4str_to_number("192.168.0.0") self.assertEqual(3232235520, rendered) def test_render_ipv4_column_template(self) -> None: test_filter = [ - { - "col": "src_ip_num", - "op": "==", - "val": "1.1.1.1" - }, - { - "col": "src_ip_num", - "op": "IN", - "val": ['3.3.3.3', '2.2.2.2'] - } + {"col": "src_ip_num", "op": "==", "val": "1.1.1.1"}, + {"col": "src_ip_num", "op": "IN", "val": ["3.3.3.3", "2.2.2.2"]}, ] rendered = render_ipv4_number_column(test_filter, "src_num_ip") - self.assertEqual(" AND (src_num_ip = 16843009) AND ((src_num_ip = 50529027) OR (src_num_ip = 33686018))", rendered) + self.assertEqual( + " AND (src_num_ip = 16843009) AND ((src_num_ip = 50529027) OR (src_num_ip = 33686018))", + rendered, + ) def test_render_ipv4_either_number_columns_template(self) -> None: test_filter = [ - { - "col": "src_ip_num", - "op": "==", - "val": ['3.0.0.0/8', '2.2.2.2'] - } + {"col": "src_ip_num", "op": "==", "val": ["3.0.0.0/8", "2.2.2.2"]} ] - rendered = render_ipv4_either_number_columns(test_filter, "src_num_ip", "dst_num_ip") - self.assertEqual(" AND ((src_num_ip >= 50331648 AND src_num_ip <= 67108863) OR (src_num_ip = 33686018) OR (dst_num_ip >= 50331648 AND dst_num_ip <= 67108863) OR (dst_num_ip = 33686018))", rendered) - + rendered = render_ipv4_either_number_columns( + test_filter, "src_num_ip", "dst_num_ip" + ) + self.assertEqual( + " AND ((src_num_ip >= 50331648 AND src_num_ip <= 67108863) OR (src_num_ip = 33686018) OR (dst_num_ip >= 50331648 AND dst_num_ip <= 67108863) OR (dst_num_ip = 33686018))", + rendered, + ) def test_render_ipv4_between_number_colums_template(self) -> None: test_filter = [ - { - "col": "src_ip_num", - "op": "LIKE", - "val": ['12.0.0.0/8', '2.0.0.0/16'] - } + {"col": "src_ip_num", "op": "LIKE", "val": ["12.0.0.0/8", "2.0.0.0/16"]} ] - rendered = render_ipv4_between_number_colums(test_filter, '1.1.1.1', '2.2.2.2' ) + rendered = render_ipv4_between_number_colums(test_filter, "1.1.1.1", "2.2.2.2") self.assertEqual( - ''' AND (( (1.1.1.1 <= 201326592 AND 2.2.2.2 >= 201326592) + """ AND (( (1.1.1.1 <= 201326592 AND 2.2.2.2 >= 201326592) OR (1.1.1.1 <= 218103807 AND 2.2.2.2 >= 218103807) OR (201326592 <= 1.1.1.1 AND 2.2.2.2 <= 218103807) ) OR ( (1.1.1.1 <= 33554432 AND 2.2.2.2 >= 33554432) OR (1.1.1.1 <= 33619967 AND 2.2.2.2 >= 33619967) - OR (33554432 <= 1.1.1.1 AND 2.2.2.2 <= 33619967) ))''' - , rendered) - + OR (33554432 <= 1.1.1.1 AND 2.2.2.2 <= 33619967) ))""", + rendered, + ) def test_render_in_conditions_template(self) -> None: - test_ip_array=['1.1.1.1','240.0.0.0/4'] + test_ip_array = ["1.1.1.1", "240.0.0.0/4"] rendered = render_in_conditions(test_ip_array, "src_num_ip") - self.assertEqual(['(src_num_ip = 16843009)', '(src_num_ip >= 4026531840 AND src_num_ip <= 4294967295)'], rendered) + self.assertEqual( + [ + "(src_num_ip = 16843009)", + "(src_num_ip >= 4026531840 AND src_num_ip <= 4294967295)", + ], + rendered, + ) def 
test_dashboard_link_template(self) -> None: # TODO Update this test once the dashboard function is fully implemented and complete test_link_label = "LABEL" test_dashboard_id = 2301 - test_src_column = 'test_col' - test_target_column = 'target_col' + test_src_column = "test_col" + test_target_column = "target_col" - rendered = dashboard_link(test_link_label, test_dashboard_id, test_src_column, test_target_column) - self.assertEqual(" concat('LABEL' ) ", rendered) + rendered = dashboard_link( + test_link_label, test_dashboard_id, test_src_column, test_target_column + ) + self.assertEqual( + " concat('LABEL' ) ", + rendered, + ) - #Test for Exceptions + # Test for Exceptions def test_ipv4str_to_number_template_invalid_ip(self) -> None: # Invalid Ip 1912.168.0.0 - self.assertRaises(OSError, ipv4str_to_number, '1912.168.0.0') + self.assertRaises(OSError, ipv4str_to_number, "1912.168.0.0") def test_render_ipv4_column_template_exception(self) -> None: # The ValueError in this test case comes from the '3.3.3.3/8' CIDR # This is because the correct way to describe that range of ip's is to start from 3.0.0.0/8 test_filter = [ - { - "col": "src_ip_num", - "op": "==", - "val": "1.1.1.1" - }, - { - "col": "src_ip_num", - "op": "IN", - "val": ['3.3.3.3/8'] - } + {"col": "src_ip_num", "op": "==", "val": "1.1.1.1"}, + {"col": "src_ip_num", "op": "IN", "val": ["3.3.3.3/8"]}, ] - self.assertRaises(ValueError, render_ipv4_number_column, test_filter, 'src_ip_num' ) + self.assertRaises( + ValueError, render_ipv4_number_column, test_filter, "src_ip_num" + ) def test_render_ipv4_either_number_columns_template_invalid_cidr(self) -> None: # Invalid error cidr comes from 2.2.2.200/34 test_filter = [ - { - "col": "src_ip_num", - "op": "==", - "val": ['3.0.0.0/8', '2.2.2.200/34'] - } + {"col": "src_ip_num", "op": "==", "val": ["3.0.0.0/8", "2.2.2.200/34"]} ] - self.assertRaises(NetmaskValueError, render_ipv4_either_number_columns, test_filter, "src_num_ip", "dst_num_ip") + self.assertRaises( + NetmaskValueError, + render_ipv4_either_number_columns, + test_filter, + "src_num_ip", + "dst_num_ip", + ) def test_render_ipv4_between_number_colums_template_invalid_arguments(self) -> None: test_filter = [ { "col": "src_ip_num", "op": "2", - "val": ['255.255.255.255/0', '80.0.0.0/16'] + "val": ["255.255.255.255/0", "80.0.0.0/16"], } ] - self.assertRaises(ValueError, render_ipv4_between_number_colums, test_filter, '1.1.1.1', '2.2.2.2') + self.assertRaises( + ValueError, + render_ipv4_between_number_colums, + test_filter, + "1.1.1.1", + "2.2.2.2", + ) def test_render_in_conditions_template_invalid_cidr(self) -> None: - test_ip_array=['1.10.0.1.1','240.0.0.0/4.0'] - self.assertRaises(AddressValueError, render_in_conditions, test_ip_array, "src_num_ip") + test_ip_array = ["1.10.0.1.1", "240.0.0.0/4.0"] + self.assertRaises( + AddressValueError, render_in_conditions, test_ip_array, "src_num_ip" + ) def test_dashboard_link_template_invalid_label_type(self) -> None: # TODO Update this test once the dashboard function is fully implemented and complete test_link_label = 123 test_dashboard_id = -100 - test_src_column = 'test_col' - test_target_column = 'target_col' - self.assertRaises(TypeError, dashboard_link, test_link_label, test_dashboard_id, test_src_column, test_target_column) + test_src_column = "test_col" + test_target_column = "target_col" + self.assertRaises( + TypeError, + dashboard_link, + test_link_label, + test_dashboard_id, + test_src_column, + test_target_column, + ) From b11c79d8f72621d437690a6b23c9d060d1cd9f4b Mon Sep 
17 00:00:00 2001 From: cccs-Dustin <96579982+cccs-Dustin@users.noreply.github.com> Date: Wed, 14 Sep 2022 14:35:56 -0400 Subject: [PATCH 2/5] Removed unused file & removed duplicate method --- superset/utils/cache_manager.py | 27 ----- tests/jinja_context_addons_tests.py | 163 ---------------------------- 2 files changed, 190 deletions(-) delete mode 100644 tests/jinja_context_addons_tests.py diff --git a/superset/utils/cache_manager.py b/superset/utils/cache_manager.py index c307504a1b9c..0a0f9c71b78f 100644 --- a/superset/utils/cache_manager.py +++ b/superset/utils/cache_manager.py @@ -91,33 +91,6 @@ def _init_cache( cache.init_app(app, cache_config) - @staticmethod - def _init_cache( - app: Flask, cache: Cache, cache_config_key: str, required: bool = False - ) -> None: - cache_config = app.config[cache_config_key] - cache_type = cache_config.get("CACHE_TYPE") - if (required and cache_type is None) or cache_type == "SupersetMetastoreCache": - if cache_type is None and not app.debug: - logger.warning( - "Falling back to the built-in cache, that stores data in the " - "metadata database, for the followinng cache: `%s`. " - "It is recommended to use `RedisCache`, `MemcachedCache` or " - "another dedicated caching backend for production deployments", - cache_config_key, - ) - cache_type = CACHE_IMPORT_PATH - cache_key_prefix = cache_config.get("CACHE_KEY_PREFIX", cache_config_key) - cache_config.update( - {"CACHE_TYPE": cache_type, "CACHE_KEY_PREFIX": cache_key_prefix} - ) - - if cache_type is not None and "CACHE_DEFAULT_TIMEOUT" not in cache_config: - default_timeout = app.config.get("CACHE_DEFAULT_TIMEOUT") - cache_config["CACHE_DEFAULT_TIMEOUT"] = default_timeout - - cache.init_app(app, cache_config) - def init_app(self, app: Flask) -> None: self._init_cache(app, self._cache, "CACHE_CONFIG") self._init_cache(app, self._data_cache, "DATA_CACHE_CONFIG") diff --git a/tests/jinja_context_addons_tests.py b/tests/jinja_context_addons_tests.py deleted file mode 100644 index eab0b9cb5d20..000000000000 --- a/tests/jinja_context_addons_tests.py +++ /dev/null @@ -1,163 +0,0 @@ -# Licensed to the Apache Software Foundation (ASF) under one -# or more contributor license agreements. See the NOTICE file -# distributed with this work for additional information -# regarding copyright ownership. The ASF licenses this file -# to you under the Apache License, Version 2.0 (the -# "License"); you may not use this file except in compliance -# with the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# KIND, either express or implied. See the License for the -# specific language governing permissions and limitations -# under the License. 
-from ipaddress import AddressValueError, NetmaskValueError - -from jinja_context_addons import * - -from tests.base_tests import SupersetTestCase - -# Imports Jinja Context Addons from PYTHONPATH env variable, make sure to point the env var to the dir containing it - - -class Jinja2ContextAddonsTest(SupersetTestCase): - maxDiff = None - - # Test for Correctness - - def test_ipv4str_to_number_template(self) -> None: - rendered = ipv4str_to_number("192.168.0.0") - self.assertEqual(3232235520, rendered) - - def test_render_ipv4_column_template(self) -> None: - test_filter = [ - {"col": "src_ip_num", "op": "==", "val": "1.1.1.1"}, - {"col": "src_ip_num", "op": "IN", "val": ["3.3.3.3", "2.2.2.2"]}, - ] - rendered = render_ipv4_number_column(test_filter, "src_num_ip") - self.assertEqual( - " AND (src_num_ip = 16843009) AND ((src_num_ip = 50529027) OR (src_num_ip = 33686018))", - rendered, - ) - - def test_render_ipv4_either_number_columns_template(self) -> None: - test_filter = [ - {"col": "src_ip_num", "op": "==", "val": ["3.0.0.0/8", "2.2.2.2"]} - ] - rendered = render_ipv4_either_number_columns( - test_filter, "src_num_ip", "dst_num_ip" - ) - self.assertEqual( - " AND ((src_num_ip >= 50331648 AND src_num_ip <= 67108863) OR (src_num_ip = 33686018) OR (dst_num_ip >= 50331648 AND dst_num_ip <= 67108863) OR (dst_num_ip = 33686018))", - rendered, - ) - - def test_render_ipv4_between_number_colums_template(self) -> None: - test_filter = [ - {"col": "src_ip_num", "op": "LIKE", "val": ["12.0.0.0/8", "2.0.0.0/16"]} - ] - rendered = render_ipv4_between_number_colums(test_filter, "1.1.1.1", "2.2.2.2") - self.assertEqual( - """ AND (( (1.1.1.1 <= 201326592 AND 2.2.2.2 >= 201326592) - OR (1.1.1.1 <= 218103807 AND 2.2.2.2 >= 218103807) - OR (201326592 <= 1.1.1.1 AND 2.2.2.2 <= 218103807) ) OR ( (1.1.1.1 <= 33554432 AND 2.2.2.2 >= 33554432) - OR (1.1.1.1 <= 33619967 AND 2.2.2.2 >= 33619967) - OR (33554432 <= 1.1.1.1 AND 2.2.2.2 <= 33619967) ))""", - rendered, - ) - - def test_render_in_conditions_template(self) -> None: - test_ip_array = ["1.1.1.1", "240.0.0.0/4"] - rendered = render_in_conditions(test_ip_array, "src_num_ip") - self.assertEqual( - [ - "(src_num_ip = 16843009)", - "(src_num_ip >= 4026531840 AND src_num_ip <= 4294967295)", - ], - rendered, - ) - - def test_dashboard_link_template(self) -> None: - # TODO Update this test once the dashboard function is fully implemented and complete - test_link_label = "LABEL" - test_dashboard_id = 2301 - test_src_column = "test_col" - test_target_column = "target_col" - - rendered = dashboard_link( - test_link_label, test_dashboard_id, test_src_column, test_target_column - ) - self.assertEqual( - " concat('LABEL' ) ", - rendered, - ) - - # Test for Exceptions - - def test_ipv4str_to_number_template_invalid_ip(self) -> None: - # Invalid Ip 1912.168.0.0 - self.assertRaises(OSError, ipv4str_to_number, "1912.168.0.0") - - def test_render_ipv4_column_template_exception(self) -> None: - # The ValueError in this test case comes from the '3.3.3.3/8' CIDR - # This is because the correct way to describe that range of ip's is to start from 3.0.0.0/8 - test_filter = [ - {"col": "src_ip_num", "op": "==", "val": "1.1.1.1"}, - {"col": "src_ip_num", "op": "IN", "val": ["3.3.3.3/8"]}, - ] - self.assertRaises( - ValueError, render_ipv4_number_column, test_filter, "src_ip_num" - ) - - def test_render_ipv4_either_number_columns_template_invalid_cidr(self) -> None: - # Invalid error cidr comes from 2.2.2.200/34 - test_filter = [ - {"col": "src_ip_num", "op": "==", "val": 
["3.0.0.0/8", "2.2.2.200/34"]} - ] - self.assertRaises( - NetmaskValueError, - render_ipv4_either_number_columns, - test_filter, - "src_num_ip", - "dst_num_ip", - ) - - def test_render_ipv4_between_number_colums_template_invalid_arguments(self) -> None: - test_filter = [ - { - "col": "src_ip_num", - "op": "2", - "val": ["255.255.255.255/0", "80.0.0.0/16"], - } - ] - self.assertRaises( - ValueError, - render_ipv4_between_number_colums, - test_filter, - "1.1.1.1", - "2.2.2.2", - ) - - def test_render_in_conditions_template_invalid_cidr(self) -> None: - test_ip_array = ["1.10.0.1.1", "240.0.0.0/4.0"] - self.assertRaises( - AddressValueError, render_in_conditions, test_ip_array, "src_num_ip" - ) - - def test_dashboard_link_template_invalid_label_type(self) -> None: - # TODO Update this test once the dashboard function is fully implemented and complete - test_link_label = 123 - test_dashboard_id = -100 - test_src_column = "test_col" - test_target_column = "target_col" - self.assertRaises( - TypeError, - dashboard_link, - test_link_label, - test_dashboard_id, - test_src_column, - test_target_column, - ) From 93fac496beb6b894cf9f7191bb716be9a04a0191 Mon Sep 17 00:00:00 2001 From: cccs-Dustin <96579982+cccs-Dustin@users.noreply.github.com> Date: Thu, 15 Sep 2022 12:56:36 -0400 Subject: [PATCH 3/5] Made changes so that the pre-commit hook would pass all tests --- superset/charts/post_processing.py | 8 ++--- superset/proxy/api.py | 18 +++++----- superset/utils/date_parser.py | 2 +- .../reports/commands_tests.py | 35 ------------------- .../migrations/shared/utils_test.py | 4 ++- 5 files changed, 17 insertions(+), 50 deletions(-) diff --git a/superset/charts/post_processing.py b/superset/charts/post_processing.py index db957ca94e14..44131afe942b 100644 --- a/superset/charts/post_processing.py +++ b/superset/charts/post_processing.py @@ -378,7 +378,7 @@ def apply_post_process( if result["query_context"].result_format == ChartDataResultFormat.CSV: for query in result["queries"]: df = pd.read_csv(StringIO(query["data"])) - processed_df = post_processor(df, form_data) + processed_df = post_processor(df, form_data) # type: ignore buf = StringIO() processed_df.to_csv(buf) @@ -389,9 +389,9 @@ def apply_post_process( query["coltypes"] = extract_dataframe_dtypes(processed_df) query["rowcount"] = len(processed_df.index) else: - result = post_processor(result, form_data) + result = post_processor(result, form_data) # type: ignore if viz_type in rawPostProcess: - result = post_processor(result, form_data) + result = post_processor(result, form_data) # type: ignore else: for query in result["queries"]: if query["result_format"] == ChartDataResultFormat.JSON: @@ -401,7 +401,7 @@ def apply_post_process( else: raise Exception(f"Result format {query['result_format']} not supported") - processed_df = post_processor(df, form_data, datasource) + processed_df = post_processor(df, form_data, datasource) # type: ignore query["colnames"] = list(processed_df.columns) query["indexnames"] = list(processed_df.index) diff --git a/superset/proxy/api.py b/superset/proxy/api.py index 3b0d3ae09e6c..1defcf8ec8f5 100644 --- a/superset/proxy/api.py +++ b/superset/proxy/api.py @@ -34,23 +34,23 @@ class ProxyRestAPI(BaseSupersetModelRestApi): openapi_spec_tag = "Proxy" - def __init__(self): + def __init__(self) -> None: """ This is the init function for the ProxyRestAPI class """ super().__init__() - self.ALFRED_SCOPE = os.environ.get("ALFRED_SCOPE") + self.ALFRED_SCOPE = str(os.environ.get("ALFRED_SCOPE")) - self.ALFRED_URL = 
os.environ.get("ALFRED_URL") + self.ALFRED_URL = str(os.environ.get("ALFRED_URL")) if os.environ.get("FLASK_ENV") == "development": - self.SSL_CERT = os.environ.get("REQUESTS_CA_BUNDLE_DEV") + self.SSL_CERT = str(os.environ.get("REQUESTS_CA_BUNDLE_DEV")) else: - self.SSL_CERT = os.environ.get("REQUESTS_CA_BUNDLE") + self.SSL_CERT = str(os.environ.get("REQUESTS_CA_BUNDLE")) def attach_url( - self, response_code: int, app_url: str, err: bool, payload + self, response_code: int, app_url: str, err: bool, payload: str ) -> Response: """ This is a function that will attach the app URL with the response that is @@ -133,18 +133,18 @@ def make_alfred_connection(self, url: str) -> Response: except (requests.exceptions.HTTPError, Exception) as err: return self.error_obtaining_token("Alfred", err) else: - headers = CaseInsensitiveDict() + headers = CaseInsensitiveDict() # type: ignore headers["Accept"] = "application/json" headers["Authorization"] = f"Bearer { alfred_token }" alfred_resp = "" try: - alfred_resp = requests.get(url, headers=headers, verify=self.SSL_CERT) + alfred_resp = requests.get(url, headers=headers, verify=self.SSL_CERT) # type: ignore except requests.exceptions.ConnectionError as err: return self.error_obtaining_response("Alfred", err) refresh_resp_json = json.loads( - alfred_resp.content.decode("utf8", "replace") + alfred_resp.content.decode("utf8", "replace") # type: ignore ) return self.attach_url(200, self.ALFRED_URL, False, refresh_resp_json) diff --git a/superset/utils/date_parser.py b/superset/utils/date_parser.py index ec35390aa1e2..69f7542cdeba 100644 --- a/superset/utils/date_parser.py +++ b/superset/utils/date_parser.py @@ -175,7 +175,7 @@ def get_since_until( # pylint: disable=too-many-arguments,too-many-locals,too-m - Next X seconds/minutes/hours/days/weeks/months/years """ - config = app.config + config = app.config # type: ignore default_relative_start = config["DEFAULT_RELATIVE_START_TIME"] default_relative_end = config["DEFAULT_RELATIVE_END_TIME"] diff --git a/tests/integration_tests/reports/commands_tests.py b/tests/integration_tests/reports/commands_tests.py index 6c884786f977..dd23d291fd69 100644 --- a/tests/integration_tests/reports/commands_tests.py +++ b/tests/integration_tests/reports/commands_tests.py @@ -1081,41 +1081,6 @@ def test_email_dashboard_report_schedule_force_screenshot( assert_log(ReportState.SUCCESS) -@pytest.mark.usefixtures( - "load_birth_names_dashboard_with_slices", - "create_report_email_dashboard_force_screenshot", -) -@patch("superset.reports.notifications.email.send_email_smtp") -@patch("superset.utils.screenshots.DashboardScreenshot.get_screenshot") -def test_email_dashboard_report_schedule_force_screenshot( - screenshot_mock, email_mock, create_report_email_dashboard_force_screenshot -): - """ - ExecuteReport Command: Test dashboard email report schedule - """ - # setup screenshot mock - screenshot_mock.return_value = SCREENSHOT_FILE - - with freeze_time("2020-01-01T00:00:00Z"): - AsyncExecuteReportScheduleCommand( - TEST_ID, - create_report_email_dashboard_force_screenshot.id, - datetime.utcnow(), - ).run() - - notification_targets = get_target_from_report_schedule( - create_report_email_dashboard_force_screenshot - ) - - # Assert the email smtp address - assert email_mock.call_args[0][0] == notification_targets[0] - # Assert the email inline screenshot - smtp_images = email_mock.call_args[1]["images"] - assert smtp_images[list(smtp_images.keys())[0]] == SCREENSHOT_FILE - # Assert logs are correct - assert_log(ReportState.SUCCESS) - 
- @pytest.mark.usefixtures( "load_birth_names_dashboard_with_slices", "create_report_slack_chart" ) diff --git a/tests/unit_tests/migrations/shared/utils_test.py b/tests/unit_tests/migrations/shared/utils_test.py index cb5b2cbd0e82..86c496a0ed7c 100644 --- a/tests/unit_tests/migrations/shared/utils_test.py +++ b/tests/unit_tests/migrations/shared/utils_test.py @@ -29,7 +29,9 @@ def test_extract_table_references(mocker: MockerFixture, app_context: None) -> N """ Test the ``extract_table_references`` helper function. """ - from superset.migrations.shared.utils import extract_table_references + from superset.migrations.shared.utils import ( # type: ignore + extract_table_references, + ) assert extract_table_references("SELECT 1", "trino") == set() assert extract_table_references("SELECT 1 FROM some_table", "trino") == { From 09ef913a9bfd5a4a441ee4697b3c88e9aabf6b46 Mon Sep 17 00:00:00 2001 From: cccs-Dustin <96579982+cccs-Dustin@users.noreply.github.com> Date: Thu, 15 Sep 2022 14:23:03 -0400 Subject: [PATCH 4/5] Temp update to build img --- cccs-build/superset/Dockerfile | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/cccs-build/superset/Dockerfile b/cccs-build/superset/Dockerfile index 27640b957bc5..ca07595504fe 100644 --- a/cccs-build/superset/Dockerfile +++ b/cccs-build/superset/Dockerfile @@ -1,7 +1,7 @@ # Vault CA container import ARG VAULT_CA_CONTAINER=uchimera.azurecr.io/cccs/hogwarts/vault-ca:master_2921_22315d60 FROM $VAULT_CA_CONTAINER AS vault_ca -FROM uchimera.azurecr.io/cccs/superset-base:cccs-2.0_20220914141024_b4860 +FROM uchimera.azurecr.io/cccs/superset-base:feature_CLDN-1563_20220915180428_b4879 USER root From bcbf01c3d6a33042533a228125ef2173a3a2eff5 Mon Sep 17 00:00:00 2001 From: cccs-Dustin <96579982+cccs-Dustin@users.noreply.github.com> Date: Thu, 15 Sep 2022 14:29:02 -0400 Subject: [PATCH 5/5] revert temp change to build img --- cccs-build/superset/Dockerfile | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/cccs-build/superset/Dockerfile b/cccs-build/superset/Dockerfile index ca07595504fe..27640b957bc5 100644 --- a/cccs-build/superset/Dockerfile +++ b/cccs-build/superset/Dockerfile @@ -1,7 +1,7 @@ # Vault CA container import ARG VAULT_CA_CONTAINER=uchimera.azurecr.io/cccs/hogwarts/vault-ca:master_2921_22315d60 FROM $VAULT_CA_CONTAINER AS vault_ca -FROM uchimera.azurecr.io/cccs/superset-base:feature_CLDN-1563_20220915180428_b4879 +FROM uchimera.azurecr.io/cccs/superset-base:cccs-2.0_20220914141024_b4860 USER root