From 731cfb1553f6004084e6ad1e81f58501b3a46b26 Mon Sep 17 00:00:00 2001 From: John Bodley <4567245+john-bodley@users.noreply.github.com> Date: Wed, 26 Jun 2024 12:01:26 -0700 Subject: [PATCH 01/31] chore(tests): Mark TestConnectionDatabaseCommand as non-test related (#29366) --- superset/commands/database/test_connection.py | 1 + 1 file changed, 1 insertion(+) diff --git a/superset/commands/database/test_connection.py b/superset/commands/database/test_connection.py index 7c38ab68a3ae..7979901bca0a 100644 --- a/superset/commands/database/test_connection.py +++ b/superset/commands/database/test_connection.py @@ -64,6 +64,7 @@ def get_log_connection_action( class TestConnectionDatabaseCommand(BaseCommand): + __test__ = False _model: Optional[Database] = None _context: dict[str, Any] _uri: str From 53450b7e2f585e2b3ccff614b142ea03a18a504e Mon Sep 17 00:00:00 2001 From: John Bodley <4567245+john-bodley@users.noreply.github.com> Date: Wed, 26 Jun 2024 12:01:50 -0700 Subject: [PATCH 02/31] fix(tox): Address issue with generative environment variables (#29368) --- pyproject.toml | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index 65471b0cdcdb..771761130836 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -252,9 +252,8 @@ setenv = SUPERSET_HOME = {envtmpdir} mysql: SUPERSET__SQLALCHEMY_DATABASE_URI = mysql://mysqluser:mysqluserpassword@localhost/superset?charset=utf8 postgres: SUPERSET__SQLALCHEMY_DATABASE_URI = postgresql+psycopg2://superset:superset@localhost/test - sqlite: - SUPERSET__SQLALCHEMY_DATABASE_URI = sqlite:////{envtmpdir}/superset.db - SUPERSET__SQLALCHEMY_EXAMPLES_URI = sqlite:////{envtmpdir}/examples.db + sqlite: SUPERSET__SQLALCHEMY_DATABASE_URI = sqlite:////{envtmpdir}/superset.db + sqlite: SUPERSET__SQLALCHEMY_EXAMPLES_URI = sqlite:////{envtmpdir}/examples.db mysql-presto: SUPERSET__SQLALCHEMY_DATABASE_URI = mysql://mysqluser:mysqluserpassword@localhost/superset?charset=utf8 # docker run -p 8080:8080 --name presto starburstdata/presto mysql-presto: SUPERSET__SQLALCHEMY_EXAMPLES_URI = presto://localhost:8080/memory/default From 5948dac88940dbe38275b002d30ad33b1b55e698 Mon Sep 17 00:00:00 2001 From: John Bodley <4567245+john-bodley@users.noreply.github.com> Date: Wed, 26 Jun 2024 12:10:05 -0700 Subject: [PATCH 03/31] chore(ci): Start Celery worker as a background process (#29371) --- .github/workflows/bashlib.sh | 14 ++++++++++++++ .../superset-python-integrationtest.yml | 18 ++++++++++++------ .../workflows/superset-python-presto-hive.yml | 12 ++++++++---- 3 files changed, 34 insertions(+), 10 deletions(-) diff --git a/.github/workflows/bashlib.sh b/.github/workflows/bashlib.sh index 82840becb83b..31e7d04b23bc 100644 --- a/.github/workflows/bashlib.sh +++ b/.github/workflows/bashlib.sh @@ -117,6 +117,20 @@ testdata() { say "::endgroup::" } +celery-worker() { + cd "$GITHUB_WORKSPACE" + say "::group::Start Celery worker" + # must specify PYTHONPATH to make `tests.superset_test_config` importable + export PYTHONPATH="$GITHUB_WORKSPACE" + celery \ + --app=superset.tasks.celery_app:app \ + worker \ + --concurrency=2 \ + --detach \ + --optimization=fair + say "::endgroup::" +} + cypress-install() { cd "$GITHUB_WORKSPACE/superset-frontend/cypress-base" diff --git a/.github/workflows/superset-python-integrationtest.yml b/.github/workflows/superset-python-integrationtest.yml index 3f43bef88c10..80b2a3b98b75 100644 --- a/.github/workflows/superset-python-integrationtest.yml +++ b/.github/workflows/superset-python-integrationtest.yml 
@@ -58,9 +58,11 @@ jobs: uses: ./.github/actions/cached-dependencies with: run: setup-mysql - - name: Run celery + - name: Start Celery worker if: steps.check.outputs.python - run: celery --app=superset.tasks.celery_app:app worker -Ofair -c 2 & + uses: ./.github/actions/cached-dependencies + with: + run: celery-worker - name: Python integration tests (MySQL) if: steps.check.outputs.python run: | @@ -117,9 +119,11 @@ jobs: with: run: | setup-postgres - - name: Run celery + - name: Start Celery worker if: steps.check.outputs.python - run: celery --app=superset.tasks.celery_app:app worker -Ofair -c 2 & + uses: ./.github/actions/cached-dependencies + with: + run: celery-worker - name: Python integration tests (PostgreSQL) if: steps.check.outputs.python run: | @@ -167,9 +171,11 @@ jobs: run: | # sqlite needs this working directory mkdir ${{ github.workspace }}/.temp - - name: Run celery + - name: Start Celery worker if: steps.check.outputs.python - run: celery --app=superset.tasks.celery_app:app worker -Ofair -c 2 & + uses: ./.github/actions/cached-dependencies + with: + run: celery-worker - name: Python integration tests (SQLite) if: steps.check.outputs.python run: | diff --git a/.github/workflows/superset-python-presto-hive.yml b/.github/workflows/superset-python-presto-hive.yml index 30dbaf7d40cd..6ab65430b406 100644 --- a/.github/workflows/superset-python-presto-hive.yml +++ b/.github/workflows/superset-python-presto-hive.yml @@ -67,9 +67,11 @@ jobs: run: | echo "${{ steps.check.outputs.python }}" setup-postgres - - name: Run celery + - name: Start Celery worker if: steps.check.outputs.python - run: celery --app=superset.tasks.celery_app:app worker -Ofair -c 2 & + uses: ./.github/actions/cached-dependencies + with: + run: celery-worker - name: Python unit tests (PostgreSQL) if: steps.check.outputs.python run: | @@ -132,9 +134,11 @@ jobs: uses: ./.github/actions/cached-dependencies with: run: setup-postgres - - name: Run celery + - name: Start Celery worker if: steps.check.outputs.python - run: celery --app=superset.tasks.celery_app:app worker -Ofair -c 2 & + uses: ./.github/actions/cached-dependencies + with: + run: celery-worker - name: Python unit tests (PostgreSQL) if: steps.check.outputs.python run: | From 466dda2b147e2e1f54e2af160971aad0531e1051 Mon Sep 17 00:00:00 2001 From: John Bodley <4567245+john-bodley@users.noreply.github.com> Date: Wed, 26 Jun 2024 12:57:51 -0700 Subject: [PATCH 04/31] chore(security): Clean up session/commit logic (#29381) --- superset/security/manager.py | 25 +++++++------------------ 1 file changed, 7 insertions(+), 18 deletions(-) diff --git a/superset/security/manager.py b/superset/security/manager.py index a67870a3989e..ea2ee5ef83a1 100644 --- a/superset/security/manager.py +++ b/superset/security/manager.py @@ -1017,9 +1017,9 @@ def clean_perms(self) -> None: == None, # noqa: E711 ) ) - self.get_session.commit() if deleted_count := pvms.delete(): logger.info("Deleted %i faulty permissions", deleted_count) + self.get_session.commit() def sync_role_definitions(self) -> None: """ @@ -1047,9 +1047,6 @@ def sync_role_definitions(self) -> None: ) self.create_missing_perms() - - # commit role and view menu updates - self.get_session.commit() self.clean_perms() def _get_all_pvms(self) -> list[PermissionView]: @@ -2446,8 +2443,7 @@ def get_rls_filters(self, table: "BaseDatasource") -> list[SqlaQuery]: user_roles = [role.id for role in self.get_user_roles(g.user)] regular_filter_roles = ( - self.get_session() - .query(RLSFilterRoles.c.rls_filter_id) + 
self.get_session.query(RLSFilterRoles.c.rls_filter_id) .join(RowLevelSecurityFilter) .filter( RowLevelSecurityFilter.filter_type == RowLevelSecurityFilterType.REGULAR @@ -2455,22 +2451,18 @@ def get_rls_filters(self, table: "BaseDatasource") -> list[SqlaQuery]: .filter(RLSFilterRoles.c.role_id.in_(user_roles)) ) base_filter_roles = ( - self.get_session() - .query(RLSFilterRoles.c.rls_filter_id) + self.get_session.query(RLSFilterRoles.c.rls_filter_id) .join(RowLevelSecurityFilter) .filter( RowLevelSecurityFilter.filter_type == RowLevelSecurityFilterType.BASE ) .filter(RLSFilterRoles.c.role_id.in_(user_roles)) ) - filter_tables = ( - self.get_session() - .query(RLSFilterTables.c.rls_filter_id) - .filter(RLSFilterTables.c.table_id == table.id) + filter_tables = self.get_session.query(RLSFilterTables.c.rls_filter_id).filter( + RLSFilterTables.c.table_id == table.id ) query = ( - self.get_session() - .query( + self.get_session.query( RowLevelSecurityFilter.id, RowLevelSecurityFilter.group_key, RowLevelSecurityFilter.clause, @@ -2673,12 +2665,9 @@ def raise_for_ownership(self, resource: Model) -> None: :raises SupersetSecurityException: If the current user is not an owner """ - # pylint: disable=import-outside-toplevel - from superset import db - if self.is_admin(): return - orig_resource = db.session.query(resource.__class__).get(resource.id) + orig_resource = self.get_session.query(resource.__class__).get(resource.id) owners = orig_resource.owners if hasattr(orig_resource, "owners") else [] if g.user.is_anonymous or g.user not in owners: From 9d405738c07e7204e068d04953afb62d8494c460 Mon Sep 17 00:00:00 2001 From: John Bodley <4567245+john-bodley@users.noreply.github.com> Date: Thu, 27 Jun 2024 07:28:50 -0700 Subject: [PATCH 05/31] chore(tests): Remove unnecessary mock (#29386) --- tests/integration_tests/databases/api_tests.py | 4 ---- 1 file changed, 4 deletions(-) diff --git a/tests/integration_tests/databases/api_tests.py b/tests/integration_tests/databases/api_tests.py index 9f9882c99bde..d4a1ac08c21c 100644 --- a/tests/integration_tests/databases/api_tests.py +++ b/tests/integration_tests/databases/api_tests.py @@ -839,16 +839,12 @@ def test_get_database_returns_related_ssh_tunnel( db.session.delete(model) db.session.commit() - @mock.patch( - "superset.commands.database.test_connection.TestConnectionDatabaseCommand.run", - ) @mock.patch("superset.models.core.Database.get_all_catalog_names") @mock.patch("superset.models.core.Database.get_all_schema_names") def test_if_ssh_tunneling_flag_is_not_active_it_raises_new_exception( self, mock_get_all_schema_names, mock_get_all_catalog_names, - mock_test_connection_database_command_run, ): """ Database API: Test raises SSHTunneling feature flag not enabled From 59d1eea4c8ccbce8869ca4d6abf942bce34b4b14 Mon Sep 17 00:00:00 2001 From: Daniel Vaz Gaspar Date: Thu, 27 Jun 2024 18:25:07 +0100 Subject: [PATCH 06/31] ci: remove update repo on issue comment (#29388) --- .../workflows/update-monorepo-lockfiles.yml | 52 ------------------- 1 file changed, 52 deletions(-) delete mode 100644 .github/workflows/update-monorepo-lockfiles.yml diff --git a/.github/workflows/update-monorepo-lockfiles.yml b/.github/workflows/update-monorepo-lockfiles.yml deleted file mode 100644 index 820384d8df8b..000000000000 --- a/.github/workflows/update-monorepo-lockfiles.yml +++ /dev/null @@ -1,52 +0,0 @@ -name: Update Lockfiles for Dependabot Monorepo PRs - -on: - pull_request: - paths: - - 'superset-frontend/packages/**/package.json' - - 
'superset-frontend/plugins/**/package.json' - types: [opened, synchronize, reopened] - issue_comment: - types: [created] - -# cancel previous workflow jobs for PRs -concurrency: - group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.event.issue.number || github.run_id }} - cancel-in-progress: true - -jobs: - update-lock-file: - permissions: - contents: write - pull-requests: write - runs-on: ubuntu-latest - if: > - (github.event_name == 'pull_request' && github.event.pull_request.user.login == 'dependabot[bot]') || - (github.event_name == 'issue_comment' && contains(github.event.comment.body, '@supersetbot trigger-dependabot-lockfile') && github.event.issue.pull_request && (github.event.comment.author_association == 'MEMBER' || github.event.comment.author_association == 'OWNER')) - defaults: - run: - working-directory: superset-frontend - steps: - - name: Checkout Code - uses: actions/checkout@v4 - with: - ref: ${{ github.event.pull_request.head.ref || github.head_ref }} # Checkout the branch that made the PR or the comment's PR branch - - - name: Set up Node.js - uses: actions/setup-node@v4 - with: - node-version: '18' - - - name: Install Dependencies and Update Lock File - run: | - npm install - - - name: Commit and Push Changes - env: - GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - run: | - git config user.name "GitHub-Actions[bot]" - git config user.email "github-actions[bot]@users.noreply.github.com" - git add package-lock.json - # Push the changes back to the branch if they exist, and pass if there are no changes - git diff --staged --quiet || (git commit -m "Update lock file for Dependabot PR" -a && git push https://${{ secrets.GITHUB_TOKEN }}@github.com/${{ github.repository }} ${{ github.event.pull_request.head.ref || github.head_ref }}) From 9c5c124155cd9bf111ba9d1c16fe24f1bb7bda33 Mon Sep 17 00:00:00 2001 From: Jan Suleiman Date: Thu, 27 Jun 2024 19:25:53 +0200 Subject: [PATCH 07/31] docs: fix typo in docker compose doc (#29390) --- docs/docs/installation/docker-compose.mdx | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/docs/installation/docker-compose.mdx b/docs/docs/installation/docker-compose.mdx index 82c492de865c..9e785cd19fb2 100644 --- a/docs/docs/installation/docker-compose.mdx +++ b/docs/docs/installation/docker-compose.mdx @@ -23,7 +23,7 @@ documentation. ::: -As mentioned in our [quickstart guidee](/docs/quickstart), The fastest way to try +As mentioned in our [quickstart guide](/docs/quickstart), the fastest way to try Superset locally is using Docker Compose on a Linux or Mac OSX computer. Superset does not have official support for Windows. It's also the easiest way to launch a fully functioning **development environment** quickly. From c70a9d0e674d1b52bcdf17a22a8450ee7a3814b2 Mon Sep 17 00:00:00 2001 From: Evan Rusackas Date: Thu, 27 Jun 2024 11:30:59 -0600 Subject: [PATCH 08/31] fix(readme): changing video from mp4 to webm format (#29392) --- README.md | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/README.md b/README.md index 408cad422b1b..4b9b6d5485e9 100644 --- a/README.md +++ b/README.md @@ -73,9 +73,7 @@ Superset provides: **Video Overview** - +[superset-video-4k.webm](https://github.com/apache/superset/assets/812905/da036bc2-150c-4ee7-80f9-75e63210ff76)
From 2a587a771cad037b89566ce63dfdd16545a3b250 Mon Sep 17 00:00:00 2001 From: Geido <60598000+geido@users.noreply.github.com> Date: Fri, 28 Jun 2024 15:16:05 +0200 Subject: [PATCH 09/31] feat: Add Ant Design 5 Theme (#29328) --- superset-frontend/package-lock.json | 1749 +++++++++++++++-- superset-frontend/package.json | 2 + .../components/AntdThemeProvider/index.tsx | 27 + superset-frontend/src/theme/index.ts | 69 + superset-frontend/src/theme/light.ts | 119 ++ .../src/views/RootContextProviders.tsx | 50 +- 6 files changed, 1824 insertions(+), 192 deletions(-) create mode 100644 superset-frontend/src/components/AntdThemeProvider/index.tsx create mode 100644 superset-frontend/src/theme/index.ts create mode 100644 superset-frontend/src/theme/light.ts diff --git a/superset-frontend/package-lock.json b/superset-frontend/package-lock.json index 66d71dad934a..07871cab9be2 100644 --- a/superset-frontend/package-lock.json +++ b/superset-frontend/package-lock.json @@ -57,6 +57,7 @@ "abortcontroller-polyfill": "^1.1.9", "ace-builds": "^1.4.14", "antd": "4.10.3", + "antd-v5": "npm:antd@^5.18.0", "babel-plugin-typescript-to-proptypes": "^2.0.0", "bootstrap": "^3.4.1", "brace": "^0.11.1", @@ -94,6 +95,7 @@ "polished": "^4.3.1", "prop-types": "^15.7.2", "query-string": "^6.13.7", + "rc-trigger": "^5.3.4", "re-resizable": "^6.9.11", "react": "^16.13.1", "react-ace": "^10.1.0", @@ -346,20 +348,38 @@ } }, "node_modules/@ant-design/colors": { - "version": "7.0.0", - "resolved": "https://registry.npmjs.org/@ant-design/colors/-/colors-7.0.0.tgz", - "integrity": "sha512-iVm/9PfGCbC0dSMBrz7oiEXZaaGH7ceU40OJEfKmyuzR9R5CRimJYPlRiFtMQGQcbNMea/ePcoIebi4ASGYXtg==", + "version": "7.0.2", + "resolved": "https://registry.npmjs.org/@ant-design/colors/-/colors-7.0.2.tgz", + "integrity": "sha512-7KJkhTiPiLHSu+LmMJnehfJ6242OCxSlR3xHVBecYxnMW8MS/878NXct1GqYARyL59fyeFdKRxXTfvR9SnDgJg==", "dependencies": { - "@ctrl/tinycolor": "^3.4.0" + "@ctrl/tinycolor": "^3.6.1" + } + }, + "node_modules/@ant-design/cssinjs": { + "version": "1.21.0", + "resolved": "https://registry.npmjs.org/@ant-design/cssinjs/-/cssinjs-1.21.0.tgz", + "integrity": "sha512-gIilraPl+9EoKdYxnupxjHB/Q6IHNRjEXszKbDxZdsgv4sAZ9pjkCq8yanDWNvyfjp4leir2OVAJm0vxwKK8YA==", + "dependencies": { + "@babel/runtime": "^7.11.1", + "@emotion/hash": "^0.8.0", + "@emotion/unitless": "^0.7.5", + "classnames": "^2.3.1", + "csstype": "^3.1.3", + "rc-util": "^5.35.0", + "stylis": "^4.0.13" + }, + "peerDependencies": { + "react": ">=16.0.0", + "react-dom": ">=16.0.0" } }, "node_modules/@ant-design/icons": { - "version": "5.2.6", - "resolved": "https://registry.npmjs.org/@ant-design/icons/-/icons-5.2.6.tgz", - "integrity": "sha512-4wn0WShF43TrggskBJPRqCD0fcHbzTYjnaoskdiJrVHg86yxoZ8ZUqsXvyn4WUqehRiFKnaclOhqk9w4Ui2KVw==", + "version": "5.3.7", + "resolved": "https://registry.npmjs.org/@ant-design/icons/-/icons-5.3.7.tgz", + "integrity": "sha512-bCPXTAg66f5bdccM4TT21SQBDO1Ek2gho9h3nO9DAKXJP4sq+5VBjrQMSxMVXSB3HyEz+cUbHQ5+6ogxCOpaew==", "dependencies": { "@ant-design/colors": "^7.0.0", - "@ant-design/icons-svg": "^4.3.0", + "@ant-design/icons-svg": "^4.4.0", "@babel/runtime": "^7.11.2", "classnames": "^2.2.6", "rc-util": "^5.31.1" @@ -373,9 +393,24 @@ } }, "node_modules/@ant-design/icons-svg": { - "version": "4.3.1", - "resolved": "https://registry.npmjs.org/@ant-design/icons-svg/-/icons-svg-4.3.1.tgz", - "integrity": "sha512-4QBZg8ccyC6LPIRii7A0bZUk3+lEDCLnhB+FVsflGdcWPPmV+j3fire4AwwoqHV/BibgvBmR9ZIo4s867smv+g==" + "version": "4.4.2", + "resolved": 
"https://registry.npmjs.org/@ant-design/icons-svg/-/icons-svg-4.4.2.tgz", + "integrity": "sha512-vHbT+zJEVzllwP+CM+ul7reTEfBR0vgxFe7+lREAsAA7YGsYpboiq2sQNeQeRvh09GfQgs/GyFEvZpJ9cLXpXA==" + }, + "node_modules/@ant-design/react-slick": { + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/@ant-design/react-slick/-/react-slick-1.1.2.tgz", + "integrity": "sha512-EzlvzE6xQUBrZuuhSAFTdsr4P2bBBHGZwKFemEfq8gIGyIQCxalYfZW/T2ORbtQx5rU69o+WycP3exY/7T1hGA==", + "dependencies": { + "@babel/runtime": "^7.10.4", + "classnames": "^2.2.5", + "json2mq": "^0.2.0", + "resize-observer-polyfill": "^1.5.1", + "throttle-debounce": "^5.0.0" + }, + "peerDependencies": { + "react": ">=16.9.0" + } }, "node_modules/@applitools/core": { "version": "4.12.2", @@ -3663,9 +3698,9 @@ "dev": true }, "node_modules/@babel/runtime": { - "version": "7.23.9", - "resolved": "https://registry.npmjs.org/@babel/runtime/-/runtime-7.23.9.tgz", - "integrity": "sha512-0CX6F+BI2s9dkUqr08KFrAIZgNFj75rdBU/DjCyYLIaV/quFjkk6T+EJ2LkZHyZTbEV4L5p97mNkUsHl2wLFAw==", + "version": "7.24.7", + "resolved": "https://registry.npmjs.org/@babel/runtime/-/runtime-7.24.7.tgz", + "integrity": "sha512-UwgBRMjJP+xv857DCngvqXI3Iq6J4v0wXmwc6sapg+zyhbwmQX67LUEFrkK5tbyJ30jGuG3ZvWpBiB9LCy1kWw==", "dependencies": { "regenerator-runtime": "^0.14.0" }, @@ -3827,9 +3862,9 @@ } }, "node_modules/@ctrl/tinycolor": { - "version": "3.4.1", - "resolved": "https://registry.npmjs.org/@ctrl/tinycolor/-/tinycolor-3.4.1.tgz", - "integrity": "sha512-ej5oVy6lykXsvieQtqZxCOaLT+xD4+QNarq78cIYISHmZXshCvROLudpQN3lfL8G0NL7plMSSK+zlyvCaIJ4Iw==", + "version": "3.6.1", + "resolved": "https://registry.npmjs.org/@ctrl/tinycolor/-/tinycolor-3.6.1.tgz", + "integrity": "sha512-SITSV6aIXsuVNV3f3O0f2n/cgyEDWoSqtZMYiAmcsYHydcKrOz3gUxB/iXd/Qf08+IZX4KpgNbvUdMBmWz+kcA==", "engines": { "node": ">=10" } @@ -13464,6 +13499,144 @@ "@babel/runtime": "^7.13.10" } }, + "node_modules/@rc-component/async-validator": { + "version": "5.0.4", + "resolved": "https://registry.npmjs.org/@rc-component/async-validator/-/async-validator-5.0.4.tgz", + "integrity": "sha512-qgGdcVIF604M9EqjNF0hbUTz42bz/RDtxWdWuU5EQe3hi7M8ob54B6B35rOsvX5eSvIHIzT9iH1R3n+hk3CGfg==", + "dependencies": { + "@babel/runtime": "^7.24.4" + }, + "engines": { + "node": ">=14.x" + } + }, + "node_modules/@rc-component/color-picker": { + "version": "1.5.3", + "resolved": "https://registry.npmjs.org/@rc-component/color-picker/-/color-picker-1.5.3.tgz", + "integrity": "sha512-+tGGH3nLmYXTalVe0L8hSZNs73VTP5ueSHwUlDC77KKRaN7G4DS4wcpG5DTDzdcV/Yas+rzA6UGgIyzd8fS4cw==", + "dependencies": { + "@babel/runtime": "^7.23.6", + "@ctrl/tinycolor": "^3.6.1", + "classnames": "^2.2.6", + "rc-util": "^5.38.1" + }, + "peerDependencies": { + "react": ">=16.9.0", + "react-dom": ">=16.9.0" + } + }, + "node_modules/@rc-component/context": { + "version": "1.4.0", + "resolved": "https://registry.npmjs.org/@rc-component/context/-/context-1.4.0.tgz", + "integrity": "sha512-kFcNxg9oLRMoL3qki0OMxK+7g5mypjgaaJp/pkOis/6rVxma9nJBF/8kCIuTYHUQNr0ii7MxqE33wirPZLJQ2w==", + "dependencies": { + "@babel/runtime": "^7.10.1", + "rc-util": "^5.27.0" + }, + "peerDependencies": { + "react": ">=16.9.0", + "react-dom": ">=16.9.0" + } + }, + "node_modules/@rc-component/mini-decimal": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/@rc-component/mini-decimal/-/mini-decimal-1.1.0.tgz", + "integrity": "sha512-jS4E7T9Li2GuYwI6PyiVXmxTiM6b07rlD9Ge8uGZSCz3WlzcG5ZK7g5bbuKNeZ9pgUuPK/5guV781ujdVpm4HQ==", + "dependencies": { + "@babel/runtime": "^7.18.0" + }, + "engines": { + 
"node": ">=8.x" + } + }, + "node_modules/@rc-component/mutate-observer": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/@rc-component/mutate-observer/-/mutate-observer-1.1.0.tgz", + "integrity": "sha512-QjrOsDXQusNwGZPf4/qRQasg7UFEj06XiCJ8iuiq/Io7CrHrgVi6Uuetw60WAMG1799v+aM8kyc+1L/GBbHSlw==", + "dependencies": { + "@babel/runtime": "^7.18.0", + "classnames": "^2.3.2", + "rc-util": "^5.24.4" + }, + "engines": { + "node": ">=8.x" + }, + "peerDependencies": { + "react": ">=16.9.0", + "react-dom": ">=16.9.0" + } + }, + "node_modules/@rc-component/portal": { + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/@rc-component/portal/-/portal-1.1.2.tgz", + "integrity": "sha512-6f813C0IsasTZms08kfA8kPAGxbbkYToa8ALaiDIGGECU4i9hj8Plgbx0sNJDrey3EtHO30hmdaxtT0138xZcg==", + "dependencies": { + "@babel/runtime": "^7.18.0", + "classnames": "^2.3.2", + "rc-util": "^5.24.4" + }, + "engines": { + "node": ">=8.x" + }, + "peerDependencies": { + "react": ">=16.9.0", + "react-dom": ">=16.9.0" + } + }, + "node_modules/@rc-component/tour": { + "version": "1.15.0", + "resolved": "https://registry.npmjs.org/@rc-component/tour/-/tour-1.15.0.tgz", + "integrity": "sha512-h6hyILDwL+In9GAgRobwRWihLqqsD7Uft3fZGrJ7L4EiyCoxbnNYwzPXDfz7vNDhWeVyvAWQJj9fJCzpI4+b4g==", + "dependencies": { + "@babel/runtime": "^7.18.0", + "@rc-component/portal": "^1.0.0-9", + "@rc-component/trigger": "^2.0.0", + "classnames": "^2.3.2", + "rc-util": "^5.24.4" + }, + "engines": { + "node": ">=8.x" + }, + "peerDependencies": { + "react": ">=16.9.0", + "react-dom": ">=16.9.0" + } + }, + "node_modules/@rc-component/trigger": { + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/@rc-component/trigger/-/trigger-2.2.0.tgz", + "integrity": "sha512-QarBCji02YE9aRFhZgRZmOpXBj0IZutRippsVBv85sxvG4FGk/vRxwAlkn3MS9zK5mwbETd86mAVg2tKqTkdJA==", + "dependencies": { + "@babel/runtime": "^7.23.2", + "@rc-component/portal": "^1.1.0", + "classnames": "^2.3.2", + "rc-motion": "^2.0.0", + "rc-resize-observer": "^1.3.1", + "rc-util": "^5.38.0" + }, + "engines": { + "node": ">=8.x" + }, + "peerDependencies": { + "react": ">=16.9.0", + "react-dom": ">=16.9.0" + } + }, + "node_modules/@rc-component/trigger/node_modules/rc-resize-observer": { + "version": "1.4.0", + "resolved": "https://registry.npmjs.org/rc-resize-observer/-/rc-resize-observer-1.4.0.tgz", + "integrity": "sha512-PnMVyRid9JLxFavTjeDXEXo65HCRqbmLBw9xX9gfC4BZiSzbLXKzW3jPz+J0P71pLbD5tBMTT+mkstV5gD0c9Q==", + "dependencies": { + "@babel/runtime": "^7.20.7", + "classnames": "^2.2.1", + "rc-util": "^5.38.0", + "resize-observer-polyfill": "^1.5.1" + }, + "peerDependencies": { + "react": ">=16.9.0", + "react-dom": ">=16.9.0" + } + }, "node_modules/@react-dnd/asap": { "version": "4.0.0", "resolved": "https://registry.npmjs.org/@react-dnd/asap/-/asap-4.0.0.tgz", @@ -25908,6 +26081,372 @@ "react-dom": ">=16.9.0" } }, + "node_modules/antd-v5": { + "name": "antd", + "version": "5.18.3", + "resolved": "https://registry.npmjs.org/antd/-/antd-5.18.3.tgz", + "integrity": "sha512-Dm3P8HBxoo/DiR/QZLj5Mk+rQZsSXxCCArSZACHGiklkkjW6klzlebAElOUr9NyDeFX7UnQ6LVk7vznXlnjTqQ==", + "dependencies": { + "@ant-design/colors": "^7.0.2", + "@ant-design/cssinjs": "^1.21.0", + "@ant-design/icons": "^5.3.7", + "@ant-design/react-slick": "~1.1.2", + "@babel/runtime": "^7.24.7", + "@ctrl/tinycolor": "^3.6.1", + "@rc-component/color-picker": "~1.5.3", + "@rc-component/mutate-observer": "^1.1.0", + "@rc-component/tour": "~1.15.0", + "@rc-component/trigger": "^2.2.0", + "classnames": "^2.5.1", + 
"copy-to-clipboard": "^3.3.3", + "dayjs": "^1.11.11", + "qrcode.react": "^3.1.0", + "rc-cascader": "~3.26.0", + "rc-checkbox": "~3.3.0", + "rc-collapse": "~3.7.3", + "rc-dialog": "~9.5.2", + "rc-drawer": "~7.2.0", + "rc-dropdown": "~4.2.0", + "rc-field-form": "~2.2.1", + "rc-image": "~7.9.0", + "rc-input": "~1.5.1", + "rc-input-number": "~9.1.0", + "rc-mentions": "~2.14.0", + "rc-menu": "~9.14.0", + "rc-motion": "^2.9.2", + "rc-notification": "~5.6.0", + "rc-pagination": "~4.0.4", + "rc-picker": "~4.5.0", + "rc-progress": "~4.0.0", + "rc-rate": "~2.13.0", + "rc-resize-observer": "^1.4.0", + "rc-segmented": "~2.3.0", + "rc-select": "~14.14.0", + "rc-slider": "~10.6.2", + "rc-steps": "~6.0.1", + "rc-switch": "~4.1.0", + "rc-table": "~7.45.7", + "rc-tabs": "~15.1.1", + "rc-textarea": "~1.7.0", + "rc-tooltip": "~6.2.0", + "rc-tree": "~5.8.8", + "rc-tree-select": "~5.21.0", + "rc-upload": "~4.5.2", + "rc-util": "^5.43.0", + "scroll-into-view-if-needed": "^3.1.0", + "throttle-debounce": "^5.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/ant-design" + }, + "peerDependencies": { + "react": ">=16.9.0", + "react-dom": ">=16.9.0" + } + }, + "node_modules/antd-v5/node_modules/compute-scroll-into-view": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/compute-scroll-into-view/-/compute-scroll-into-view-3.1.0.tgz", + "integrity": "sha512-rj8l8pD4bJ1nx+dAkMhV1xB5RuZEyVysfxJqB1pRchh1KVvwOv9b7CGB8ZfjTImVv2oF+sYMUkMZq6Na5Ftmbg==" + }, + "node_modules/antd-v5/node_modules/rc-cascader": { + "version": "3.26.0", + "resolved": "https://registry.npmjs.org/rc-cascader/-/rc-cascader-3.26.0.tgz", + "integrity": "sha512-L1dml383TPSJD1I11YwxuVbmqaJY64psZqFp1ETlgl3LEOwDu76Cyl11fw5dmjJhMlUWwM5dECQfqJgfebhUjg==", + "dependencies": { + "@babel/runtime": "^7.12.5", + "array-tree-filter": "^2.1.0", + "classnames": "^2.3.1", + "rc-select": "~14.14.0", + "rc-tree": "~5.8.1", + "rc-util": "^5.37.0" + }, + "peerDependencies": { + "react": ">=16.9.0", + "react-dom": ">=16.9.0" + } + }, + "node_modules/antd-v5/node_modules/rc-checkbox": { + "version": "3.3.0", + "resolved": "https://registry.npmjs.org/rc-checkbox/-/rc-checkbox-3.3.0.tgz", + "integrity": "sha512-Ih3ZaAcoAiFKJjifzwsGiT/f/quIkxJoklW4yKGho14Olulwn8gN7hOBve0/WGDg5o/l/5mL0w7ff7/YGvefVw==", + "dependencies": { + "@babel/runtime": "^7.10.1", + "classnames": "^2.3.2", + "rc-util": "^5.25.2" + }, + "peerDependencies": { + "react": ">=16.9.0", + "react-dom": ">=16.9.0" + } + }, + "node_modules/antd-v5/node_modules/rc-collapse": { + "version": "3.7.3", + "resolved": "https://registry.npmjs.org/rc-collapse/-/rc-collapse-3.7.3.tgz", + "integrity": "sha512-60FJcdTRn0X5sELF18TANwtVi7FtModq649H11mYF1jh83DniMoM4MqY627sEKRCTm4+WXfGDcB7hY5oW6xhyw==", + "dependencies": { + "@babel/runtime": "^7.10.1", + "classnames": "2.x", + "rc-motion": "^2.3.4", + "rc-util": "^5.27.0" + }, + "peerDependencies": { + "react": ">=16.9.0", + "react-dom": ">=16.9.0" + } + }, + "node_modules/antd-v5/node_modules/rc-dropdown": { + "version": "4.2.0", + "resolved": "https://registry.npmjs.org/rc-dropdown/-/rc-dropdown-4.2.0.tgz", + "integrity": "sha512-odM8Ove+gSh0zU27DUj5cG1gNKg7mLWBYzB5E4nNLrLwBmYEgYP43vHKDGOVZcJSVElQBI0+jTQgjnq0NfLjng==", + "dependencies": { + "@babel/runtime": "^7.18.3", + "@rc-component/trigger": "^2.0.0", + "classnames": "^2.2.6", + "rc-util": "^5.17.0" + }, + "peerDependencies": { + "react": ">=16.11.0", + "react-dom": ">=16.11.0" + } + }, + "node_modules/antd-v5/node_modules/rc-field-form": { + "version": "2.2.1", + 
"resolved": "https://registry.npmjs.org/rc-field-form/-/rc-field-form-2.2.1.tgz", + "integrity": "sha512-uoNqDoR7A4tn4QTSqoWPAzrR7ZwOK5I+vuZ/qdcHtbKx+ZjEsTg7QXm2wk/jalDiSksAQmATxL0T5LJkRREdIA==", + "dependencies": { + "@babel/runtime": "^7.18.0", + "@rc-component/async-validator": "^5.0.3", + "rc-util": "^5.32.2" + }, + "engines": { + "node": ">=8.x" + }, + "peerDependencies": { + "react": ">=16.9.0", + "react-dom": ">=16.9.0" + } + }, + "node_modules/antd-v5/node_modules/rc-input-number": { + "version": "9.1.0", + "resolved": "https://registry.npmjs.org/rc-input-number/-/rc-input-number-9.1.0.tgz", + "integrity": "sha512-NqJ6i25Xn/AgYfVxynlevIhX3FuKlMwIFpucGG1h98SlK32wQwDK0zhN9VY32McOmuaqzftduNYWWooWz8pXQA==", + "dependencies": { + "@babel/runtime": "^7.10.1", + "@rc-component/mini-decimal": "^1.0.1", + "classnames": "^2.2.5", + "rc-input": "~1.5.0", + "rc-util": "^5.40.1" + }, + "peerDependencies": { + "react": ">=16.9.0", + "react-dom": ">=16.9.0" + } + }, + "node_modules/antd-v5/node_modules/rc-mentions": { + "version": "2.14.0", + "resolved": "https://registry.npmjs.org/rc-mentions/-/rc-mentions-2.14.0.tgz", + "integrity": "sha512-qKR59FMuF8PK4ZqsbWX3UuA5P1M/snzyqV6Yt3y1DCFbCEdqUGIBgQp6vEfLCO6Z0RoRFlzXtCeSlBTcDDpg1A==", + "dependencies": { + "@babel/runtime": "^7.22.5", + "@rc-component/trigger": "^2.0.0", + "classnames": "^2.2.6", + "rc-input": "~1.5.0", + "rc-menu": "~9.14.0", + "rc-textarea": "~1.7.0", + "rc-util": "^5.34.1" + }, + "peerDependencies": { + "react": ">=16.9.0", + "react-dom": ">=16.9.0" + } + }, + "node_modules/antd-v5/node_modules/rc-menu": { + "version": "9.14.1", + "resolved": "https://registry.npmjs.org/rc-menu/-/rc-menu-9.14.1.tgz", + "integrity": "sha512-5wlRb3M8S4yGlWhSoEYJ7ZVRElyScdcpUHxgiLxkeig1tEdyKrnED3B2fhpN0Rrpdp9jyhnmZR/Lwq2fH5VvDQ==", + "dependencies": { + "@babel/runtime": "^7.10.1", + "@rc-component/trigger": "^2.0.0", + "classnames": "2.x", + "rc-motion": "^2.4.3", + "rc-overflow": "^1.3.1", + "rc-util": "^5.27.0" + }, + "peerDependencies": { + "react": ">=16.9.0", + "react-dom": ">=16.9.0" + } + }, + "node_modules/antd-v5/node_modules/rc-notification": { + "version": "5.6.0", + "resolved": "https://registry.npmjs.org/rc-notification/-/rc-notification-5.6.0.tgz", + "integrity": "sha512-TGQW5T7waOxLwgJG7fXcw8l7AQiFOjaZ7ISF5PrU526nunHRNcTMuzKihQHaF4E/h/KfOCDk3Mv8eqzbu2e28w==", + "dependencies": { + "@babel/runtime": "^7.10.1", + "classnames": "2.x", + "rc-motion": "^2.9.0", + "rc-util": "^5.20.1" + }, + "engines": { + "node": ">=8.x" + }, + "peerDependencies": { + "react": ">=16.9.0", + "react-dom": ">=16.9.0" + } + }, + "node_modules/antd-v5/node_modules/rc-pagination": { + "version": "4.0.4", + "resolved": "https://registry.npmjs.org/rc-pagination/-/rc-pagination-4.0.4.tgz", + "integrity": "sha512-GGrLT4NgG6wgJpT/hHIpL9nELv27A1XbSZzECIuQBQTVSf4xGKxWr6I/jhpRPauYEWEbWVw22ObG6tJQqwJqWQ==", + "dependencies": { + "@babel/runtime": "^7.10.1", + "classnames": "^2.3.2", + "rc-util": "^5.38.0" + }, + "peerDependencies": { + "react": ">=16.9.0", + "react-dom": ">=16.9.0" + } + }, + "node_modules/antd-v5/node_modules/rc-progress": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/rc-progress/-/rc-progress-4.0.0.tgz", + "integrity": "sha512-oofVMMafOCokIUIBnZLNcOZFsABaUw8PPrf1/y0ZBvKZNpOiu5h4AO9vv11Sw0p4Hb3D0yGWuEattcQGtNJ/aw==", + "dependencies": { + "@babel/runtime": "^7.10.1", + "classnames": "^2.2.6", + "rc-util": "^5.16.1" + }, + "peerDependencies": { + "react": ">=16.9.0", + "react-dom": ">=16.9.0" + } + }, + 
"node_modules/antd-v5/node_modules/rc-rate": { + "version": "2.13.0", + "resolved": "https://registry.npmjs.org/rc-rate/-/rc-rate-2.13.0.tgz", + "integrity": "sha512-oxvx1Q5k5wD30sjN5tqAyWTvJfLNNJn7Oq3IeS4HxWfAiC4BOXMITNAsw7u/fzdtO4MS8Ki8uRLOzcnEuoQiAw==", + "dependencies": { + "@babel/runtime": "^7.10.1", + "classnames": "^2.2.5", + "rc-util": "^5.0.1" + }, + "engines": { + "node": ">=8.x" + }, + "peerDependencies": { + "react": ">=16.9.0", + "react-dom": ">=16.9.0" + } + }, + "node_modules/antd-v5/node_modules/rc-resize-observer": { + "version": "1.4.0", + "resolved": "https://registry.npmjs.org/rc-resize-observer/-/rc-resize-observer-1.4.0.tgz", + "integrity": "sha512-PnMVyRid9JLxFavTjeDXEXo65HCRqbmLBw9xX9gfC4BZiSzbLXKzW3jPz+J0P71pLbD5tBMTT+mkstV5gD0c9Q==", + "dependencies": { + "@babel/runtime": "^7.20.7", + "classnames": "^2.2.1", + "rc-util": "^5.38.0", + "resize-observer-polyfill": "^1.5.1" + }, + "peerDependencies": { + "react": ">=16.9.0", + "react-dom": ">=16.9.0" + } + }, + "node_modules/antd-v5/node_modules/rc-steps": { + "version": "6.0.1", + "resolved": "https://registry.npmjs.org/rc-steps/-/rc-steps-6.0.1.tgz", + "integrity": "sha512-lKHL+Sny0SeHkQKKDJlAjV5oZ8DwCdS2hFhAkIjuQt1/pB81M0cA0ErVFdHq9+jmPmFw1vJB2F5NBzFXLJxV+g==", + "dependencies": { + "@babel/runtime": "^7.16.7", + "classnames": "^2.2.3", + "rc-util": "^5.16.1" + }, + "engines": { + "node": ">=8.x" + }, + "peerDependencies": { + "react": ">=16.9.0", + "react-dom": ">=16.9.0" + } + }, + "node_modules/antd-v5/node_modules/rc-switch": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/rc-switch/-/rc-switch-4.1.0.tgz", + "integrity": "sha512-TI8ufP2Az9oEbvyCeVE4+90PDSljGyuwix3fV58p7HV2o4wBnVToEyomJRVyTaZeqNPAp+vqeo4Wnj5u0ZZQBg==", + "dependencies": { + "@babel/runtime": "^7.21.0", + "classnames": "^2.2.1", + "rc-util": "^5.30.0" + }, + "peerDependencies": { + "react": ">=16.9.0", + "react-dom": ">=16.9.0" + } + }, + "node_modules/antd-v5/node_modules/rc-tabs": { + "version": "15.1.1", + "resolved": "https://registry.npmjs.org/rc-tabs/-/rc-tabs-15.1.1.tgz", + "integrity": "sha512-Tc7bJvpEdkWIVCUL7yQrMNBJY3j44NcyWS48jF/UKMXuUlzaXK+Z/pEL5LjGcTadtPvVmNqA40yv7hmr+tCOAw==", + "dependencies": { + "@babel/runtime": "^7.11.2", + "classnames": "2.x", + "rc-dropdown": "~4.2.0", + "rc-menu": "~9.14.0", + "rc-motion": "^2.6.2", + "rc-resize-observer": "^1.0.0", + "rc-util": "^5.34.1" + }, + "engines": { + "node": ">=8.x" + }, + "peerDependencies": { + "react": ">=16.9.0", + "react-dom": ">=16.9.0" + } + }, + "node_modules/antd-v5/node_modules/rc-textarea": { + "version": "1.7.0", + "resolved": "https://registry.npmjs.org/rc-textarea/-/rc-textarea-1.7.0.tgz", + "integrity": "sha512-UxizYJkWkmxP3zofXgc487QiGyDmhhheDLLjIWbFtDmiru1ls30KpO8odDaPyqNUIy9ugj5djxTEuezIn6t3Jg==", + "dependencies": { + "@babel/runtime": "^7.10.1", + "classnames": "^2.2.1", + "rc-input": "~1.5.0", + "rc-resize-observer": "^1.0.0", + "rc-util": "^5.27.0" + }, + "peerDependencies": { + "react": ">=16.9.0", + "react-dom": ">=16.9.0" + } + }, + "node_modules/antd-v5/node_modules/rc-upload": { + "version": "4.5.2", + "resolved": "https://registry.npmjs.org/rc-upload/-/rc-upload-4.5.2.tgz", + "integrity": "sha512-QO3ne77DwnAPKFn0bA5qJM81QBjQi0e0NHdkvpFyY73Bea2NfITiotqJqVjHgeYPOJu5lLVR32TNGP084aSoXA==", + "dependencies": { + "@babel/runtime": "^7.18.3", + "classnames": "^2.2.5", + "rc-util": "^5.2.0" + }, + "peerDependencies": { + "react": ">=16.9.0", + "react-dom": ">=16.9.0" + } + }, + "node_modules/antd-v5/node_modules/scroll-into-view-if-needed": { 
+ "version": "3.1.0", + "resolved": "https://registry.npmjs.org/scroll-into-view-if-needed/-/scroll-into-view-if-needed-3.1.0.tgz", + "integrity": "sha512-49oNpRjWRvnU8NyGVmUaYG4jtTkNonFZI86MmGRDqBphEK2EXT9gdEUoQPZhuBM8yWHxCWbobltqYO5M4XrUvQ==", + "dependencies": { + "compute-scroll-into-view": "^3.0.2" + } + }, "node_modules/antd/node_modules/@ant-design/colors": { "version": "5.0.1", "resolved": "https://registry.npmjs.org/@ant-design/colors/-/colors-5.0.1.tgz", @@ -26060,39 +26599,6 @@ "react-dom": "*" } }, - "node_modules/antd/node_modules/rc-select/node_modules/rc-overflow": { - "version": "1.2.8", - "resolved": "https://registry.npmjs.org/rc-overflow/-/rc-overflow-1.2.8.tgz", - "integrity": "sha512-QJ0UItckWPQ37ZL1dMEBAdY1dhfTXFL9k6oTTcyydVwoUNMnMqCGqnRNA98axSr/OeDKqR6DVFyi8eA5RQI/uQ==", - "dependencies": { - "@babel/runtime": "^7.11.1", - "classnames": "^2.2.1", - "rc-resize-observer": "^1.0.0", - "rc-util": "^5.19.2" - }, - "peerDependencies": { - "react": ">=16.9.0", - "react-dom": ">=16.9.0" - } - }, - "node_modules/antd/node_modules/rc-select/node_modules/rc-virtual-list": { - "version": "3.4.11", - "resolved": "https://registry.npmjs.org/rc-virtual-list/-/rc-virtual-list-3.4.11.tgz", - "integrity": "sha512-BvUUH60kkeTBPigN5F89HtGaA5jSP4y2aM6cJ4dk9Y42I9yY+h6i08wF6UKeDcxdfOU8j3I5HxkSS/xA77J3wA==", - "dependencies": { - "@babel/runtime": "^7.20.0", - "classnames": "^2.2.6", - "rc-resize-observer": "^1.0.0", - "rc-util": "^5.15.0" - }, - "engines": { - "node": ">=8.x" - }, - "peerDependencies": { - "react": "*", - "react-dom": "*" - } - }, "node_modules/antd/node_modules/rc-slider": { "version": "9.7.5", "resolved": "https://registry.npmjs.org/rc-slider/-/rc-slider-9.7.5.tgz", @@ -26179,24 +26685,6 @@ "react-dom": "*" } }, - "node_modules/antd/node_modules/rc-tree/node_modules/rc-virtual-list": { - "version": "3.4.11", - "resolved": "https://registry.npmjs.org/rc-virtual-list/-/rc-virtual-list-3.4.11.tgz", - "integrity": "sha512-BvUUH60kkeTBPigN5F89HtGaA5jSP4y2aM6cJ4dk9Y42I9yY+h6i08wF6UKeDcxdfOU8j3I5HxkSS/xA77J3wA==", - "dependencies": { - "@babel/runtime": "^7.20.0", - "classnames": "^2.2.6", - "rc-resize-observer": "^1.0.0", - "rc-util": "^5.15.0" - }, - "engines": { - "node": ">=8.x" - }, - "peerDependencies": { - "react": "*", - "react-dom": "*" - } - }, "node_modules/anymatch": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/anymatch/-/anymatch-2.0.0.tgz", @@ -28661,9 +29149,9 @@ } }, "node_modules/classnames": { - "version": "2.3.2", - "resolved": "https://registry.npmjs.org/classnames/-/classnames-2.3.2.tgz", - "integrity": "sha512-CSbhY4cFEJRe6/GQzIk5qXZ4Jeg5pcsP7b5peFSDpffpe1cqjASH/n9UTjBwOp6XpMSTwQ8Za2K5V02ueA7Tmw==" + "version": "2.5.1", + "resolved": "https://registry.npmjs.org/classnames/-/classnames-2.5.1.tgz", + "integrity": "sha512-saHYOzhIQs6wy2sVxTM6bUDsQO4F50V9RQ22qBpEdCW+I+/Wmke2HOl6lS6dTpdxVhb88/I6+Hs+438c3lfUow==" }, "node_modules/clean-css": { "version": "5.3.3", @@ -29835,9 +30323,9 @@ } }, "node_modules/copy-to-clipboard": { - "version": "3.3.1", - "resolved": "https://registry.npmjs.org/copy-to-clipboard/-/copy-to-clipboard-3.3.1.tgz", - "integrity": "sha512-i13qo6kIHTTpCm8/Wup+0b1mVWETvu2kIMzKoK8FpkLkFxlt0znUAHcMzox+T8sPlqtZXq3CulEjQHsYiGFJUw==", + "version": "3.3.3", + "resolved": "https://registry.npmjs.org/copy-to-clipboard/-/copy-to-clipboard-3.3.3.tgz", + "integrity": "sha512-2KV8NhB5JqC3ky0r9PMCAZKbUHSwtEo4CwCs0KXgruG43gX5PMqDEBbVU4OUzw2MuAWUfsuFmWvEKG5QRfSnJA==", "dependencies": { "toggle-selection": "^1.0.6" } @@ -49957,6 +50445,16 
@@ "yallist": "^3.0.2" } }, + "node_modules/luxon": { + "version": "3.4.4", + "resolved": "https://registry.npmjs.org/luxon/-/luxon-3.4.4.tgz", + "integrity": "sha512-zobTr7akeGHnv7eBOXcRgMeCP6+uyYsczwmeRCauvpvaAltgNyTbLH/+VaEAPUeWBT+1GuNmz4wC/6jtQzbbVA==", + "optional": true, + "peer": true, + "engines": { + "node": ">=12" + } + }, "node_modules/lz-string": { "version": "1.5.0", "resolved": "https://registry.npmjs.org/lz-string/-/lz-string-1.5.0.tgz", @@ -57176,6 +57674,14 @@ "teleport": ">=0.2.0" } }, + "node_modules/qrcode.react": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/qrcode.react/-/qrcode.react-3.1.0.tgz", + "integrity": "sha512-oyF+Urr3oAMUG/OiOuONL3HXM+53wvuH3mtIWQrYmsXoAq0DkvZp2RYUWFSMFtbdOpuS++9v+WAkzNVkMlNW6Q==", + "peerDependencies": { + "react": "^16.8.0 || ^17.0.0 || ^18.0.0" + } + }, "node_modules/qs": { "version": "6.5.3", "resolved": "https://registry.npmjs.org/qs/-/qs-6.5.3.tgz", @@ -57452,6 +57958,38 @@ "react-dom": ">=16.9.0" } }, + "node_modules/rc-dialog": { + "version": "9.5.2", + "resolved": "https://registry.npmjs.org/rc-dialog/-/rc-dialog-9.5.2.tgz", + "integrity": "sha512-qVUjc8JukG+j/pNaHVSRa2GO2/KbV2thm7yO4hepQ902eGdYK913sGkwg/fh9yhKYV1ql3BKIN2xnud3rEXAPw==", + "dependencies": { + "@babel/runtime": "^7.10.1", + "@rc-component/portal": "^1.0.0-8", + "classnames": "^2.2.6", + "rc-motion": "^2.3.0", + "rc-util": "^5.21.0" + }, + "peerDependencies": { + "react": ">=16.9.0", + "react-dom": ">=16.9.0" + } + }, + "node_modules/rc-drawer": { + "version": "7.2.0", + "resolved": "https://registry.npmjs.org/rc-drawer/-/rc-drawer-7.2.0.tgz", + "integrity": "sha512-9lOQ7kBekEJRdEpScHvtmEtXnAsy+NGDXiRWc2ZVC7QXAazNVbeT4EraQKYwCME8BJLa8Bxqxvs5swwyOepRwg==", + "dependencies": { + "@babel/runtime": "^7.23.9", + "@rc-component/portal": "^1.1.1", + "classnames": "^2.2.6", + "rc-motion": "^2.6.1", + "rc-util": "^5.38.1" + }, + "peerDependencies": { + "react": ">=16.9.0", + "react-dom": ">=16.9.0" + } + }, "node_modules/rc-dropdown": { "version": "3.2.0", "resolved": "https://registry.npmjs.org/rc-dropdown/-/rc-dropdown-3.2.0.tgz", @@ -57482,6 +58020,37 @@ "react": ">= 16.9.0" } }, + "node_modules/rc-image": { + "version": "7.9.0", + "resolved": "https://registry.npmjs.org/rc-image/-/rc-image-7.9.0.tgz", + "integrity": "sha512-l4zqO5E0quuLMCtdKfBgj4Suv8tIS011F5k1zBBlK25iMjjiNHxA0VeTzGFtUZERSA45gvpXDg8/P6qNLjR25g==", + "dependencies": { + "@babel/runtime": "^7.11.2", + "@rc-component/portal": "^1.0.2", + "classnames": "^2.2.6", + "rc-dialog": "~9.5.2", + "rc-motion": "^2.6.2", + "rc-util": "^5.34.1" + }, + "peerDependencies": { + "react": ">=16.9.0", + "react-dom": ">=16.9.0" + } + }, + "node_modules/rc-input": { + "version": "1.5.1", + "resolved": "https://registry.npmjs.org/rc-input/-/rc-input-1.5.1.tgz", + "integrity": "sha512-+nOzQJDeIfIpNP/SgY45LXSKbuMlp4Yap2y8c+ZpU7XbLmNzUd6+d5/S75sA/52jsVE6S/AkhkkDEAOjIu7i6g==", + "dependencies": { + "@babel/runtime": "^7.11.1", + "classnames": "^2.2.1", + "rc-util": "^5.18.1" + }, + "peerDependencies": { + "react": ">=16.0.0", + "react-dom": ">=16.0.0" + } + }, "node_modules/rc-input-number": { "version": "6.1.2", "resolved": "https://registry.npmjs.org/rc-input-number/-/rc-input-number-6.1.2.tgz", @@ -57533,13 +58102,13 @@ } }, "node_modules/rc-motion": { - "version": "2.4.1", - "resolved": "https://registry.npmjs.org/rc-motion/-/rc-motion-2.4.1.tgz", - "integrity": "sha512-TWLvymfMu8SngPx5MDH8dQ0D2RYbluNTfam4hY/dNNx9RQ3WtGuZ/GXHi2ymLMzH+UNd6EEFYkOuR5JTTtm8Xg==", + "version": "2.9.2", + "resolved": 
"https://registry.npmjs.org/rc-motion/-/rc-motion-2.9.2.tgz", + "integrity": "sha512-fUAhHKLDdkAXIDLH0GYwof3raS58dtNUmzLF2MeiR8o6n4thNpSDQhOqQzWE4WfFZDCi9VEN8n7tiB7czREcyw==", "dependencies": { "@babel/runtime": "^7.11.1", "classnames": "^2.2.1", - "rc-util": "^5.2.1" + "rc-util": "^5.43.0" }, "peerDependencies": { "react": ">=16.9.0", @@ -57564,6 +58133,36 @@ "react-dom": ">=16.9.0" } }, + "node_modules/rc-overflow": { + "version": "1.3.2", + "resolved": "https://registry.npmjs.org/rc-overflow/-/rc-overflow-1.3.2.tgz", + "integrity": "sha512-nsUm78jkYAoPygDAcGZeC2VwIg/IBGSodtOY3pMof4W3M9qRJgqaDYm03ZayHlde3I6ipliAxbN0RUcGf5KOzw==", + "dependencies": { + "@babel/runtime": "^7.11.1", + "classnames": "^2.2.1", + "rc-resize-observer": "^1.0.0", + "rc-util": "^5.37.0" + }, + "peerDependencies": { + "react": ">=16.9.0", + "react-dom": ">=16.9.0" + } + }, + "node_modules/rc-overflow/node_modules/rc-resize-observer": { + "version": "1.4.0", + "resolved": "https://registry.npmjs.org/rc-resize-observer/-/rc-resize-observer-1.4.0.tgz", + "integrity": "sha512-PnMVyRid9JLxFavTjeDXEXo65HCRqbmLBw9xX9gfC4BZiSzbLXKzW3jPz+J0P71pLbD5tBMTT+mkstV5gD0c9Q==", + "dependencies": { + "@babel/runtime": "^7.20.7", + "classnames": "^2.2.1", + "rc-util": "^5.38.0", + "resize-observer-polyfill": "^1.5.1" + }, + "peerDependencies": { + "react": ">=16.9.0", + "react-dom": ">=16.9.0" + } + }, "node_modules/rc-pagination": { "version": "3.1.2", "resolved": "https://registry.npmjs.org/rc-pagination/-/rc-pagination-3.1.2.tgz", @@ -57577,6 +58176,59 @@ "react-dom": ">=16.9.0" } }, + "node_modules/rc-picker": { + "version": "4.5.0", + "resolved": "https://registry.npmjs.org/rc-picker/-/rc-picker-4.5.0.tgz", + "integrity": "sha512-suqz9bzuhBQlf7u+bZd1bJLPzhXpk12w6AjQ9BTPTiFwexVZgUKViG1KNLyfFvW6tCUZZK0HmCCX7JAyM+JnCg==", + "dependencies": { + "@babel/runtime": "^7.10.1", + "@rc-component/trigger": "^2.0.0", + "classnames": "^2.2.1", + "rc-overflow": "^1.3.2", + "rc-resize-observer": "^1.4.0", + "rc-util": "^5.38.1" + }, + "engines": { + "node": ">=8.x" + }, + "peerDependencies": { + "date-fns": ">= 2.x", + "dayjs": ">= 1.x", + "luxon": ">= 3.x", + "moment": ">= 2.x", + "react": ">=16.9.0", + "react-dom": ">=16.9.0" + }, + "peerDependenciesMeta": { + "date-fns": { + "optional": true + }, + "dayjs": { + "optional": true + }, + "luxon": { + "optional": true + }, + "moment": { + "optional": true + } + } + }, + "node_modules/rc-picker/node_modules/rc-resize-observer": { + "version": "1.4.0", + "resolved": "https://registry.npmjs.org/rc-resize-observer/-/rc-resize-observer-1.4.0.tgz", + "integrity": "sha512-PnMVyRid9JLxFavTjeDXEXo65HCRqbmLBw9xX9gfC4BZiSzbLXKzW3jPz+J0P71pLbD5tBMTT+mkstV5gD0c9Q==", + "dependencies": { + "@babel/runtime": "^7.20.7", + "classnames": "^2.2.1", + "rc-util": "^5.38.0", + "resize-observer-polyfill": "^1.5.1" + }, + "peerDependencies": { + "react": ">=16.9.0", + "react-dom": ">=16.9.0" + } + }, "node_modules/rc-progress": { "version": "3.1.1", "resolved": "https://registry.npmjs.org/rc-progress/-/rc-progress-3.1.1.tgz", @@ -57622,6 +58274,59 @@ "react-dom": ">=16.9.0" } }, + "node_modules/rc-segmented": { + "version": "2.3.0", + "resolved": "https://registry.npmjs.org/rc-segmented/-/rc-segmented-2.3.0.tgz", + "integrity": "sha512-I3FtM5Smua/ESXutFfb8gJ8ZPcvFR+qUgeeGFQHBOvRiRKyAk4aBE5nfqrxXx+h8/vn60DQjOt6i4RNtrbOobg==", + "dependencies": { + "@babel/runtime": "^7.11.1", + "classnames": "^2.2.1", + "rc-motion": "^2.4.4", + "rc-util": "^5.17.0" + }, + "peerDependencies": { + "react": ">=16.0.0", + "react-dom": 
">=16.0.0" + } + }, + "node_modules/rc-select": { + "version": "14.14.0", + "resolved": "https://registry.npmjs.org/rc-select/-/rc-select-14.14.0.tgz", + "integrity": "sha512-Uo2wulrjoPPRLCPd7zlK4ZFVJxlTN//yp1xWP/U+TUOQCyXrT+Duvq/Si5OzVcmQyWAUSbsplc2OwNNhvbOeKQ==", + "dependencies": { + "@babel/runtime": "^7.10.1", + "@rc-component/trigger": "^2.1.1", + "classnames": "2.x", + "rc-motion": "^2.0.1", + "rc-overflow": "^1.3.1", + "rc-util": "^5.16.1", + "rc-virtual-list": "^3.5.2" + }, + "engines": { + "node": ">=8.x" + }, + "peerDependencies": { + "react": "*", + "react-dom": "*" + } + }, + "node_modules/rc-slider": { + "version": "10.6.2", + "resolved": "https://registry.npmjs.org/rc-slider/-/rc-slider-10.6.2.tgz", + "integrity": "sha512-FjkoFjyvUQWcBo1F3RgSglky3ar0+qHLM41PlFVYB4Bj3RD8E/Mv7kqMouLFBU+3aFglMzzctAIWRwajEuueSw==", + "dependencies": { + "@babel/runtime": "^7.10.1", + "classnames": "^2.2.5", + "rc-util": "^5.36.0" + }, + "engines": { + "node": ">=8.x" + }, + "peerDependencies": { + "react": ">=16.9.0", + "react-dom": ">=16.9.0" + } + }, "node_modules/rc-steps": { "version": "4.1.3", "resolved": "https://registry.npmjs.org/rc-steps/-/rc-steps-4.1.3.tgz", @@ -57653,6 +58358,41 @@ "react-dom": ">=16.9.0" } }, + "node_modules/rc-table": { + "version": "7.45.7", + "resolved": "https://registry.npmjs.org/rc-table/-/rc-table-7.45.7.tgz", + "integrity": "sha512-wi9LetBL1t1csxyGkMB2p3mCiMt+NDexMlPbXHvQFmBBAsMxrgNSAPwUci2zDLUq9m8QdWc1Nh8suvrpy9mXrg==", + "dependencies": { + "@babel/runtime": "^7.10.1", + "@rc-component/context": "^1.4.0", + "classnames": "^2.2.5", + "rc-resize-observer": "^1.1.0", + "rc-util": "^5.37.0", + "rc-virtual-list": "^3.14.2" + }, + "engines": { + "node": ">=8.x" + }, + "peerDependencies": { + "react": ">=16.9.0", + "react-dom": ">=16.9.0" + } + }, + "node_modules/rc-table/node_modules/rc-resize-observer": { + "version": "1.4.0", + "resolved": "https://registry.npmjs.org/rc-resize-observer/-/rc-resize-observer-1.4.0.tgz", + "integrity": "sha512-PnMVyRid9JLxFavTjeDXEXo65HCRqbmLBw9xX9gfC4BZiSzbLXKzW3jPz+J0P71pLbD5tBMTT+mkstV5gD0c9Q==", + "dependencies": { + "@babel/runtime": "^7.20.7", + "classnames": "^2.2.1", + "rc-util": "^5.38.0", + "resize-observer-polyfill": "^1.5.1" + }, + "peerDependencies": { + "react": ">=16.9.0", + "react-dom": ">=16.9.0" + } + }, "node_modules/rc-tabs": { "version": "11.7.2", "resolved": "https://registry.npmjs.org/rc-tabs/-/rc-tabs-11.7.2.tgz", @@ -57688,19 +58428,72 @@ "react-dom": ">=16.9.0" } }, - "node_modules/rc-trigger": { - "version": "5.2.0", - "resolved": "https://registry.npmjs.org/rc-trigger/-/rc-trigger-5.2.0.tgz", - "integrity": "sha512-fpC1ZkM/IgIIDfF6XHx3Hb2zXy9wvdI5eMh+6DdLygk6Z3HGmkri6ZCXg9a0wfF9AFuzlYTeBLS1uRASZRsnMQ==", + "node_modules/rc-tooltip": { + "version": "6.2.0", + "resolved": "https://registry.npmjs.org/rc-tooltip/-/rc-tooltip-6.2.0.tgz", + "integrity": "sha512-iS/3iOAvtDh9GIx1ulY7EFUXUtktFccNLsARo3NPgLf0QW9oT0w3dA9cYWlhqAKmD+uriEwdWz1kH0Qs4zk2Aw==", "dependencies": { "@babel/runtime": "^7.11.2", + "@rc-component/trigger": "^2.0.0", + "classnames": "^2.3.1" + }, + "peerDependencies": { + "react": ">=16.9.0", + "react-dom": ">=16.9.0" + } + }, + "node_modules/rc-tree": { + "version": "5.8.8", + "resolved": "https://registry.npmjs.org/rc-tree/-/rc-tree-5.8.8.tgz", + "integrity": "sha512-S+mCMWo91m5AJqjz3PdzKilGgbFm7fFJRFiTDOcoRbD7UfMOPnerXwMworiga0O2XIo383UoWuEfeHs1WOltag==", + "dependencies": { + "@babel/runtime": "^7.10.1", + "classnames": "2.x", + "rc-motion": "^2.0.1", + "rc-util": "^5.16.1", + 
"rc-virtual-list": "^3.5.1" + }, + "engines": { + "node": ">=10.x" + }, + "peerDependencies": { + "react": "*", + "react-dom": "*" + } + }, + "node_modules/rc-tree-select": { + "version": "5.21.0", + "resolved": "https://registry.npmjs.org/rc-tree-select/-/rc-tree-select-5.21.0.tgz", + "integrity": "sha512-w+9qEu6zh0G3wt9N/hzWNSnqYH1i9mH1Nqxo0caxLRRFXF5yZWYmpCDoDTMdQM1Y4z3Q5yj08qyrPH/d4AtumA==", + "dependencies": { + "@babel/runtime": "^7.10.1", + "classnames": "2.x", + "rc-select": "~14.14.0", + "rc-tree": "~5.8.1", + "rc-util": "^5.16.1" + }, + "peerDependencies": { + "react": "*", + "react-dom": "*" + } + }, + "node_modules/rc-trigger": { + "version": "5.3.4", + "resolved": "https://registry.npmjs.org/rc-trigger/-/rc-trigger-5.3.4.tgz", + "integrity": "sha512-mQv+vas0TwKcjAO2izNPkqR4j86OemLRmvL2nOzdP9OWNWA1ivoTt5hzFqYNW9zACwmTezRiN8bttrC7cZzYSw==", + "dependencies": { + "@babel/runtime": "^7.18.3", "classnames": "^2.2.6", "rc-align": "^4.0.0", "rc-motion": "^2.0.0", - "rc-util": "^5.5.0" + "rc-util": "^5.19.2" }, "engines": { "node": ">=8.x" + }, + "peerDependencies": { + "react": ">=16.9.0", + "react-dom": ">=16.9.0" } }, "node_modules/rc-upload": { @@ -57718,9 +58511,9 @@ } }, "node_modules/rc-util": { - "version": "5.38.1", - "resolved": "https://registry.npmjs.org/rc-util/-/rc-util-5.38.1.tgz", - "integrity": "sha512-e4ZMs7q9XqwTuhIK7zBIVFltUtMSjphuPPQXHoHlzRzNdOwUxDejo0Zls5HYaJfRKNURcsS/ceKVULlhjBrxng==", + "version": "5.43.0", + "resolved": "https://registry.npmjs.org/rc-util/-/rc-util-5.43.0.tgz", + "integrity": "sha512-AzC7KKOXFqAdIBqdGWepL9Xn7cm3vnAmjlHqUnoQaTMZYhM4VlXGLkkHHxj/BZ7Td0+SOPKB4RGPboBVKT9htw==", "dependencies": { "@babel/runtime": "^7.18.3", "react-is": "^18.2.0" @@ -57735,6 +58528,39 @@ "resolved": "https://registry.npmjs.org/react-is/-/react-is-18.2.0.tgz", "integrity": "sha512-xWGDIW6x921xtzPkhiULtthJHoJvBbF3q26fzloPCK0hsvxtPVelvftw3zjbHWSkR2km9Z+4uxbDDK/6Zw9B8w==" }, + "node_modules/rc-virtual-list": { + "version": "3.14.3", + "resolved": "https://registry.npmjs.org/rc-virtual-list/-/rc-virtual-list-3.14.3.tgz", + "integrity": "sha512-6+6wiEhdqakNBnbRJymgMlh+90qpkgqherTRo1l1cX7mK6F9hWsazPczmP0lA+64yhC9/t+M9Dh5pjvDWimn8A==", + "dependencies": { + "@babel/runtime": "^7.20.0", + "classnames": "^2.2.6", + "rc-resize-observer": "^1.0.0", + "rc-util": "^5.36.0" + }, + "engines": { + "node": ">=8.x" + }, + "peerDependencies": { + "react": ">=16.9.0", + "react-dom": ">=16.9.0" + } + }, + "node_modules/rc-virtual-list/node_modules/rc-resize-observer": { + "version": "1.4.0", + "resolved": "https://registry.npmjs.org/rc-resize-observer/-/rc-resize-observer-1.4.0.tgz", + "integrity": "sha512-PnMVyRid9JLxFavTjeDXEXo65HCRqbmLBw9xX9gfC4BZiSzbLXKzW3jPz+J0P71pLbD5tBMTT+mkstV5gD0c9Q==", + "dependencies": { + "@babel/runtime": "^7.20.7", + "classnames": "^2.2.1", + "rc-util": "^5.38.0", + "resize-observer-polyfill": "^1.5.1" + }, + "peerDependencies": { + "react": ">=16.9.0", + "react-dom": ">=16.9.0" + } + }, "node_modules/re-resizable": { "version": "6.9.11", "resolved": "https://registry.npmjs.org/re-resizable/-/re-resizable-6.9.11.tgz", @@ -63081,6 +63907,14 @@ "integrity": "sha512-WKexMoJj3vEuK0yFEapj8y64V0A6xcuPuK9Gt1d0R+dzCSJc0lHqQytAbSB4cDAK0dWh4T0E2ETkoLE2WZ41OQ==", "dev": true }, + "node_modules/throttle-debounce": { + "version": "5.0.2", + "resolved": "https://registry.npmjs.org/throttle-debounce/-/throttle-debounce-5.0.2.tgz", + "integrity": "sha512-B71/4oyj61iNH0KeCamLuE2rmKuTO5byTOSVwECM5FA7TiAiAW+UqTKZ9ERueC4qvgSttUhdmq1mXC3kJqGX7A==", + "engines": { + "node": 
">=12.22" + } + }, "node_modules/throttleit": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/throttleit/-/throttleit-1.0.0.tgz", @@ -70906,11 +71740,6 @@ "@types/react": "*" } }, - "plugins/plugin-chart-table/node_modules/classnames": { - "version": "2.5.1", - "resolved": "https://registry.npmjs.org/classnames/-/classnames-2.5.1.tgz", - "integrity": "sha512-saHYOzhIQs6wy2sVxTM6bUDsQO4F50V9RQ22qBpEdCW+I+/Wmke2HOl6lS6dTpdxVhb88/I6+Hs+438c3lfUow==" - }, "plugins/plugin-chart-table/node_modules/d3-array": { "version": "2.12.1", "resolved": "https://registry.npmjs.org/d3-array/-/d3-array-2.12.1.tgz", @@ -71072,29 +71901,55 @@ } }, "@ant-design/colors": { - "version": "7.0.0", - "resolved": "https://registry.npmjs.org/@ant-design/colors/-/colors-7.0.0.tgz", - "integrity": "sha512-iVm/9PfGCbC0dSMBrz7oiEXZaaGH7ceU40OJEfKmyuzR9R5CRimJYPlRiFtMQGQcbNMea/ePcoIebi4ASGYXtg==", + "version": "7.0.2", + "resolved": "https://registry.npmjs.org/@ant-design/colors/-/colors-7.0.2.tgz", + "integrity": "sha512-7KJkhTiPiLHSu+LmMJnehfJ6242OCxSlR3xHVBecYxnMW8MS/878NXct1GqYARyL59fyeFdKRxXTfvR9SnDgJg==", "requires": { - "@ctrl/tinycolor": "^3.4.0" + "@ctrl/tinycolor": "^3.6.1" + } + }, + "@ant-design/cssinjs": { + "version": "1.21.0", + "resolved": "https://registry.npmjs.org/@ant-design/cssinjs/-/cssinjs-1.21.0.tgz", + "integrity": "sha512-gIilraPl+9EoKdYxnupxjHB/Q6IHNRjEXszKbDxZdsgv4sAZ9pjkCq8yanDWNvyfjp4leir2OVAJm0vxwKK8YA==", + "requires": { + "@babel/runtime": "^7.11.1", + "@emotion/hash": "^0.8.0", + "@emotion/unitless": "^0.7.5", + "classnames": "^2.3.1", + "csstype": "^3.1.3", + "rc-util": "^5.35.0", + "stylis": "^4.0.13" } }, "@ant-design/icons": { - "version": "5.2.6", - "resolved": "https://registry.npmjs.org/@ant-design/icons/-/icons-5.2.6.tgz", - "integrity": "sha512-4wn0WShF43TrggskBJPRqCD0fcHbzTYjnaoskdiJrVHg86yxoZ8ZUqsXvyn4WUqehRiFKnaclOhqk9w4Ui2KVw==", + "version": "5.3.7", + "resolved": "https://registry.npmjs.org/@ant-design/icons/-/icons-5.3.7.tgz", + "integrity": "sha512-bCPXTAg66f5bdccM4TT21SQBDO1Ek2gho9h3nO9DAKXJP4sq+5VBjrQMSxMVXSB3HyEz+cUbHQ5+6ogxCOpaew==", "requires": { "@ant-design/colors": "^7.0.0", - "@ant-design/icons-svg": "^4.3.0", + "@ant-design/icons-svg": "^4.4.0", "@babel/runtime": "^7.11.2", "classnames": "^2.2.6", "rc-util": "^5.31.1" } }, "@ant-design/icons-svg": { - "version": "4.3.1", - "resolved": "https://registry.npmjs.org/@ant-design/icons-svg/-/icons-svg-4.3.1.tgz", - "integrity": "sha512-4QBZg8ccyC6LPIRii7A0bZUk3+lEDCLnhB+FVsflGdcWPPmV+j3fire4AwwoqHV/BibgvBmR9ZIo4s867smv+g==" + "version": "4.4.2", + "resolved": "https://registry.npmjs.org/@ant-design/icons-svg/-/icons-svg-4.4.2.tgz", + "integrity": "sha512-vHbT+zJEVzllwP+CM+ul7reTEfBR0vgxFe7+lREAsAA7YGsYpboiq2sQNeQeRvh09GfQgs/GyFEvZpJ9cLXpXA==" + }, + "@ant-design/react-slick": { + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/@ant-design/react-slick/-/react-slick-1.1.2.tgz", + "integrity": "sha512-EzlvzE6xQUBrZuuhSAFTdsr4P2bBBHGZwKFemEfq8gIGyIQCxalYfZW/T2ORbtQx5rU69o+WycP3exY/7T1hGA==", + "requires": { + "@babel/runtime": "^7.10.4", + "classnames": "^2.2.5", + "json2mq": "^0.2.0", + "resize-observer-polyfill": "^1.5.1", + "throttle-debounce": "^5.0.0" + } }, "@applitools/core": { "version": "4.12.2", @@ -73414,9 +74269,9 @@ "dev": true }, "@babel/runtime": { - "version": "7.23.9", - "resolved": "https://registry.npmjs.org/@babel/runtime/-/runtime-7.23.9.tgz", - "integrity": "sha512-0CX6F+BI2s9dkUqr08KFrAIZgNFj75rdBU/DjCyYLIaV/quFjkk6T+EJ2LkZHyZTbEV4L5p97mNkUsHl2wLFAw==", + "version": 
"7.24.7", + "resolved": "https://registry.npmjs.org/@babel/runtime/-/runtime-7.24.7.tgz", + "integrity": "sha512-UwgBRMjJP+xv857DCngvqXI3Iq6J4v0wXmwc6sapg+zyhbwmQX67LUEFrkK5tbyJ30jGuG3ZvWpBiB9LCy1kWw==", "requires": { "regenerator-runtime": "^0.14.0" }, @@ -73544,9 +74399,9 @@ } }, "@ctrl/tinycolor": { - "version": "3.4.1", - "resolved": "https://registry.npmjs.org/@ctrl/tinycolor/-/tinycolor-3.4.1.tgz", - "integrity": "sha512-ej5oVy6lykXsvieQtqZxCOaLT+xD4+QNarq78cIYISHmZXshCvROLudpQN3lfL8G0NL7plMSSK+zlyvCaIJ4Iw==" + "version": "3.6.1", + "resolved": "https://registry.npmjs.org/@ctrl/tinycolor/-/tinycolor-3.6.1.tgz", + "integrity": "sha512-SITSV6aIXsuVNV3f3O0f2n/cgyEDWoSqtZMYiAmcsYHydcKrOz3gUxB/iXd/Qf08+IZX4KpgNbvUdMBmWz+kcA==" }, "@cypress/mount-utils": { "version": "1.0.2", @@ -80850,6 +81705,100 @@ "@babel/runtime": "^7.13.10" } }, + "@rc-component/async-validator": { + "version": "5.0.4", + "resolved": "https://registry.npmjs.org/@rc-component/async-validator/-/async-validator-5.0.4.tgz", + "integrity": "sha512-qgGdcVIF604M9EqjNF0hbUTz42bz/RDtxWdWuU5EQe3hi7M8ob54B6B35rOsvX5eSvIHIzT9iH1R3n+hk3CGfg==", + "requires": { + "@babel/runtime": "^7.24.4" + } + }, + "@rc-component/color-picker": { + "version": "1.5.3", + "resolved": "https://registry.npmjs.org/@rc-component/color-picker/-/color-picker-1.5.3.tgz", + "integrity": "sha512-+tGGH3nLmYXTalVe0L8hSZNs73VTP5ueSHwUlDC77KKRaN7G4DS4wcpG5DTDzdcV/Yas+rzA6UGgIyzd8fS4cw==", + "requires": { + "@babel/runtime": "^7.23.6", + "@ctrl/tinycolor": "^3.6.1", + "classnames": "^2.2.6", + "rc-util": "^5.38.1" + } + }, + "@rc-component/context": { + "version": "1.4.0", + "resolved": "https://registry.npmjs.org/@rc-component/context/-/context-1.4.0.tgz", + "integrity": "sha512-kFcNxg9oLRMoL3qki0OMxK+7g5mypjgaaJp/pkOis/6rVxma9nJBF/8kCIuTYHUQNr0ii7MxqE33wirPZLJQ2w==", + "requires": { + "@babel/runtime": "^7.10.1", + "rc-util": "^5.27.0" + } + }, + "@rc-component/mini-decimal": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/@rc-component/mini-decimal/-/mini-decimal-1.1.0.tgz", + "integrity": "sha512-jS4E7T9Li2GuYwI6PyiVXmxTiM6b07rlD9Ge8uGZSCz3WlzcG5ZK7g5bbuKNeZ9pgUuPK/5guV781ujdVpm4HQ==", + "requires": { + "@babel/runtime": "^7.18.0" + } + }, + "@rc-component/mutate-observer": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/@rc-component/mutate-observer/-/mutate-observer-1.1.0.tgz", + "integrity": "sha512-QjrOsDXQusNwGZPf4/qRQasg7UFEj06XiCJ8iuiq/Io7CrHrgVi6Uuetw60WAMG1799v+aM8kyc+1L/GBbHSlw==", + "requires": { + "@babel/runtime": "^7.18.0", + "classnames": "^2.3.2", + "rc-util": "^5.24.4" + } + }, + "@rc-component/portal": { + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/@rc-component/portal/-/portal-1.1.2.tgz", + "integrity": "sha512-6f813C0IsasTZms08kfA8kPAGxbbkYToa8ALaiDIGGECU4i9hj8Plgbx0sNJDrey3EtHO30hmdaxtT0138xZcg==", + "requires": { + "@babel/runtime": "^7.18.0", + "classnames": "^2.3.2", + "rc-util": "^5.24.4" + } + }, + "@rc-component/tour": { + "version": "1.15.0", + "resolved": "https://registry.npmjs.org/@rc-component/tour/-/tour-1.15.0.tgz", + "integrity": "sha512-h6hyILDwL+In9GAgRobwRWihLqqsD7Uft3fZGrJ7L4EiyCoxbnNYwzPXDfz7vNDhWeVyvAWQJj9fJCzpI4+b4g==", + "requires": { + "@babel/runtime": "^7.18.0", + "@rc-component/portal": "^1.0.0-9", + "@rc-component/trigger": "^2.0.0", + "classnames": "^2.3.2", + "rc-util": "^5.24.4" + } + }, + "@rc-component/trigger": { + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/@rc-component/trigger/-/trigger-2.2.0.tgz", + "integrity": 
"sha512-QarBCji02YE9aRFhZgRZmOpXBj0IZutRippsVBv85sxvG4FGk/vRxwAlkn3MS9zK5mwbETd86mAVg2tKqTkdJA==", + "requires": { + "@babel/runtime": "^7.23.2", + "@rc-component/portal": "^1.1.0", + "classnames": "^2.3.2", + "rc-motion": "^2.0.0", + "rc-resize-observer": "^1.3.1", + "rc-util": "^5.38.0" + }, + "dependencies": { + "rc-resize-observer": { + "version": "1.4.0", + "resolved": "https://registry.npmjs.org/rc-resize-observer/-/rc-resize-observer-1.4.0.tgz", + "integrity": "sha512-PnMVyRid9JLxFavTjeDXEXo65HCRqbmLBw9xX9gfC4BZiSzbLXKzW3jPz+J0P71pLbD5tBMTT+mkstV5gD0c9Q==", + "requires": { + "@babel/runtime": "^7.20.7", + "classnames": "^2.2.1", + "rc-util": "^5.38.0", + "resize-observer-polyfill": "^1.5.1" + } + } + } + }, "@react-dnd/asap": { "version": "4.0.0", "resolved": "https://registry.npmjs.org/@react-dnd/asap/-/asap-4.0.0.tgz", @@ -88954,11 +89903,6 @@ "@types/react": "*" } }, - "classnames": { - "version": "2.5.1", - "resolved": "https://registry.npmjs.org/classnames/-/classnames-2.5.1.tgz", - "integrity": "sha512-saHYOzhIQs6wy2sVxTM6bUDsQO4F50V9RQ22qBpEdCW+I+/Wmke2HOl6lS6dTpdxVhb88/I6+Hs+438c3lfUow==" - }, "d3-array": { "version": "2.12.1", "resolved": "https://registry.npmjs.org/d3-array/-/d3-array-2.12.1.tgz", @@ -92510,30 +93454,6 @@ "rc-trigger": "^5.0.4", "rc-util": "^5.9.8", "rc-virtual-list": "^3.2.0" - }, - "dependencies": { - "rc-overflow": { - "version": "1.2.8", - "resolved": "https://registry.npmjs.org/rc-overflow/-/rc-overflow-1.2.8.tgz", - "integrity": "sha512-QJ0UItckWPQ37ZL1dMEBAdY1dhfTXFL9k6oTTcyydVwoUNMnMqCGqnRNA98axSr/OeDKqR6DVFyi8eA5RQI/uQ==", - "requires": { - "@babel/runtime": "^7.11.1", - "classnames": "^2.2.1", - "rc-resize-observer": "^1.0.0", - "rc-util": "^5.19.2" - } - }, - "rc-virtual-list": { - "version": "3.4.11", - "resolved": "https://registry.npmjs.org/rc-virtual-list/-/rc-virtual-list-3.4.11.tgz", - "integrity": "sha512-BvUUH60kkeTBPigN5F89HtGaA5jSP4y2aM6cJ4dk9Y42I9yY+h6i08wF6UKeDcxdfOU8j3I5HxkSS/xA77J3wA==", - "requires": { - "@babel/runtime": "^7.20.0", - "classnames": "^2.2.6", - "rc-resize-observer": "^1.0.0", - "rc-util": "^5.15.0" - } - } } }, "rc-slider": { @@ -92579,19 +93499,6 @@ "rc-motion": "^2.0.1", "rc-util": "^5.0.0", "rc-virtual-list": "^3.0.1" - }, - "dependencies": { - "rc-virtual-list": { - "version": "3.4.11", - "resolved": "https://registry.npmjs.org/rc-virtual-list/-/rc-virtual-list-3.4.11.tgz", - "integrity": "sha512-BvUUH60kkeTBPigN5F89HtGaA5jSP4y2aM6cJ4dk9Y42I9yY+h6i08wF6UKeDcxdfOU8j3I5HxkSS/xA77J3wA==", - "requires": { - "@babel/runtime": "^7.20.0", - "classnames": "^2.2.6", - "rc-resize-observer": "^1.0.0", - "rc-util": "^5.15.0" - } - } } }, "rc-tree-select": { @@ -92608,6 +93515,278 @@ } } }, + "antd-v5": { + "version": "npm:antd@5.18.3", + "resolved": "https://registry.npmjs.org/antd/-/antd-5.18.3.tgz", + "integrity": "sha512-Dm3P8HBxoo/DiR/QZLj5Mk+rQZsSXxCCArSZACHGiklkkjW6klzlebAElOUr9NyDeFX7UnQ6LVk7vznXlnjTqQ==", + "requires": { + "@ant-design/colors": "^7.0.2", + "@ant-design/cssinjs": "^1.21.0", + "@ant-design/icons": "^5.3.7", + "@ant-design/react-slick": "~1.1.2", + "@babel/runtime": "^7.24.7", + "@ctrl/tinycolor": "^3.6.1", + "@rc-component/color-picker": "~1.5.3", + "@rc-component/mutate-observer": "^1.1.0", + "@rc-component/tour": "~1.15.0", + "@rc-component/trigger": "^2.2.0", + "classnames": "^2.5.1", + "copy-to-clipboard": "^3.3.3", + "dayjs": "^1.11.11", + "qrcode.react": "^3.1.0", + "rc-cascader": "~3.26.0", + "rc-checkbox": "~3.3.0", + "rc-collapse": "~3.7.3", + "rc-dialog": "~9.5.2", + "rc-drawer": "~7.2.0", 
+ "rc-dropdown": "~4.2.0", + "rc-field-form": "~2.2.1", + "rc-image": "~7.9.0", + "rc-input": "~1.5.1", + "rc-input-number": "~9.1.0", + "rc-mentions": "~2.14.0", + "rc-menu": "~9.14.0", + "rc-motion": "^2.9.2", + "rc-notification": "~5.6.0", + "rc-pagination": "~4.0.4", + "rc-picker": "~4.5.0", + "rc-progress": "~4.0.0", + "rc-rate": "~2.13.0", + "rc-resize-observer": "^1.4.0", + "rc-segmented": "~2.3.0", + "rc-select": "~14.14.0", + "rc-slider": "~10.6.2", + "rc-steps": "~6.0.1", + "rc-switch": "~4.1.0", + "rc-table": "~7.45.7", + "rc-tabs": "~15.1.1", + "rc-textarea": "~1.7.0", + "rc-tooltip": "~6.2.0", + "rc-tree": "~5.8.8", + "rc-tree-select": "~5.21.0", + "rc-upload": "~4.5.2", + "rc-util": "^5.43.0", + "scroll-into-view-if-needed": "^3.1.0", + "throttle-debounce": "^5.0.0" + }, + "dependencies": { + "compute-scroll-into-view": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/compute-scroll-into-view/-/compute-scroll-into-view-3.1.0.tgz", + "integrity": "sha512-rj8l8pD4bJ1nx+dAkMhV1xB5RuZEyVysfxJqB1pRchh1KVvwOv9b7CGB8ZfjTImVv2oF+sYMUkMZq6Na5Ftmbg==" + }, + "rc-cascader": { + "version": "3.26.0", + "resolved": "https://registry.npmjs.org/rc-cascader/-/rc-cascader-3.26.0.tgz", + "integrity": "sha512-L1dml383TPSJD1I11YwxuVbmqaJY64psZqFp1ETlgl3LEOwDu76Cyl11fw5dmjJhMlUWwM5dECQfqJgfebhUjg==", + "requires": { + "@babel/runtime": "^7.12.5", + "array-tree-filter": "^2.1.0", + "classnames": "^2.3.1", + "rc-select": "~14.14.0", + "rc-tree": "~5.8.1", + "rc-util": "^5.37.0" + } + }, + "rc-checkbox": { + "version": "3.3.0", + "resolved": "https://registry.npmjs.org/rc-checkbox/-/rc-checkbox-3.3.0.tgz", + "integrity": "sha512-Ih3ZaAcoAiFKJjifzwsGiT/f/quIkxJoklW4yKGho14Olulwn8gN7hOBve0/WGDg5o/l/5mL0w7ff7/YGvefVw==", + "requires": { + "@babel/runtime": "^7.10.1", + "classnames": "^2.3.2", + "rc-util": "^5.25.2" + } + }, + "rc-collapse": { + "version": "3.7.3", + "resolved": "https://registry.npmjs.org/rc-collapse/-/rc-collapse-3.7.3.tgz", + "integrity": "sha512-60FJcdTRn0X5sELF18TANwtVi7FtModq649H11mYF1jh83DniMoM4MqY627sEKRCTm4+WXfGDcB7hY5oW6xhyw==", + "requires": { + "@babel/runtime": "^7.10.1", + "classnames": "2.x", + "rc-motion": "^2.3.4", + "rc-util": "^5.27.0" + } + }, + "rc-dropdown": { + "version": "4.2.0", + "resolved": "https://registry.npmjs.org/rc-dropdown/-/rc-dropdown-4.2.0.tgz", + "integrity": "sha512-odM8Ove+gSh0zU27DUj5cG1gNKg7mLWBYzB5E4nNLrLwBmYEgYP43vHKDGOVZcJSVElQBI0+jTQgjnq0NfLjng==", + "requires": { + "@babel/runtime": "^7.18.3", + "@rc-component/trigger": "^2.0.0", + "classnames": "^2.2.6", + "rc-util": "^5.17.0" + } + }, + "rc-field-form": { + "version": "2.2.1", + "resolved": "https://registry.npmjs.org/rc-field-form/-/rc-field-form-2.2.1.tgz", + "integrity": "sha512-uoNqDoR7A4tn4QTSqoWPAzrR7ZwOK5I+vuZ/qdcHtbKx+ZjEsTg7QXm2wk/jalDiSksAQmATxL0T5LJkRREdIA==", + "requires": { + "@babel/runtime": "^7.18.0", + "@rc-component/async-validator": "^5.0.3", + "rc-util": "^5.32.2" + } + }, + "rc-input-number": { + "version": "9.1.0", + "resolved": "https://registry.npmjs.org/rc-input-number/-/rc-input-number-9.1.0.tgz", + "integrity": "sha512-NqJ6i25Xn/AgYfVxynlevIhX3FuKlMwIFpucGG1h98SlK32wQwDK0zhN9VY32McOmuaqzftduNYWWooWz8pXQA==", + "requires": { + "@babel/runtime": "^7.10.1", + "@rc-component/mini-decimal": "^1.0.1", + "classnames": "^2.2.5", + "rc-input": "~1.5.0", + "rc-util": "^5.40.1" + } + }, + "rc-mentions": { + "version": "2.14.0", + "resolved": "https://registry.npmjs.org/rc-mentions/-/rc-mentions-2.14.0.tgz", + "integrity": 
"sha512-qKR59FMuF8PK4ZqsbWX3UuA5P1M/snzyqV6Yt3y1DCFbCEdqUGIBgQp6vEfLCO6Z0RoRFlzXtCeSlBTcDDpg1A==", + "requires": { + "@babel/runtime": "^7.22.5", + "@rc-component/trigger": "^2.0.0", + "classnames": "^2.2.6", + "rc-input": "~1.5.0", + "rc-menu": "~9.14.0", + "rc-textarea": "~1.7.0", + "rc-util": "^5.34.1" + } + }, + "rc-menu": { + "version": "9.14.1", + "resolved": "https://registry.npmjs.org/rc-menu/-/rc-menu-9.14.1.tgz", + "integrity": "sha512-5wlRb3M8S4yGlWhSoEYJ7ZVRElyScdcpUHxgiLxkeig1tEdyKrnED3B2fhpN0Rrpdp9jyhnmZR/Lwq2fH5VvDQ==", + "requires": { + "@babel/runtime": "^7.10.1", + "@rc-component/trigger": "^2.0.0", + "classnames": "2.x", + "rc-motion": "^2.4.3", + "rc-overflow": "^1.3.1", + "rc-util": "^5.27.0" + } + }, + "rc-notification": { + "version": "5.6.0", + "resolved": "https://registry.npmjs.org/rc-notification/-/rc-notification-5.6.0.tgz", + "integrity": "sha512-TGQW5T7waOxLwgJG7fXcw8l7AQiFOjaZ7ISF5PrU526nunHRNcTMuzKihQHaF4E/h/KfOCDk3Mv8eqzbu2e28w==", + "requires": { + "@babel/runtime": "^7.10.1", + "classnames": "2.x", + "rc-motion": "^2.9.0", + "rc-util": "^5.20.1" + } + }, + "rc-pagination": { + "version": "4.0.4", + "resolved": "https://registry.npmjs.org/rc-pagination/-/rc-pagination-4.0.4.tgz", + "integrity": "sha512-GGrLT4NgG6wgJpT/hHIpL9nELv27A1XbSZzECIuQBQTVSf4xGKxWr6I/jhpRPauYEWEbWVw22ObG6tJQqwJqWQ==", + "requires": { + "@babel/runtime": "^7.10.1", + "classnames": "^2.3.2", + "rc-util": "^5.38.0" + } + }, + "rc-progress": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/rc-progress/-/rc-progress-4.0.0.tgz", + "integrity": "sha512-oofVMMafOCokIUIBnZLNcOZFsABaUw8PPrf1/y0ZBvKZNpOiu5h4AO9vv11Sw0p4Hb3D0yGWuEattcQGtNJ/aw==", + "requires": { + "@babel/runtime": "^7.10.1", + "classnames": "^2.2.6", + "rc-util": "^5.16.1" + } + }, + "rc-rate": { + "version": "2.13.0", + "resolved": "https://registry.npmjs.org/rc-rate/-/rc-rate-2.13.0.tgz", + "integrity": "sha512-oxvx1Q5k5wD30sjN5tqAyWTvJfLNNJn7Oq3IeS4HxWfAiC4BOXMITNAsw7u/fzdtO4MS8Ki8uRLOzcnEuoQiAw==", + "requires": { + "@babel/runtime": "^7.10.1", + "classnames": "^2.2.5", + "rc-util": "^5.0.1" + } + }, + "rc-resize-observer": { + "version": "1.4.0", + "resolved": "https://registry.npmjs.org/rc-resize-observer/-/rc-resize-observer-1.4.0.tgz", + "integrity": "sha512-PnMVyRid9JLxFavTjeDXEXo65HCRqbmLBw9xX9gfC4BZiSzbLXKzW3jPz+J0P71pLbD5tBMTT+mkstV5gD0c9Q==", + "requires": { + "@babel/runtime": "^7.20.7", + "classnames": "^2.2.1", + "rc-util": "^5.38.0", + "resize-observer-polyfill": "^1.5.1" + } + }, + "rc-steps": { + "version": "6.0.1", + "resolved": "https://registry.npmjs.org/rc-steps/-/rc-steps-6.0.1.tgz", + "integrity": "sha512-lKHL+Sny0SeHkQKKDJlAjV5oZ8DwCdS2hFhAkIjuQt1/pB81M0cA0ErVFdHq9+jmPmFw1vJB2F5NBzFXLJxV+g==", + "requires": { + "@babel/runtime": "^7.16.7", + "classnames": "^2.2.3", + "rc-util": "^5.16.1" + } + }, + "rc-switch": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/rc-switch/-/rc-switch-4.1.0.tgz", + "integrity": "sha512-TI8ufP2Az9oEbvyCeVE4+90PDSljGyuwix3fV58p7HV2o4wBnVToEyomJRVyTaZeqNPAp+vqeo4Wnj5u0ZZQBg==", + "requires": { + "@babel/runtime": "^7.21.0", + "classnames": "^2.2.1", + "rc-util": "^5.30.0" + } + }, + "rc-tabs": { + "version": "15.1.1", + "resolved": "https://registry.npmjs.org/rc-tabs/-/rc-tabs-15.1.1.tgz", + "integrity": "sha512-Tc7bJvpEdkWIVCUL7yQrMNBJY3j44NcyWS48jF/UKMXuUlzaXK+Z/pEL5LjGcTadtPvVmNqA40yv7hmr+tCOAw==", + "requires": { + "@babel/runtime": "^7.11.2", + "classnames": "2.x", + "rc-dropdown": "~4.2.0", + "rc-menu": "~9.14.0", + "rc-motion": 
"^2.6.2", + "rc-resize-observer": "^1.0.0", + "rc-util": "^5.34.1" + } + }, + "rc-textarea": { + "version": "1.7.0", + "resolved": "https://registry.npmjs.org/rc-textarea/-/rc-textarea-1.7.0.tgz", + "integrity": "sha512-UxizYJkWkmxP3zofXgc487QiGyDmhhheDLLjIWbFtDmiru1ls30KpO8odDaPyqNUIy9ugj5djxTEuezIn6t3Jg==", + "requires": { + "@babel/runtime": "^7.10.1", + "classnames": "^2.2.1", + "rc-input": "~1.5.0", + "rc-resize-observer": "^1.0.0", + "rc-util": "^5.27.0" + } + }, + "rc-upload": { + "version": "4.5.2", + "resolved": "https://registry.npmjs.org/rc-upload/-/rc-upload-4.5.2.tgz", + "integrity": "sha512-QO3ne77DwnAPKFn0bA5qJM81QBjQi0e0NHdkvpFyY73Bea2NfITiotqJqVjHgeYPOJu5lLVR32TNGP084aSoXA==", + "requires": { + "@babel/runtime": "^7.18.3", + "classnames": "^2.2.5", + "rc-util": "^5.2.0" + } + }, + "scroll-into-view-if-needed": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/scroll-into-view-if-needed/-/scroll-into-view-if-needed-3.1.0.tgz", + "integrity": "sha512-49oNpRjWRvnU8NyGVmUaYG4jtTkNonFZI86MmGRDqBphEK2EXT9gdEUoQPZhuBM8yWHxCWbobltqYO5M4XrUvQ==", + "requires": { + "compute-scroll-into-view": "^3.0.2" + } + } + } + }, "anymatch": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/anymatch/-/anymatch-2.0.0.tgz", @@ -94518,9 +95697,9 @@ } }, "classnames": { - "version": "2.3.2", - "resolved": "https://registry.npmjs.org/classnames/-/classnames-2.3.2.tgz", - "integrity": "sha512-CSbhY4cFEJRe6/GQzIk5qXZ4Jeg5pcsP7b5peFSDpffpe1cqjASH/n9UTjBwOp6XpMSTwQ8Za2K5V02ueA7Tmw==" + "version": "2.5.1", + "resolved": "https://registry.npmjs.org/classnames/-/classnames-2.5.1.tgz", + "integrity": "sha512-saHYOzhIQs6wy2sVxTM6bUDsQO4F50V9RQ22qBpEdCW+I+/Wmke2HOl6lS6dTpdxVhb88/I6+Hs+438c3lfUow==" }, "clean-css": { "version": "5.3.3", @@ -95450,9 +96629,9 @@ "dev": true }, "copy-to-clipboard": { - "version": "3.3.1", - "resolved": "https://registry.npmjs.org/copy-to-clipboard/-/copy-to-clipboard-3.3.1.tgz", - "integrity": "sha512-i13qo6kIHTTpCm8/Wup+0b1mVWETvu2kIMzKoK8FpkLkFxlt0znUAHcMzox+T8sPlqtZXq3CulEjQHsYiGFJUw==", + "version": "3.3.3", + "resolved": "https://registry.npmjs.org/copy-to-clipboard/-/copy-to-clipboard-3.3.3.tgz", + "integrity": "sha512-2KV8NhB5JqC3ky0r9PMCAZKbUHSwtEo4CwCs0KXgruG43gX5PMqDEBbVU4OUzw2MuAWUfsuFmWvEKG5QRfSnJA==", "requires": { "toggle-selection": "^1.0.6" } @@ -110677,6 +111856,13 @@ "yallist": "^3.0.2" } }, + "luxon": { + "version": "3.4.4", + "resolved": "https://registry.npmjs.org/luxon/-/luxon-3.4.4.tgz", + "integrity": "sha512-zobTr7akeGHnv7eBOXcRgMeCP6+uyYsczwmeRCauvpvaAltgNyTbLH/+VaEAPUeWBT+1GuNmz4wC/6jtQzbbVA==", + "optional": true, + "peer": true + }, "lz-string": { "version": "1.5.0", "resolved": "https://registry.npmjs.org/lz-string/-/lz-string-1.5.0.tgz", @@ -116009,6 +117195,12 @@ "integrity": "sha512-kV/CThkXo6xyFEZUugw/+pIOywXcDbFYgSct5cT3gqlbkBE1SJdwy6UQoZvodiWF/ckQLZyDE/Bu1M6gVu5lVw==", "dev": true }, + "qrcode.react": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/qrcode.react/-/qrcode.react-3.1.0.tgz", + "integrity": "sha512-oyF+Urr3oAMUG/OiOuONL3HXM+53wvuH3mtIWQrYmsXoAq0DkvZp2RYUWFSMFtbdOpuS++9v+WAkzNVkMlNW6Q==", + "requires": {} + }, "qs": { "version": "6.5.3", "resolved": "https://registry.npmjs.org/qs/-/qs-6.5.3.tgz", @@ -116222,6 +117414,30 @@ "shallowequal": "^1.1.0" } }, + "rc-dialog": { + "version": "9.5.2", + "resolved": "https://registry.npmjs.org/rc-dialog/-/rc-dialog-9.5.2.tgz", + "integrity": "sha512-qVUjc8JukG+j/pNaHVSRa2GO2/KbV2thm7yO4hepQ902eGdYK913sGkwg/fh9yhKYV1ql3BKIN2xnud3rEXAPw==", + 
"requires": { + "@babel/runtime": "^7.10.1", + "@rc-component/portal": "^1.0.0-8", + "classnames": "^2.2.6", + "rc-motion": "^2.3.0", + "rc-util": "^5.21.0" + } + }, + "rc-drawer": { + "version": "7.2.0", + "resolved": "https://registry.npmjs.org/rc-drawer/-/rc-drawer-7.2.0.tgz", + "integrity": "sha512-9lOQ7kBekEJRdEpScHvtmEtXnAsy+NGDXiRWc2ZVC7QXAazNVbeT4EraQKYwCME8BJLa8Bxqxvs5swwyOepRwg==", + "requires": { + "@babel/runtime": "^7.23.9", + "@rc-component/portal": "^1.1.1", + "classnames": "^2.2.6", + "rc-motion": "^2.6.1", + "rc-util": "^5.38.1" + } + }, "rc-dropdown": { "version": "3.2.0", "resolved": "https://registry.npmjs.org/rc-dropdown/-/rc-dropdown-3.2.0.tgz", @@ -116242,6 +117458,29 @@ "rc-util": "^5.0.0" } }, + "rc-image": { + "version": "7.9.0", + "resolved": "https://registry.npmjs.org/rc-image/-/rc-image-7.9.0.tgz", + "integrity": "sha512-l4zqO5E0quuLMCtdKfBgj4Suv8tIS011F5k1zBBlK25iMjjiNHxA0VeTzGFtUZERSA45gvpXDg8/P6qNLjR25g==", + "requires": { + "@babel/runtime": "^7.11.2", + "@rc-component/portal": "^1.0.2", + "classnames": "^2.2.6", + "rc-dialog": "~9.5.2", + "rc-motion": "^2.6.2", + "rc-util": "^5.34.1" + } + }, + "rc-input": { + "version": "1.5.1", + "resolved": "https://registry.npmjs.org/rc-input/-/rc-input-1.5.1.tgz", + "integrity": "sha512-+nOzQJDeIfIpNP/SgY45LXSKbuMlp4Yap2y8c+ZpU7XbLmNzUd6+d5/S75sA/52jsVE6S/AkhkkDEAOjIu7i6g==", + "requires": { + "@babel/runtime": "^7.11.1", + "classnames": "^2.2.1", + "rc-util": "^5.18.1" + } + }, "rc-input-number": { "version": "6.1.2", "resolved": "https://registry.npmjs.org/rc-input-number/-/rc-input-number-6.1.2.tgz", @@ -116282,13 +117521,13 @@ } }, "rc-motion": { - "version": "2.4.1", - "resolved": "https://registry.npmjs.org/rc-motion/-/rc-motion-2.4.1.tgz", - "integrity": "sha512-TWLvymfMu8SngPx5MDH8dQ0D2RYbluNTfam4hY/dNNx9RQ3WtGuZ/GXHi2ymLMzH+UNd6EEFYkOuR5JTTtm8Xg==", + "version": "2.9.2", + "resolved": "https://registry.npmjs.org/rc-motion/-/rc-motion-2.9.2.tgz", + "integrity": "sha512-fUAhHKLDdkAXIDLH0GYwof3raS58dtNUmzLF2MeiR8o6n4thNpSDQhOqQzWE4WfFZDCi9VEN8n7tiB7czREcyw==", "requires": { "@babel/runtime": "^7.11.1", "classnames": "^2.2.1", - "rc-util": "^5.2.1" + "rc-util": "^5.43.0" } }, "rc-notification": { @@ -116302,6 +117541,30 @@ "rc-util": "^5.0.1" } }, + "rc-overflow": { + "version": "1.3.2", + "resolved": "https://registry.npmjs.org/rc-overflow/-/rc-overflow-1.3.2.tgz", + "integrity": "sha512-nsUm78jkYAoPygDAcGZeC2VwIg/IBGSodtOY3pMof4W3M9qRJgqaDYm03ZayHlde3I6ipliAxbN0RUcGf5KOzw==", + "requires": { + "@babel/runtime": "^7.11.1", + "classnames": "^2.2.1", + "rc-resize-observer": "^1.0.0", + "rc-util": "^5.37.0" + }, + "dependencies": { + "rc-resize-observer": { + "version": "1.4.0", + "resolved": "https://registry.npmjs.org/rc-resize-observer/-/rc-resize-observer-1.4.0.tgz", + "integrity": "sha512-PnMVyRid9JLxFavTjeDXEXo65HCRqbmLBw9xX9gfC4BZiSzbLXKzW3jPz+J0P71pLbD5tBMTT+mkstV5gD0c9Q==", + "requires": { + "@babel/runtime": "^7.20.7", + "classnames": "^2.2.1", + "rc-util": "^5.38.0", + "resize-observer-polyfill": "^1.5.1" + } + } + } + }, "rc-pagination": { "version": "3.1.2", "resolved": "https://registry.npmjs.org/rc-pagination/-/rc-pagination-3.1.2.tgz", @@ -116311,6 +117574,32 @@ "classnames": "^2.2.1" } }, + "rc-picker": { + "version": "4.5.0", + "resolved": "https://registry.npmjs.org/rc-picker/-/rc-picker-4.5.0.tgz", + "integrity": "sha512-suqz9bzuhBQlf7u+bZd1bJLPzhXpk12w6AjQ9BTPTiFwexVZgUKViG1KNLyfFvW6tCUZZK0HmCCX7JAyM+JnCg==", + "requires": { + "@babel/runtime": "^7.10.1", + "@rc-component/trigger": "^2.0.0", + 
"classnames": "^2.2.1", + "rc-overflow": "^1.3.2", + "rc-resize-observer": "^1.4.0", + "rc-util": "^5.38.1" + }, + "dependencies": { + "rc-resize-observer": { + "version": "1.4.0", + "resolved": "https://registry.npmjs.org/rc-resize-observer/-/rc-resize-observer-1.4.0.tgz", + "integrity": "sha512-PnMVyRid9JLxFavTjeDXEXo65HCRqbmLBw9xX9gfC4BZiSzbLXKzW3jPz+J0P71pLbD5tBMTT+mkstV5gD0c9Q==", + "requires": { + "@babel/runtime": "^7.20.7", + "classnames": "^2.2.1", + "rc-util": "^5.38.0", + "resize-observer-polyfill": "^1.5.1" + } + } + } + }, "rc-progress": { "version": "3.1.1", "resolved": "https://registry.npmjs.org/rc-progress/-/rc-progress-3.1.1.tgz", @@ -116341,6 +117630,41 @@ "resize-observer-polyfill": "^1.5.1" } }, + "rc-segmented": { + "version": "2.3.0", + "resolved": "https://registry.npmjs.org/rc-segmented/-/rc-segmented-2.3.0.tgz", + "integrity": "sha512-I3FtM5Smua/ESXutFfb8gJ8ZPcvFR+qUgeeGFQHBOvRiRKyAk4aBE5nfqrxXx+h8/vn60DQjOt6i4RNtrbOobg==", + "requires": { + "@babel/runtime": "^7.11.1", + "classnames": "^2.2.1", + "rc-motion": "^2.4.4", + "rc-util": "^5.17.0" + } + }, + "rc-select": { + "version": "14.14.0", + "resolved": "https://registry.npmjs.org/rc-select/-/rc-select-14.14.0.tgz", + "integrity": "sha512-Uo2wulrjoPPRLCPd7zlK4ZFVJxlTN//yp1xWP/U+TUOQCyXrT+Duvq/Si5OzVcmQyWAUSbsplc2OwNNhvbOeKQ==", + "requires": { + "@babel/runtime": "^7.10.1", + "@rc-component/trigger": "^2.1.1", + "classnames": "2.x", + "rc-motion": "^2.0.1", + "rc-overflow": "^1.3.1", + "rc-util": "^5.16.1", + "rc-virtual-list": "^3.5.2" + } + }, + "rc-slider": { + "version": "10.6.2", + "resolved": "https://registry.npmjs.org/rc-slider/-/rc-slider-10.6.2.tgz", + "integrity": "sha512-FjkoFjyvUQWcBo1F3RgSglky3ar0+qHLM41PlFVYB4Bj3RD8E/Mv7kqMouLFBU+3aFglMzzctAIWRwajEuueSw==", + "requires": { + "@babel/runtime": "^7.10.1", + "classnames": "^2.2.5", + "rc-util": "^5.36.0" + } + }, "rc-steps": { "version": "4.1.3", "resolved": "https://registry.npmjs.org/rc-steps/-/rc-steps-4.1.3.tgz", @@ -116361,6 +117685,32 @@ "rc-util": "^5.0.1" } }, + "rc-table": { + "version": "7.45.7", + "resolved": "https://registry.npmjs.org/rc-table/-/rc-table-7.45.7.tgz", + "integrity": "sha512-wi9LetBL1t1csxyGkMB2p3mCiMt+NDexMlPbXHvQFmBBAsMxrgNSAPwUci2zDLUq9m8QdWc1Nh8suvrpy9mXrg==", + "requires": { + "@babel/runtime": "^7.10.1", + "@rc-component/context": "^1.4.0", + "classnames": "^2.2.5", + "rc-resize-observer": "^1.1.0", + "rc-util": "^5.37.0", + "rc-virtual-list": "^3.14.2" + }, + "dependencies": { + "rc-resize-observer": { + "version": "1.4.0", + "resolved": "https://registry.npmjs.org/rc-resize-observer/-/rc-resize-observer-1.4.0.tgz", + "integrity": "sha512-PnMVyRid9JLxFavTjeDXEXo65HCRqbmLBw9xX9gfC4BZiSzbLXKzW3jPz+J0P71pLbD5tBMTT+mkstV5gD0c9Q==", + "requires": { + "@babel/runtime": "^7.20.7", + "classnames": "^2.2.1", + "rc-util": "^5.38.0", + "resize-observer-polyfill": "^1.5.1" + } + } + } + }, "rc-tabs": { "version": "11.7.2", "resolved": "https://registry.npmjs.org/rc-tabs/-/rc-tabs-11.7.2.tgz", @@ -116385,16 +117735,50 @@ "rc-resize-observer": "^0.2.3" } }, - "rc-trigger": { - "version": "5.2.0", - "resolved": "https://registry.npmjs.org/rc-trigger/-/rc-trigger-5.2.0.tgz", - "integrity": "sha512-fpC1ZkM/IgIIDfF6XHx3Hb2zXy9wvdI5eMh+6DdLygk6Z3HGmkri6ZCXg9a0wfF9AFuzlYTeBLS1uRASZRsnMQ==", + "rc-tooltip": { + "version": "6.2.0", + "resolved": "https://registry.npmjs.org/rc-tooltip/-/rc-tooltip-6.2.0.tgz", + "integrity": "sha512-iS/3iOAvtDh9GIx1ulY7EFUXUtktFccNLsARo3NPgLf0QW9oT0w3dA9cYWlhqAKmD+uriEwdWz1kH0Qs4zk2Aw==", "requires": { 
"@babel/runtime": "^7.11.2", + "@rc-component/trigger": "^2.0.0", + "classnames": "^2.3.1" + } + }, + "rc-tree": { + "version": "5.8.8", + "resolved": "https://registry.npmjs.org/rc-tree/-/rc-tree-5.8.8.tgz", + "integrity": "sha512-S+mCMWo91m5AJqjz3PdzKilGgbFm7fFJRFiTDOcoRbD7UfMOPnerXwMworiga0O2XIo383UoWuEfeHs1WOltag==", + "requires": { + "@babel/runtime": "^7.10.1", + "classnames": "2.x", + "rc-motion": "^2.0.1", + "rc-util": "^5.16.1", + "rc-virtual-list": "^3.5.1" + } + }, + "rc-tree-select": { + "version": "5.21.0", + "resolved": "https://registry.npmjs.org/rc-tree-select/-/rc-tree-select-5.21.0.tgz", + "integrity": "sha512-w+9qEu6zh0G3wt9N/hzWNSnqYH1i9mH1Nqxo0caxLRRFXF5yZWYmpCDoDTMdQM1Y4z3Q5yj08qyrPH/d4AtumA==", + "requires": { + "@babel/runtime": "^7.10.1", + "classnames": "2.x", + "rc-select": "~14.14.0", + "rc-tree": "~5.8.1", + "rc-util": "^5.16.1" + } + }, + "rc-trigger": { + "version": "5.3.4", + "resolved": "https://registry.npmjs.org/rc-trigger/-/rc-trigger-5.3.4.tgz", + "integrity": "sha512-mQv+vas0TwKcjAO2izNPkqR4j86OemLRmvL2nOzdP9OWNWA1ivoTt5hzFqYNW9zACwmTezRiN8bttrC7cZzYSw==", + "requires": { + "@babel/runtime": "^7.18.3", "classnames": "^2.2.6", "rc-align": "^4.0.0", "rc-motion": "^2.0.0", - "rc-util": "^5.5.0" + "rc-util": "^5.19.2" } }, "rc-upload": { @@ -116408,9 +117792,9 @@ } }, "rc-util": { - "version": "5.38.1", - "resolved": "https://registry.npmjs.org/rc-util/-/rc-util-5.38.1.tgz", - "integrity": "sha512-e4ZMs7q9XqwTuhIK7zBIVFltUtMSjphuPPQXHoHlzRzNdOwUxDejo0Zls5HYaJfRKNURcsS/ceKVULlhjBrxng==", + "version": "5.43.0", + "resolved": "https://registry.npmjs.org/rc-util/-/rc-util-5.43.0.tgz", + "integrity": "sha512-AzC7KKOXFqAdIBqdGWepL9Xn7cm3vnAmjlHqUnoQaTMZYhM4VlXGLkkHHxj/BZ7Td0+SOPKB4RGPboBVKT9htw==", "requires": { "@babel/runtime": "^7.18.3", "react-is": "^18.2.0" @@ -116423,6 +117807,30 @@ } } }, + "rc-virtual-list": { + "version": "3.14.3", + "resolved": "https://registry.npmjs.org/rc-virtual-list/-/rc-virtual-list-3.14.3.tgz", + "integrity": "sha512-6+6wiEhdqakNBnbRJymgMlh+90qpkgqherTRo1l1cX7mK6F9hWsazPczmP0lA+64yhC9/t+M9Dh5pjvDWimn8A==", + "requires": { + "@babel/runtime": "^7.20.0", + "classnames": "^2.2.6", + "rc-resize-observer": "^1.0.0", + "rc-util": "^5.36.0" + }, + "dependencies": { + "rc-resize-observer": { + "version": "1.4.0", + "resolved": "https://registry.npmjs.org/rc-resize-observer/-/rc-resize-observer-1.4.0.tgz", + "integrity": "sha512-PnMVyRid9JLxFavTjeDXEXo65HCRqbmLBw9xX9gfC4BZiSzbLXKzW3jPz+J0P71pLbD5tBMTT+mkstV5gD0c9Q==", + "requires": { + "@babel/runtime": "^7.20.7", + "classnames": "^2.2.1", + "rc-util": "^5.38.0", + "resize-observer-polyfill": "^1.5.1" + } + } + } + }, "re-resizable": { "version": "6.9.11", "resolved": "https://registry.npmjs.org/re-resizable/-/re-resizable-6.9.11.tgz", @@ -120542,6 +121950,11 @@ "integrity": "sha512-WKexMoJj3vEuK0yFEapj8y64V0A6xcuPuK9Gt1d0R+dzCSJc0lHqQytAbSB4cDAK0dWh4T0E2ETkoLE2WZ41OQ==", "dev": true }, + "throttle-debounce": { + "version": "5.0.2", + "resolved": "https://registry.npmjs.org/throttle-debounce/-/throttle-debounce-5.0.2.tgz", + "integrity": "sha512-B71/4oyj61iNH0KeCamLuE2rmKuTO5byTOSVwECM5FA7TiAiAW+UqTKZ9ERueC4qvgSttUhdmq1mXC3kJqGX7A==" + }, "throttleit": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/throttleit/-/throttleit-1.0.0.tgz", diff --git a/superset-frontend/package.json b/superset-frontend/package.json index 341db057e08b..fa6724c4d554 100644 --- a/superset-frontend/package.json +++ b/superset-frontend/package.json @@ -122,6 +122,7 @@ "abortcontroller-polyfill": 
"^1.1.9", "ace-builds": "^1.4.14", "antd": "4.10.3", + "antd-v5": "npm:antd@^5.18.0", "babel-plugin-typescript-to-proptypes": "^2.0.0", "bootstrap": "^3.4.1", "brace": "^0.11.1", @@ -159,6 +160,7 @@ "polished": "^4.3.1", "prop-types": "^15.7.2", "query-string": "^6.13.7", + "rc-trigger": "^5.3.4", "re-resizable": "^6.9.11", "react": "^16.13.1", "react-ace": "^10.1.0", diff --git a/superset-frontend/src/components/AntdThemeProvider/index.tsx b/superset-frontend/src/components/AntdThemeProvider/index.tsx new file mode 100644 index 000000000000..03cc8e2c488a --- /dev/null +++ b/superset-frontend/src/components/AntdThemeProvider/index.tsx @@ -0,0 +1,27 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +import { ConfigProvider, type ConfigProviderProps } from 'antd-v5'; +import { getTheme, ThemeType } from 'src/theme/index'; + +export const AntdThemeProvider = ({ theme, children }: ConfigProviderProps) => ( + + {children} + +); diff --git a/superset-frontend/src/theme/index.ts b/superset-frontend/src/theme/index.ts new file mode 100644 index 000000000000..d31dadc661a9 --- /dev/null +++ b/superset-frontend/src/theme/index.ts @@ -0,0 +1,69 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +import { type ThemeConfig } from 'antd-v5'; +import { theme as supersetTheme } from 'src/preamble'; +import { lightAlgorithm } from './light'; + +export enum ThemeType { + LIGHT = 'light', +} + +const themes = { + [ThemeType.LIGHT]: lightAlgorithm, +}; + +const baseConfig: ThemeConfig = { + token: { + borderRadius: supersetTheme.borderRadius, + colorBgBase: supersetTheme.colors.primary.light4, + colorError: supersetTheme.colors.error.base, + colorInfo: supersetTheme.colors.info.base, + colorLink: supersetTheme.colors.grayscale.dark1, + colorPrimary: supersetTheme.colors.primary.base, + colorSuccess: supersetTheme.colors.success.base, + colorTextBase: supersetTheme.colors.grayscale.dark2, + colorWarning: supersetTheme.colors.warning.base, + controlHeight: supersetTheme.gridUnit * 32, + fontFamily: supersetTheme.typography.families.sansSerif, + fontFamilyCode: supersetTheme.typography.families.monospace, + fontSize: supersetTheme.typography.sizes.m, + lineType: 'solid', + lineWidth: 1, + sizeStep: supersetTheme.gridUnit, + sizeUnit: supersetTheme.gridUnit, + zIndexBase: 0, + zIndexPopupBase: supersetTheme.zIndex.max, + }, + components: { + Badge: { + paddingXS: supersetTheme.gridUnit * 2, + }, + Card: { + colorBgContainer: supersetTheme.colors.grayscale.light4, + paddingLG: supersetTheme.gridUnit * 6, + fontWeightStrong: supersetTheme.typography.weights.medium, + }, + }, +}; + +export const getTheme = (themeType?: ThemeType) => ({ + ...baseConfig, + algorithm: themes[themeType || ThemeType.LIGHT], +}); diff --git a/superset-frontend/src/theme/light.ts b/superset-frontend/src/theme/light.ts new file mode 100644 index 000000000000..61591d99071a --- /dev/null +++ b/superset-frontend/src/theme/light.ts @@ -0,0 +1,119 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +import { type MappingAlgorithm, theme } from 'antd-v5'; +import { theme as supersetTheme } from 'src/preamble'; + +export const lightAlgorithm: MappingAlgorithm = seedToken => { + const defaultTokens = theme.defaultAlgorithm(seedToken); + return { + // Map Tokens + ...defaultTokens, + borderRadiusLG: supersetTheme.borderRadius, + borderRadiusOuter: supersetTheme.borderRadius, + borderRadiusSM: supersetTheme.borderRadius, + borderRadiusXS: supersetTheme.borderRadius, + + colorBgContainer: supersetTheme.colors.primary.light4, + colorBgElevated: supersetTheme.colors.primary.base, + colorBgLayout: supersetTheme.colors.grayscale.light4, + colorBgMask: supersetTheme.colors.grayscale.light2, + colorBgSpotlight: supersetTheme.colors.grayscale.dark1, + + colorBorder: supersetTheme.colors.grayscale.light2, + colorBorderSecondary: supersetTheme.colors.grayscale.light3, + + colorErrorActive: supersetTheme.colors.error.dark1, + colorErrorBg: supersetTheme.colors.error.light2, + colorErrorBgActive: supersetTheme.colors.error.light1, + colorErrorBgHover: supersetTheme.colors.error.light2, + colorErrorBorder: supersetTheme.colors.error.light1, + colorErrorBorderHover: supersetTheme.colors.error.light1, + colorErrorHover: supersetTheme.colors.error.base, + colorErrorText: supersetTheme.colors.error.base, + colorErrorTextActive: supersetTheme.colors.error.dark1, + colorErrorTextHover: supersetTheme.colors.error.base, + + colorFill: supersetTheme.colors.grayscale.light4, + colorFillSecondary: supersetTheme.colors.grayscale.light2, + colorFillTertiary: supersetTheme.colors.grayscale.light3, + + colorInfoActive: supersetTheme.colors.info.dark1, + colorInfoBg: supersetTheme.colors.info.light2, + colorInfoBgHover: supersetTheme.colors.info.light1, + colorInfoBorder: supersetTheme.colors.info.light1, + colorInfoBorderHover: supersetTheme.colors.info.dark1, + colorInfoHover: supersetTheme.colors.info.dark1, + colorInfoText: supersetTheme.colors.info.dark1, + colorInfoTextActive: supersetTheme.colors.info.dark2, + colorInfoTextHover: supersetTheme.colors.info.dark1, + + colorLinkActive: supersetTheme.colors.info.dark2, + colorLinkHover: supersetTheme.colors.info.dark1, + + colorPrimaryActive: supersetTheme.colors.primary.dark2, + colorPrimaryBg: supersetTheme.colors.primary.light4, + colorPrimaryBgHover: supersetTheme.colors.primary.light3, + colorPrimaryBorder: supersetTheme.colors.primary.light2, + colorPrimaryBorderHover: supersetTheme.colors.primary.light1, + colorPrimaryHover: supersetTheme.colors.primary.dark1, + colorPrimaryText: supersetTheme.colors.primary.dark1, + colorPrimaryTextActive: supersetTheme.colors.primary.dark2, + colorPrimaryTextHover: supersetTheme.colors.primary.dark1, + + colorSuccessActive: supersetTheme.colors.success.dark1, + colorSuccessBg: supersetTheme.colors.success.light2, + colorSuccessBgHover: supersetTheme.colors.success.light1, + colorSuccessBorder: supersetTheme.colors.success.light1, + colorSuccessBorderHover: supersetTheme.colors.success.dark1, + colorSuccessHover: supersetTheme.colors.success.dark1, + colorSuccessText: supersetTheme.colors.success.dark1, + colorSuccessTextActive: supersetTheme.colors.success.dark2, + colorSuccessTextHover: supersetTheme.colors.success.dark1, + + colorText: supersetTheme.colors.grayscale.dark2, + colorTextQuaternary: supersetTheme.colors.grayscale.light1, + colorTextSecondary: supersetTheme.colors.text.label, + colorTextTertiary: supersetTheme.colors.text.help, + + colorWarningActive: supersetTheme.colors.warning.dark1, + 
colorWarningBg: supersetTheme.colors.warning.light2, + colorWarningBgHover: supersetTheme.colors.warning.light1, + colorWarningBorder: supersetTheme.colors.warning.light1, + colorWarningBorderHover: supersetTheme.colors.warning.dark1, + colorWarningHover: supersetTheme.colors.warning.dark1, + colorWarningText: supersetTheme.colors.warning.dark1, + colorWarningTextActive: supersetTheme.colors.warning.dark2, + colorWarningTextHover: supersetTheme.colors.warning.dark1, + + colorWhite: supersetTheme.colors.grayscale.light5, + + fontSizeHeading1: supersetTheme.typography.sizes.xxl, + fontSizeHeading2: supersetTheme.typography.sizes.xl, + fontSizeHeading3: supersetTheme.typography.sizes.l, + fontSizeHeading4: supersetTheme.typography.sizes.m, + fontSizeHeading5: supersetTheme.typography.sizes.s, + + fontSizeLG: supersetTheme.typography.sizes.l, + fontSizeSM: supersetTheme.typography.sizes.s, + fontSizeXL: supersetTheme.typography.sizes.xl, + + lineWidthBold: supersetTheme.gridUnit / 2, + }; +}; diff --git a/superset-frontend/src/views/RootContextProviders.tsx b/superset-frontend/src/views/RootContextProviders.tsx index 12bdd7d86f4d..c0bc5c3af140 100644 --- a/superset-frontend/src/views/RootContextProviders.tsx +++ b/superset-frontend/src/views/RootContextProviders.tsx @@ -17,7 +17,6 @@ * under the License. */ -import { FC } from 'react'; import { Route } from 'react-router-dom'; import { getExtensionsRegistry, ThemeProvider } from '@superset-ui/core'; import { Provider as ReduxProvider } from 'react-redux'; @@ -25,6 +24,7 @@ import { QueryParamProvider } from 'use-query-params'; import { DndProvider } from 'react-dnd'; import { HTML5Backend } from 'react-dnd-html5-backend'; import getBootstrapData from 'src/utils/getBootstrapData'; +import { AntdThemeProvider } from '../components/AntdThemeProvider'; import { store } from './store'; import FlashProvider from '../components/FlashProvider'; import { theme } from '../preamble'; @@ -35,35 +35,37 @@ const { common } = getBootstrapData(); const extensionsRegistry = getExtensionsRegistry(); -export const RootContextProviders: FC = ({ children }) => { +export const RootContextProviders: React.FC = ({ children }) => { const RootContextProviderExtension = extensionsRegistry.get( 'root.context.provider', ); return ( - - - - - - - {RootContextProviderExtension ? ( - - {children} - - ) : ( - children - )} - - - - - - + + + + + + + + {RootContextProviderExtension ? ( + + {children} + + ) : ( + children + )} + + + + + + + ); }; From 66bc8ceddd69d4f9bf6b854ec38054fdacb6aa1d Mon Sep 17 00:00:00 2001 From: Geido <60598000+geido@users.noreply.github.com> Date: Fri, 28 Jun 2024 16:19:58 +0200 Subject: [PATCH 10/31] chore(Table): Add aria-label to Table page size selector (#29391) --- .../src/DataTable/components/SelectPageSize.tsx | 7 ++++++- .../plugins/plugin-chart-table/src/TableChart.tsx | 9 +++++++-- 2 files changed, 13 insertions(+), 3 deletions(-) diff --git a/superset-frontend/plugins/plugin-chart-table/src/DataTable/components/SelectPageSize.tsx b/superset-frontend/plugins/plugin-chart-table/src/DataTable/components/SelectPageSize.tsx index d97d6411b789..02052f103041 100644 --- a/superset-frontend/plugins/plugin-chart-table/src/DataTable/components/SelectPageSize.tsx +++ b/superset-frontend/plugins/plugin-chart-table/src/DataTable/components/SelectPageSize.tsx @@ -48,8 +48,13 @@ function DefaultSelectRenderer({ const [size, text] = Array.isArray(option) ? option : [option, option]; + const sizeLabel = size === 0 ? 
t('all') : size; return ( - ); diff --git a/superset-frontend/plugins/plugin-chart-table/src/TableChart.tsx b/superset-frontend/plugins/plugin-chart-table/src/TableChart.tsx index 37bdbed9d1c7..a87371f440f0 100644 --- a/superset-frontend/plugins/plugin-chart-table/src/TableChart.tsx +++ b/superset-frontend/plugins/plugin-chart-table/src/TableChart.tsx @@ -181,10 +181,10 @@ function SearchInput({ count, value, onChange }: SearchInputProps) { {t('Search')}{' '} @@ -211,8 +211,13 @@ function SelectPageSize({ const [size, text] = Array.isArray(option) ? option : [option, option]; + const sizeLabel = size === 0 ? t('all') : size; return ( - ); From ed3c66b6a6f8b3ebc9092faa666c84d445b2ea8a Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Fri, 28 Jun 2024 09:30:58 -0600 Subject: [PATCH 11/31] chore(deps): bump scroll-into-view-if-needed from 2.2.28 to 3.1.0 in /superset-frontend (#28816) Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- superset-frontend/package-lock.json | 82 ++++++++++++++--------------- superset-frontend/package.json | 2 +- 2 files changed, 42 insertions(+), 42 deletions(-) diff --git a/superset-frontend/package-lock.json b/superset-frontend/package-lock.json index 07871cab9be2..03cb16269688 100644 --- a/superset-frontend/package-lock.json +++ b/superset-frontend/package-lock.json @@ -133,7 +133,7 @@ "regenerator-runtime": "^0.13.5", "rimraf": "^3.0.2", "rison": "^0.1.1", - "scroll-into-view-if-needed": "^2.2.28", + "scroll-into-view-if-needed": "^3.1.0", "shortid": "^2.2.16", "tinycolor2": "^1.4.2", "urijs": "^1.19.8", @@ -26145,11 +26145,6 @@ "react-dom": ">=16.9.0" } }, - "node_modules/antd-v5/node_modules/compute-scroll-into-view": { - "version": "3.1.0", - "resolved": "https://registry.npmjs.org/compute-scroll-into-view/-/compute-scroll-into-view-3.1.0.tgz", - "integrity": "sha512-rj8l8pD4bJ1nx+dAkMhV1xB5RuZEyVysfxJqB1pRchh1KVvwOv9b7CGB8ZfjTImVv2oF+sYMUkMZq6Na5Ftmbg==" - }, "node_modules/antd-v5/node_modules/rc-cascader": { "version": "3.26.0", "resolved": "https://registry.npmjs.org/rc-cascader/-/rc-cascader-3.26.0.tgz", @@ -26439,14 +26434,6 @@ "react-dom": ">=16.9.0" } }, - "node_modules/antd-v5/node_modules/scroll-into-view-if-needed": { - "version": "3.1.0", - "resolved": "https://registry.npmjs.org/scroll-into-view-if-needed/-/scroll-into-view-if-needed-3.1.0.tgz", - "integrity": "sha512-49oNpRjWRvnU8NyGVmUaYG4jtTkNonFZI86MmGRDqBphEK2EXT9gdEUoQPZhuBM8yWHxCWbobltqYO5M4XrUvQ==", - "dependencies": { - "compute-scroll-into-view": "^3.0.2" - } - }, "node_modules/antd/node_modules/@ant-design/colors": { "version": "5.0.1", "resolved": "https://registry.npmjs.org/@ant-design/colors/-/colors-5.0.1.tgz", @@ -26497,6 +26484,11 @@ "react": ">=16.9.0" } }, + "node_modules/antd/node_modules/compute-scroll-into-view": { + "version": "1.0.20", + "resolved": "https://registry.npmjs.org/compute-scroll-into-view/-/compute-scroll-into-view-1.0.20.tgz", + "integrity": "sha512-UCB0ioiyj8CRjtrvaceBLqqhZCVP+1B8+NWQhmdsm0VXOJtobBCf1dBQmebCCo34qZmUwZfIH2MZLqNHazrfjg==" + }, "node_modules/antd/node_modules/rc-dialog": { "version": "8.5.3", "resolved": "https://registry.npmjs.org/rc-dialog/-/rc-dialog-8.5.3.tgz", @@ -26685,6 +26677,14 @@ "react-dom": "*" } }, + "node_modules/antd/node_modules/scroll-into-view-if-needed": { + "version": "2.2.31", + "resolved": "https://registry.npmjs.org/scroll-into-view-if-needed/-/scroll-into-view-if-needed-2.2.31.tgz", + 
"integrity": "sha512-dGCXy99wZQivjmjIqihaBQNjryrz5rueJY7eHfTdyWEiR4ttYpsajb14rn9s5d4DY4EcY6+4+U/maARBXJedkA==", + "dependencies": { + "compute-scroll-into-view": "^1.0.20" + } + }, "node_modules/anymatch": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/anymatch/-/anymatch-2.0.0.tgz", @@ -29823,9 +29823,9 @@ } }, "node_modules/compute-scroll-into-view": { - "version": "1.0.17", - "resolved": "https://registry.npmjs.org/compute-scroll-into-view/-/compute-scroll-into-view-1.0.17.tgz", - "integrity": "sha512-j4dx+Fb0URmzbwwMUrhqWM2BEWHdFGx+qZ9qqASHRPqvTYdqvWnHg0H1hIbcyLnvgnoNAVMlwkepyqM3DaIFUg==" + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/compute-scroll-into-view/-/compute-scroll-into-view-3.1.0.tgz", + "integrity": "sha512-rj8l8pD4bJ1nx+dAkMhV1xB5RuZEyVysfxJqB1pRchh1KVvwOv9b7CGB8ZfjTImVv2oF+sYMUkMZq6Na5Ftmbg==" }, "node_modules/concat-map": { "version": "0.0.1", @@ -61308,11 +61308,11 @@ } }, "node_modules/scroll-into-view-if-needed": { - "version": "2.2.28", - "resolved": "https://registry.npmjs.org/scroll-into-view-if-needed/-/scroll-into-view-if-needed-2.2.28.tgz", - "integrity": "sha512-8LuxJSuFVc92+0AdNv4QOxRL4Abeo1DgLnGNkn1XlaujPH/3cCFz3QI60r2VNu4obJJROzgnIUw5TKQkZvZI1w==", + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/scroll-into-view-if-needed/-/scroll-into-view-if-needed-3.1.0.tgz", + "integrity": "sha512-49oNpRjWRvnU8NyGVmUaYG4jtTkNonFZI86MmGRDqBphEK2EXT9gdEUoQPZhuBM8yWHxCWbobltqYO5M4XrUvQ==", "dependencies": { - "compute-scroll-into-view": "^1.0.17" + "compute-scroll-into-view": "^3.0.2" } }, "node_modules/secure-json-parse": { @@ -93384,6 +93384,11 @@ "resize-observer-polyfill": "^1.5.0" } }, + "compute-scroll-into-view": { + "version": "1.0.20", + "resolved": "https://registry.npmjs.org/compute-scroll-into-view/-/compute-scroll-into-view-1.0.20.tgz", + "integrity": "sha512-UCB0ioiyj8CRjtrvaceBLqqhZCVP+1B8+NWQhmdsm0VXOJtobBCf1dBQmebCCo34qZmUwZfIH2MZLqNHazrfjg==" + }, "rc-dialog": { "version": "8.5.3", "resolved": "https://registry.npmjs.org/rc-dialog/-/rc-dialog-8.5.3.tgz", @@ -93512,6 +93517,14 @@ "rc-tree": "^4.0.0", "rc-util": "^5.0.5" } + }, + "scroll-into-view-if-needed": { + "version": "2.2.31", + "resolved": "https://registry.npmjs.org/scroll-into-view-if-needed/-/scroll-into-view-if-needed-2.2.31.tgz", + "integrity": "sha512-dGCXy99wZQivjmjIqihaBQNjryrz5rueJY7eHfTdyWEiR4ttYpsajb14rn9s5d4DY4EcY6+4+U/maARBXJedkA==", + "requires": { + "compute-scroll-into-view": "^1.0.20" + } } } }, @@ -93570,11 +93583,6 @@ "throttle-debounce": "^5.0.0" }, "dependencies": { - "compute-scroll-into-view": { - "version": "3.1.0", - "resolved": "https://registry.npmjs.org/compute-scroll-into-view/-/compute-scroll-into-view-3.1.0.tgz", - "integrity": "sha512-rj8l8pD4bJ1nx+dAkMhV1xB5RuZEyVysfxJqB1pRchh1KVvwOv9b7CGB8ZfjTImVv2oF+sYMUkMZq6Na5Ftmbg==" - }, "rc-cascader": { "version": "3.26.0", "resolved": "https://registry.npmjs.org/rc-cascader/-/rc-cascader-3.26.0.tgz", @@ -93776,14 +93784,6 @@ "classnames": "^2.2.5", "rc-util": "^5.2.0" } - }, - "scroll-into-view-if-needed": { - "version": "3.1.0", - "resolved": "https://registry.npmjs.org/scroll-into-view-if-needed/-/scroll-into-view-if-needed-3.1.0.tgz", - "integrity": "sha512-49oNpRjWRvnU8NyGVmUaYG4jtTkNonFZI86MmGRDqBphEK2EXT9gdEUoQPZhuBM8yWHxCWbobltqYO5M4XrUvQ==", - "requires": { - "compute-scroll-into-view": "^3.0.2" - } } } }, @@ -96227,9 +96227,9 @@ } }, "compute-scroll-into-view": { - "version": "1.0.17", - "resolved": 
"https://registry.npmjs.org/compute-scroll-into-view/-/compute-scroll-into-view-1.0.17.tgz", - "integrity": "sha512-j4dx+Fb0URmzbwwMUrhqWM2BEWHdFGx+qZ9qqASHRPqvTYdqvWnHg0H1hIbcyLnvgnoNAVMlwkepyqM3DaIFUg==" + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/compute-scroll-into-view/-/compute-scroll-into-view-3.1.0.tgz", + "integrity": "sha512-rj8l8pD4bJ1nx+dAkMhV1xB5RuZEyVysfxJqB1pRchh1KVvwOv9b7CGB8ZfjTImVv2oF+sYMUkMZq6Na5Ftmbg==" }, "concat-map": { "version": "0.0.1", @@ -119940,11 +119940,11 @@ "peer": true }, "scroll-into-view-if-needed": { - "version": "2.2.28", - "resolved": "https://registry.npmjs.org/scroll-into-view-if-needed/-/scroll-into-view-if-needed-2.2.28.tgz", - "integrity": "sha512-8LuxJSuFVc92+0AdNv4QOxRL4Abeo1DgLnGNkn1XlaujPH/3cCFz3QI60r2VNu4obJJROzgnIUw5TKQkZvZI1w==", + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/scroll-into-view-if-needed/-/scroll-into-view-if-needed-3.1.0.tgz", + "integrity": "sha512-49oNpRjWRvnU8NyGVmUaYG4jtTkNonFZI86MmGRDqBphEK2EXT9gdEUoQPZhuBM8yWHxCWbobltqYO5M4XrUvQ==", "requires": { - "compute-scroll-into-view": "^1.0.17" + "compute-scroll-into-view": "^3.0.2" } }, "secure-json-parse": { diff --git a/superset-frontend/package.json b/superset-frontend/package.json index fa6724c4d554..34c8ebdd2140 100644 --- a/superset-frontend/package.json +++ b/superset-frontend/package.json @@ -198,7 +198,7 @@ "regenerator-runtime": "^0.13.5", "rimraf": "^3.0.2", "rison": "^0.1.1", - "scroll-into-view-if-needed": "^2.2.28", + "scroll-into-view-if-needed": "^3.1.0", "shortid": "^2.2.16", "tinycolor2": "^1.4.2", "urijs": "^1.19.8", From f128718d342827bde646e0cd0c64d0b756cbb173 Mon Sep 17 00:00:00 2001 From: Jan Suleiman Date: Fri, 28 Jun 2024 17:31:54 +0200 Subject: [PATCH 12/31] docs: fix typos (#29400) --- docs/docs/contributing/development.mdx | 10 +++++----- docs/docs/installation/docker-builds.mdx | 4 ++-- 2 files changed, 7 insertions(+), 7 deletions(-) diff --git a/docs/docs/contributing/development.mdx b/docs/docs/contributing/development.mdx index 74759f716687..679c6a7aaf65 100644 --- a/docs/docs/contributing/development.mdx +++ b/docs/docs/contributing/development.mdx @@ -40,13 +40,13 @@ Note that: - A Superset **Celery worker**, also mounting the local python repo/code - A Superset **Node service**, mounting, compiling and bundling the JS/TS assets - A Superset **Node websocket service** to power the async backend - - **Postgres** as the metadata database and to store example datasets, charts and dashboards whic + - **Postgres** as the metadata database and to store example datasets, charts and dashboards which should be populated upon startup - **Redis** as the message queue for our async backend and caching backend - It'll load up examples into the database upon first startup - all other details and pointers available in [docker-compose.yml](https://github.com/apache/superset/blob/master/docker-compose.yml) -- The local repository is mounted withing the services, meaning updating +- The local repository is mounted within the services, meaning updating the code on the host will be reflected in the docker images - Superset is served at localhost:8088/ - You can login with admin/admin @@ -231,7 +231,7 @@ We recommend using [nvm](https://github.com/nvm-sh/nvm) to manage your node envi ```bash curl -o- https://raw.githubusercontent.com/nvm-sh/nvm/v0.37.0/install.sh | bash -incase it shows '-bash: nvm: command not found' +in case it shows '-bash: nvm: command not found' export NVM_DIR="$HOME/.nvm" [ -s "$NVM_DIR/nvm.sh" ] && \. 
"$NVM_DIR/nvm.sh" # This loads nvm [ -s "$NVM_DIR/bash_completion" ] && \. "$NVM_DIR/bash_completion" # This loads nvm bash_completion @@ -261,7 +261,7 @@ cd superset-frontend npm ci ``` -Note that Superset uses [Scarf](https://docs.scarf.sh) to capture telemetry/analytics about versions being installed, including the `scarf-js` npm package and an analytics pixel. As noted elsewhere in this documentation, Scarf gathers aggregated stats for the sake of security/release strategy, and does not capture/retain PII. [You can read here](https://docs.scarf.sh/package-analytics/) about the `scarf-js` package, and various means to opt out of it, but you can opt out of the npm package _and_ the pixel by setting the `SCARF_ANALYTICS` envinronment variable to `false` or opt out of the pixel by adding this setting in `superset-frontent/package.json`: +Note that Superset uses [Scarf](https://docs.scarf.sh) to capture telemetry/analytics about versions being installed, including the `scarf-js` npm package and an analytics pixel. As noted elsewhere in this documentation, Scarf gathers aggregated stats for the sake of security/release strategy, and does not capture/retain PII. [You can read here](https://docs.scarf.sh/package-analytics/) about the `scarf-js` package, and various means to opt out of it, but you can opt out of the npm package _and_ the pixel by setting the `SCARF_ANALYTICS` environment variable to `false` or opt out of the pixel by adding this setting in `superset-frontent/package.json`: ```json // your-package/package.json @@ -769,7 +769,7 @@ To contribute a plugin to Superset, your plugin must meet the following criteria - The plugin should be written with TypeScript - The plugin should contain sufficient unit/e2e tests - The plugin should use appropriate namespacing, e.g. a folder name of `plugin-chart-whatever` and a package name of `@superset-ui/plugin-chart-whatever` -- The plugin should use them variables via Emotion, as passed in by the ThemeProvider +- The plugin should use theme variables via Emotion, as passed in by the ThemeProvider - The plugin should provide adequate error handling (no data returned, malformed data, invalid controls, etc.) - The plugin should contain documentation in the form of a populated `README.md` file - The plugin should have a meaningful and unique icon diff --git a/docs/docs/installation/docker-builds.mdx b/docs/docs/installation/docker-builds.mdx index 4b1052a0950f..a67222cf2415 100644 --- a/docs/docs/installation/docker-builds.mdx +++ b/docs/docs/installation/docker-builds.mdx @@ -63,7 +63,7 @@ To accelerate builds, we follow Docker best practices and use `apache/superset-c ## About database drivers Our docker images come with little to zero database driver support since -each envrionment requires different drivers, and mataining a build with +each environment requires different drivers, and maintaining a build with wide database support would be both challenging (dozens of databases, python drivers, and os dependencies) and inefficient (longer build times, larger images, lower layer cache hit rate, ...). @@ -79,7 +79,7 @@ docker-compose to point to these images and effectively be multi-platform as well. Pull requests and master builds -are one-image-per-platform so that they can be parallized and the +are one-image-per-platform so that they can be parallelized and the build matrix for those is more sparse as we don't need to build every build preset on every platform, and generally can be more selective here. 
For those builds, we suffix tags with `-arm` where it applies. From a3f0d00714cafe01d32a39a4df56ec6d4b15087c Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=C4=90=E1=BB=97=20Tr=E1=BB=8Dng=20H=E1=BA=A3i?= <41283691+hainenber@users.noreply.github.com> Date: Fri, 28 Jun 2024 23:22:19 +0700 Subject: [PATCH 13/31] refactor(src/explore/comp/controls/metricControl): migrate Enzyme test to RTL syntax (#29380) Signed-off-by: hainenber Co-authored-by: Michael S. Molina <70410625+michael-s-molina@users.noreply.github.com> --- .../MetricDefinitionOption.test.jsx | 53 ---------------- .../MetricDefinitionOption.test.tsx | 60 +++++++++++++++++++ 2 files changed, 60 insertions(+), 53 deletions(-) delete mode 100644 superset-frontend/src/explore/components/controls/MetricControl/MetricDefinitionOption.test.jsx create mode 100644 superset-frontend/src/explore/components/controls/MetricControl/MetricDefinitionOption.test.tsx diff --git a/superset-frontend/src/explore/components/controls/MetricControl/MetricDefinitionOption.test.jsx b/superset-frontend/src/explore/components/controls/MetricControl/MetricDefinitionOption.test.jsx deleted file mode 100644 index 64326fd799a3..000000000000 --- a/superset-frontend/src/explore/components/controls/MetricControl/MetricDefinitionOption.test.jsx +++ /dev/null @@ -1,53 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ -import configureStore from 'redux-mock-store'; -import { shallow } from 'enzyme'; - -import MetricDefinitionOption from 'src/explore/components/controls/MetricControl/MetricDefinitionOption'; -import AggregateOption from 'src/explore/components/controls/MetricControl/AggregateOption'; -import { - StyledMetricOption, - StyledColumnOption, -} from 'src/explore/components/optionRenderers'; - -describe('MetricDefinitionOption', () => { - const mockStore = configureStore([]); - const store = mockStore({}); - - function setup(props) { - return shallow().dive(); - } - - it('renders a StyledMetricOption given a saved metric', () => { - const wrapper = setup({ - option: { metric_name: 'a_saved_metric', expression: 'COUNT(*)' }, - }); - expect(wrapper.find(StyledMetricOption)).toExist(); - }); - - it('renders a StyledColumnOption given a column', () => { - const wrapper = setup({ option: { column_name: 'a_column' } }); - expect(wrapper.find(StyledColumnOption)).toExist(); - }); - - it('renders an AggregateOption given an aggregate metric', () => { - const wrapper = setup({ option: { aggregate_name: 'an_aggregate' } }); - expect(wrapper.find(AggregateOption)).toExist(); - }); -}); diff --git a/superset-frontend/src/explore/components/controls/MetricControl/MetricDefinitionOption.test.tsx b/superset-frontend/src/explore/components/controls/MetricControl/MetricDefinitionOption.test.tsx new file mode 100644 index 000000000000..bb2c71d249ff --- /dev/null +++ b/superset-frontend/src/explore/components/controls/MetricControl/MetricDefinitionOption.test.tsx @@ -0,0 +1,60 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ +import { render, screen } from 'spec/helpers/testing-library'; + +import MetricDefinitionOption from 'src/explore/components/controls/MetricControl/MetricDefinitionOption'; +import userEvent from '@testing-library/user-event'; + +type MetricDefinitionOptionProps = { + option: { + metric_name?: string; + expression?: string; + column_name?: string; + aggregate_name?: string; + }; +}; + +const renderMetricDefinitionOption = (props: MetricDefinitionOptionProps) => { + render(, { + useRedux: true, + useRouter: true, + }); +}; + +test('renders a given saved metric and display SQL expression popover when hovered', async () => { + renderMetricDefinitionOption({ + option: { metric_name: 'a_saved_metric', expression: 'COUNT(*)' }, + }); + expect(await screen.findByText('a_saved_metric')).toBeInTheDocument(); + + // Grab calculator icon and mock mouse hovering over it + const calculatorIcon = await screen.findByLabelText('calculator'); + userEvent.hover(calculatorIcon); + expect(await screen.findByText('SQL expression')).toBeInTheDocument(); +}); + +test('renders when given a column', async () => { + renderMetricDefinitionOption({ option: { column_name: 'a_column' } }); + expect(await screen.findByText('a_column')).toBeInTheDocument(); +}); + +test('renders when given an aggregate metric', async () => { + renderMetricDefinitionOption({ option: { aggregate_name: 'an_aggregate' } }); + expect(await screen.findByText('an_aggregate')).toBeInTheDocument(); +}); From 8fb8199a55257ae5121300aa3bd28d14d327bc33 Mon Sep 17 00:00:00 2001 From: John Bodley <4567245+john-bodley@users.noreply.github.com> Date: Fri, 28 Jun 2024 12:33:56 -0700 Subject: [PATCH 14/31] chore(dao/command): Add transaction decorator to try to enforce "unit of work" (#24969) --- pyproject.toml | 1 + scripts/permissions_cleanup.py | 7 +- scripts/python_tests.sh | 1 + superset/cachekeys/api.py | 8 +- superset/cli/examples.py | 2 + superset/cli/main.py | 2 + superset/cli/test.py | 11 +- superset/cli/update.py | 3 + .../annotation_layer/annotation/create.py | 10 +- .../annotation_layer/annotation/delete.py | 11 +- .../annotation_layer/annotation/update.py | 12 +- superset/commands/annotation_layer/create.py | 10 +- superset/commands/annotation_layer/delete.py | 11 +- superset/commands/annotation_layer/update.py | 12 +- superset/commands/chart/create.py | 14 +-- superset/commands/chart/delete.py | 11 +- superset/commands/chart/importers/v1/utils.py | 2 +- superset/commands/chart/update.py | 27 ++-- superset/commands/css/delete.py | 11 +- superset/commands/dashboard/create.py | 13 +- superset/commands/dashboard/delete.py | 11 +- superset/commands/dashboard/importers/v0.py | 3 +- .../commands/dashboard/importers/v1/utils.py | 2 +- .../commands/dashboard/permalink/create.py | 54 ++++---- superset/commands/dashboard/update.py | 33 ++--- superset/commands/database/create.py | 14 +-- superset/commands/database/delete.py | 11 +- .../commands/database/ssh_tunnel/create.py | 11 +- .../commands/database/ssh_tunnel/delete.py | 10 +- .../commands/database/ssh_tunnel/update.py | 29 +++-- superset/commands/database/update.py | 32 ++--- superset/commands/database/uploaders/base.py | 10 +- superset/commands/dataset/columns/delete.py | 11 +- superset/commands/dataset/create.py | 19 +-- superset/commands/dataset/delete.py | 11 +- superset/commands/dataset/duplicate.py | 115 +++++++++--------- superset/commands/dataset/importers/v0.py | 5 +- .../commands/dataset/importers/v1/utils.py | 2 +- superset/commands/dataset/metrics/delete.py | 11 +- 
superset/commands/dataset/refresh.py | 14 +-- superset/commands/dataset/update.py | 27 ++-- superset/commands/explore/permalink/create.py | 66 +++++----- superset/commands/importers/v1/__init__.py | 6 +- superset/commands/importers/v1/assets.py | 18 +-- superset/commands/importers/v1/examples.py | 5 +- superset/commands/key_value/create.py | 13 +- superset/commands/key_value/delete.py | 19 ++- superset/commands/key_value/delete_expired.py | 11 +- superset/commands/key_value/update.py | 11 +- superset/commands/key_value/upsert.py | 26 ++-- superset/commands/query/delete.py | 11 +- superset/commands/report/create.py | 10 +- superset/commands/report/delete.py | 11 +- superset/commands/report/execute.py | 7 +- superset/commands/report/log_prune.py | 14 +-- superset/commands/report/update.py | 13 +- superset/commands/security/create.py | 9 +- superset/commands/security/delete.py | 10 +- superset/commands/security/update.py | 12 +- superset/commands/sql_lab/execute.py | 22 +++- superset/commands/tag/create.py | 54 ++++---- superset/commands/tag/delete.py | 27 ++-- superset/commands/tag/update.py | 20 +-- superset/commands/temporary_cache/create.py | 11 +- superset/commands/temporary_cache/delete.py | 11 +- superset/commands/temporary_cache/update.py | 11 +- superset/connectors/sqla/models.py | 5 +- superset/daos/base.py | 48 ++------ superset/daos/chart.py | 2 - superset/daos/dashboard.py | 12 +- superset/daos/database.py | 6 +- superset/daos/dataset.py | 17 +-- superset/daos/exceptions.py | 24 ---- superset/daos/query.py | 2 - superset/daos/report.py | 40 ++---- superset/daos/tag.py | 66 ++-------- superset/daos/user.py | 1 - superset/dashboards/api.py | 10 +- superset/databases/api.py | 3 +- superset/db_engine_specs/gsheets.py | 2 +- superset/db_engine_specs/hive.py | 2 +- superset/db_engine_specs/impala.py | 2 +- superset/db_engine_specs/presto.py | 2 +- superset/db_engine_specs/trino.py | 1 + superset/examples/bart_lines.py | 1 - superset/examples/birth_names.py | 3 - superset/examples/country_map.py | 1 - superset/examples/css_templates.py | 2 - superset/examples/deck.py | 1 - superset/examples/energy.py | 3 - superset/examples/flights.py | 1 - superset/examples/helpers.py | 3 - superset/examples/long_lat.py | 1 - superset/examples/misc_dashboard.py | 1 - superset/examples/multiformat_time_series.py | 1 - superset/examples/paris.py | 1 - superset/examples/random_time_series.py | 2 - superset/examples/sf_population_polygons.py | 1 - .../examples/supported_charts_dashboard.py | 3 - superset/examples/tabbed_dashboard.py | 3 - superset/examples/world_bank.py | 6 +- superset/extensions/metastore_cache.py | 7 +- superset/extensions/pylint.py | 17 +++ superset/initialization/__init__.py | 2 + superset/key_value/shared_entries.py | 2 - superset/models/dashboard.py | 2 +- superset/queries/api.py | 4 +- superset/row_level_security/api.py | 6 +- superset/security/manager.py | 4 - superset/sql_lab.py | 2 + superset/sqllab/sql_json_executer.py | 3 + superset/tags/models.py | 1 + superset/tasks/celery_app.py | 2 +- superset/utils/database.py | 5 +- superset/utils/decorators.py | 63 ++++++++++ superset/utils/lock.py | 4 - superset/utils/log.py | 2 +- superset/views/base.py | 3 +- superset/views/core.py | 4 +- superset/views/dashboard/views.py | 2 +- superset/views/datasource/views.py | 2 +- superset/views/key_value.py | 2 +- superset/views/sql_lab/views.py | 2 +- tests/integration_tests/base_tests.py | 3 +- tests/integration_tests/charts/api_tests.py | 1 - .../charts/data/api_tests.py | 3 + 
tests/integration_tests/conftest.py | 4 - tests/integration_tests/core_tests.py | 2 +- tests/integration_tests/dashboard_tests.py | 6 +- .../dashboards/commands_tests.py | 1 - .../integration_tests/databases/api_tests.py | 3 - tests/integration_tests/datasets/api_tests.py | 17 +-- tests/integration_tests/datasource_tests.py | 2 - tests/integration_tests/embedded/api_tests.py | 1 + tests/integration_tests/embedded/dao_tests.py | 6 +- tests/integration_tests/embedded/test_view.py | 2 + .../fixtures/unicode_dashboard.py | 5 +- .../security/row_level_security_tests.py | 2 - tests/integration_tests/sqla_models_tests.py | 3 +- tests/integration_tests/sqllab_tests.py | 1 - .../integration_tests/superset_test_config.py | 1 + tests/integration_tests/tags/dao_tests.py | 3 +- .../commands/databases/create_test.py | 2 - .../commands/databases/update_test.py | 4 - tests/unit_tests/dao/tag_test.py | 7 -- tests/unit_tests/dao/user_test.py | 1 - tests/unit_tests/databases/api_test.py | 2 +- .../ssh_tunnel/commands/create_test.py | 2 +- .../databases/ssh_tunnel/dao_tests.py | 1 - tests/unit_tests/security/manager_test.py | 1 - tests/unit_tests/utils/lock_tests.py | 51 ++++---- 151 files changed, 682 insertions(+), 917 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index 771761130836..efb211c0d34f 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -240,6 +240,7 @@ ignore_basepython_conflict = true commands = superset db upgrade superset init + superset load-test-users # use -s to be able to use break pointers. # no args or tests/* can be passed as an argument to run all tests pytest -s {posargs} diff --git a/scripts/permissions_cleanup.py b/scripts/permissions_cleanup.py index 0416f5580682..22e58f013fa3 100644 --- a/scripts/permissions_cleanup.py +++ b/scripts/permissions_cleanup.py @@ -17,8 +17,10 @@ from collections import defaultdict from superset import security_manager +from superset.utils.decorators import transaction +@transaction() def cleanup_permissions() -> None: # 1. Clean up duplicates. pvms = security_manager.get_session.query( @@ -29,7 +31,6 @@ def cleanup_permissions() -> None: for pvm in pvms: pvms_dict[(pvm.permission, pvm.view_menu)].append(pvm) duplicates = [v for v in pvms_dict.values() if len(v) > 1] - len(duplicates) for pvm_list in duplicates: first_prm = pvm_list[0] @@ -38,7 +39,6 @@ def cleanup_permissions() -> None: roles = roles.union(pvm.role) security_manager.get_session.delete(pvm) first_prm.roles = list(roles) - security_manager.get_session.commit() pvms = security_manager.get_session.query( security_manager.permissionview_model @@ -52,7 +52,6 @@ def cleanup_permissions() -> None: for pvm in pvms: if not (pvm.view_menu and pvm.permission): security_manager.get_session.delete(pvm) - security_manager.get_session.commit() pvms = security_manager.get_session.query( security_manager.permissionview_model @@ -63,7 +62,6 @@ def cleanup_permissions() -> None: roles = security_manager.get_session.query(security_manager.role_model).all() for role in roles: role.permissions = [p for p in role.permissions if p] - security_manager.get_session.commit() # 4. 
Delete empty roles from permission view menus pvms = security_manager.get_session.query( @@ -71,7 +69,6 @@ def cleanup_permissions() -> None: ).all() for pvm in pvms: pvm.role = [r for r in pvm.role if r] - security_manager.get_session.commit() cleanup_permissions() diff --git a/scripts/python_tests.sh b/scripts/python_tests.sh index c3f27d17f78c..e127d0c02062 100755 --- a/scripts/python_tests.sh +++ b/scripts/python_tests.sh @@ -29,6 +29,7 @@ echo "Superset config module: $SUPERSET_CONFIG" superset db upgrade superset init +superset load-test-users echo "Running tests" diff --git a/superset/cachekeys/api.py b/superset/cachekeys/api.py index 91cae29b8dc5..093d81b1c3f7 100644 --- a/superset/cachekeys/api.py +++ b/superset/cachekeys/api.py @@ -113,8 +113,10 @@ def invalidate(self) -> Response: delete_stmt = CacheKey.__table__.delete().where( # pylint: disable=no-member CacheKey.cache_key.in_(cache_keys) ) - db.session.execute(delete_stmt) - db.session.commit() + + with db.session.begin_nested(): + db.session.execute(delete_stmt) + stats_logger_manager.instance.gauge( "invalidated_cache", len(cache_keys) ) @@ -125,7 +127,5 @@ def invalidate(self) -> Response: ) except SQLAlchemyError as ex: # pragma: no cover logger.error(ex, exc_info=True) - db.session.rollback() return self.response_500(str(ex)) - db.session.commit() return self.response(201) diff --git a/superset/cli/examples.py b/superset/cli/examples.py index 3ce136ada7bc..51b89f964100 100755 --- a/superset/cli/examples.py +++ b/superset/cli/examples.py @@ -20,6 +20,7 @@ from flask.cli import with_appcontext import superset.utils.database as database_utils +from superset.utils.decorators import transaction logger = logging.getLogger(__name__) @@ -89,6 +90,7 @@ def load_examples_run( @click.command() @with_appcontext +@transaction() @click.option("--load-test-data", "-t", is_flag=True, help="Load additional test data") @click.option("--load-big-data", "-b", is_flag=True, help="Load additional big data") @click.option( diff --git a/superset/cli/main.py b/superset/cli/main.py index aa7e3068f8b9..ffe3278b11a0 100755 --- a/superset/cli/main.py +++ b/superset/cli/main.py @@ -27,6 +27,7 @@ from superset import app, appbuilder, cli, security_manager from superset.cli.lib import normalize_token from superset.extensions import db +from superset.utils.decorators import transaction logger = logging.getLogger(__name__) @@ -60,6 +61,7 @@ def make_shell_context() -> dict[str, Any]: @superset.command() @with_appcontext +@transaction() def init() -> None: """Inits the Superset application""" appbuilder.add_permissions(update_perms=True) diff --git a/superset/cli/test.py b/superset/cli/test.py index f175acec470c..60ea532cbdba 100755 --- a/superset/cli/test.py +++ b/superset/cli/test.py @@ -22,12 +22,14 @@ import superset.utils.database as database_utils from superset import app, security_manager +from superset.utils.decorators import transaction logger = logging.getLogger(__name__) @click.command() @with_appcontext +@transaction() def load_test_users() -> None: """ Loads admin, alpha, and gamma user for testing purposes @@ -35,15 +37,7 @@ def load_test_users() -> None: Syncs permissions for those users/roles """ print(Fore.GREEN + "Loading a set of users for unit tests") - load_test_users_run() - -def load_test_users_run() -> None: - """ - Loads admin, alpha, and gamma user for testing purposes - - Syncs permissions for those users/roles - """ if app.config["TESTING"]: sm = security_manager @@ -84,4 +78,3 @@ def load_test_users_run() -> None: 
sm.find_role(role), password="general", ) - sm.get_session.commit() diff --git a/superset/cli/update.py b/superset/cli/update.py index 9ff1f3bf58bf..c162bb1e56ea 100755 --- a/superset/cli/update.py +++ b/superset/cli/update.py @@ -30,6 +30,7 @@ from flask_appbuilder.api.manager import resolver import superset.utils.database as database_utils +from superset.utils.decorators import transaction from superset.utils.encrypt import SecretsMigrator logger = logging.getLogger(__name__) @@ -37,6 +38,7 @@ @click.command() @with_appcontext +@transaction() @click.option("--database_name", "-d", help="Database name to change") @click.option("--uri", "-u", help="Database URI to change") @click.option( @@ -53,6 +55,7 @@ def set_database_uri(database_name: str, uri: str, skip_create: bool) -> None: @click.command() @with_appcontext +@transaction() def sync_tags() -> None: """Rebuilds special tags (owner, type, favorited by).""" # pylint: disable=no-member diff --git a/superset/commands/annotation_layer/annotation/create.py b/superset/commands/annotation_layer/annotation/create.py index feed6162cacb..409efd33421a 100644 --- a/superset/commands/annotation_layer/annotation/create.py +++ b/superset/commands/annotation_layer/annotation/create.py @@ -16,6 +16,7 @@ # under the License. import logging from datetime import datetime +from functools import partial from typing import Any, Optional from flask_appbuilder.models.sqla import Model @@ -30,7 +31,7 @@ from superset.commands.annotation_layer.exceptions import AnnotationLayerNotFoundError from superset.commands.base import BaseCommand from superset.daos.annotation_layer import AnnotationDAO, AnnotationLayerDAO -from superset.daos.exceptions import DAOCreateFailedError +from superset.utils.decorators import on_error, transaction logger = logging.getLogger(__name__) @@ -39,13 +40,10 @@ class CreateAnnotationCommand(BaseCommand): def __init__(self, data: dict[str, Any]): self._properties = data.copy() + @transaction(on_error=partial(on_error, reraise=AnnotationCreateFailedError)) def run(self) -> Model: self.validate() - try: - return AnnotationDAO.create(attributes=self._properties) - except DAOCreateFailedError as ex: - logger.exception(ex.exception) - raise AnnotationCreateFailedError() from ex + return AnnotationDAO.create(attributes=self._properties) def validate(self) -> None: exceptions: list[ValidationError] = [] diff --git a/superset/commands/annotation_layer/annotation/delete.py b/superset/commands/annotation_layer/annotation/delete.py index 3f48ae2ceb12..125265449ede 100644 --- a/superset/commands/annotation_layer/annotation/delete.py +++ b/superset/commands/annotation_layer/annotation/delete.py @@ -15,6 +15,7 @@ # specific language governing permissions and limitations # under the License. 
import logging +from functools import partial from typing import Optional from superset.commands.annotation_layer.annotation.exceptions import ( @@ -23,8 +24,8 @@ ) from superset.commands.base import BaseCommand from superset.daos.annotation_layer import AnnotationDAO -from superset.daos.exceptions import DAODeleteFailedError from superset.models.annotations import Annotation +from superset.utils.decorators import on_error, transaction logger = logging.getLogger(__name__) @@ -34,15 +35,11 @@ def __init__(self, model_ids: list[int]): self._model_ids = model_ids self._models: Optional[list[Annotation]] = None + @transaction(on_error=partial(on_error, reraise=AnnotationDeleteFailedError)) def run(self) -> None: self.validate() assert self._models - - try: - AnnotationDAO.delete(self._models) - except DAODeleteFailedError as ex: - logger.exception(ex.exception) - raise AnnotationDeleteFailedError() from ex + AnnotationDAO.delete(self._models) def validate(self) -> None: # Validate/populate model exists diff --git a/superset/commands/annotation_layer/annotation/update.py b/superset/commands/annotation_layer/annotation/update.py index 9ba07fdcd68d..129b09fcb36f 100644 --- a/superset/commands/annotation_layer/annotation/update.py +++ b/superset/commands/annotation_layer/annotation/update.py @@ -16,6 +16,7 @@ # under the License. import logging from datetime import datetime +from functools import partial from typing import Any, Optional from flask_appbuilder.models.sqla import Model @@ -31,8 +32,8 @@ from superset.commands.annotation_layer.exceptions import AnnotationLayerNotFoundError from superset.commands.base import BaseCommand from superset.daos.annotation_layer import AnnotationDAO, AnnotationLayerDAO -from superset.daos.exceptions import DAOUpdateFailedError from superset.models.annotations import Annotation +from superset.utils.decorators import on_error, transaction logger = logging.getLogger(__name__) @@ -43,16 +44,11 @@ def __init__(self, model_id: int, data: dict[str, Any]): self._properties = data.copy() self._model: Optional[Annotation] = None + @transaction(on_error=partial(on_error, reraise=AnnotationUpdateFailedError)) def run(self) -> Model: self.validate() assert self._model - - try: - annotation = AnnotationDAO.update(self._model, self._properties) - except DAOUpdateFailedError as ex: - logger.exception(ex.exception) - raise AnnotationUpdateFailedError() from ex - return annotation + return AnnotationDAO.update(self._model, self._properties) def validate(self) -> None: exceptions: list[ValidationError] = [] diff --git a/superset/commands/annotation_layer/create.py b/superset/commands/annotation_layer/create.py index 6b87ad570363..0f06e2b2744d 100644 --- a/superset/commands/annotation_layer/create.py +++ b/superset/commands/annotation_layer/create.py @@ -15,6 +15,7 @@ # specific language governing permissions and limitations # under the License. 
import logging +from functools import partial from typing import Any from flask_appbuilder.models.sqla import Model @@ -27,7 +28,7 @@ ) from superset.commands.base import BaseCommand from superset.daos.annotation_layer import AnnotationLayerDAO -from superset.daos.exceptions import DAOCreateFailedError +from superset.utils.decorators import on_error, transaction logger = logging.getLogger(__name__) @@ -36,13 +37,10 @@ class CreateAnnotationLayerCommand(BaseCommand): def __init__(self, data: dict[str, Any]): self._properties = data.copy() + @transaction(on_error=partial(on_error, reraise=AnnotationLayerCreateFailedError)) def run(self) -> Model: self.validate() - try: - return AnnotationLayerDAO.create(attributes=self._properties) - except DAOCreateFailedError as ex: - logger.exception(ex.exception) - raise AnnotationLayerCreateFailedError() from ex + return AnnotationLayerDAO.create(attributes=self._properties) def validate(self) -> None: exceptions: list[ValidationError] = [] diff --git a/superset/commands/annotation_layer/delete.py b/superset/commands/annotation_layer/delete.py index a75ee42b772e..b97b7ac0933f 100644 --- a/superset/commands/annotation_layer/delete.py +++ b/superset/commands/annotation_layer/delete.py @@ -15,6 +15,7 @@ # specific language governing permissions and limitations # under the License. import logging +from functools import partial from typing import Optional from superset.commands.annotation_layer.exceptions import ( @@ -24,8 +25,8 @@ ) from superset.commands.base import BaseCommand from superset.daos.annotation_layer import AnnotationLayerDAO -from superset.daos.exceptions import DAODeleteFailedError from superset.models.annotations import AnnotationLayer +from superset.utils.decorators import on_error, transaction logger = logging.getLogger(__name__) @@ -35,15 +36,11 @@ def __init__(self, model_ids: list[int]): self._model_ids = model_ids self._models: Optional[list[AnnotationLayer]] = None + @transaction(on_error=partial(on_error, reraise=AnnotationLayerDeleteFailedError)) def run(self) -> None: self.validate() assert self._models - - try: - AnnotationLayerDAO.delete(self._models) - except DAODeleteFailedError as ex: - logger.exception(ex.exception) - raise AnnotationLayerDeleteFailedError() from ex + AnnotationLayerDAO.delete(self._models) def validate(self) -> None: # Validate/populate model exists diff --git a/superset/commands/annotation_layer/update.py b/superset/commands/annotation_layer/update.py index d15440882b15..c4e18bdd09ee 100644 --- a/superset/commands/annotation_layer/update.py +++ b/superset/commands/annotation_layer/update.py @@ -15,6 +15,7 @@ # specific language governing permissions and limitations # under the License. 
import logging +from functools import partial from typing import Any, Optional from flask_appbuilder.models.sqla import Model @@ -28,8 +29,8 @@ ) from superset.commands.base import BaseCommand from superset.daos.annotation_layer import AnnotationLayerDAO -from superset.daos.exceptions import DAOUpdateFailedError from superset.models.annotations import AnnotationLayer +from superset.utils.decorators import on_error, transaction logger = logging.getLogger(__name__) @@ -40,16 +41,11 @@ def __init__(self, model_id: int, data: dict[str, Any]): self._properties = data.copy() self._model: Optional[AnnotationLayer] = None + @transaction(on_error=partial(on_error, reraise=AnnotationLayerUpdateFailedError)) def run(self) -> Model: self.validate() assert self._model - - try: - annotation_layer = AnnotationLayerDAO.update(self._model, self._properties) - except DAOUpdateFailedError as ex: - logger.exception(ex.exception) - raise AnnotationLayerUpdateFailedError() from ex - return annotation_layer + return AnnotationLayerDAO.update(self._model, self._properties) def validate(self) -> None: exceptions: list[ValidationError] = [] diff --git a/superset/commands/chart/create.py b/superset/commands/chart/create.py index 2b251029c3f3..84b3aa29411e 100644 --- a/superset/commands/chart/create.py +++ b/superset/commands/chart/create.py @@ -16,6 +16,7 @@ # under the License. import logging from datetime import datetime +from functools import partial from typing import Any, Optional from flask import g @@ -33,7 +34,7 @@ from superset.commands.utils import get_datasource_by_id from superset.daos.chart import ChartDAO from superset.daos.dashboard import DashboardDAO -from superset.daos.exceptions import DAOCreateFailedError +from superset.utils.decorators import on_error, transaction logger = logging.getLogger(__name__) @@ -42,15 +43,12 @@ class CreateChartCommand(CreateMixin, BaseCommand): def __init__(self, data: dict[str, Any]): self._properties = data.copy() + @transaction(on_error=partial(on_error, reraise=ChartCreateFailedError)) def run(self) -> Model: self.validate() - try: - self._properties["last_saved_at"] = datetime.now() - self._properties["last_saved_by"] = g.user - return ChartDAO.create(attributes=self._properties) - except DAOCreateFailedError as ex: - logger.exception(ex.exception) - raise ChartCreateFailedError() from ex + self._properties["last_saved_at"] = datetime.now() + self._properties["last_saved_by"] = g.user + return ChartDAO.create(attributes=self._properties) def validate(self) -> None: exceptions = [] diff --git a/superset/commands/chart/delete.py b/superset/commands/chart/delete.py index 8694ae1feb32..00e6d201bcc9 100644 --- a/superset/commands/chart/delete.py +++ b/superset/commands/chart/delete.py @@ -15,6 +15,7 @@ # specific language governing permissions and limitations # under the License. 
import logging +from functools import partial from typing import Optional from flask_babel import lazy_gettext as _ @@ -28,10 +29,10 @@ ChartNotFoundError, ) from superset.daos.chart import ChartDAO -from superset.daos.exceptions import DAODeleteFailedError from superset.daos.report import ReportScheduleDAO from superset.exceptions import SupersetSecurityException from superset.models.slice import Slice +from superset.utils.decorators import on_error, transaction logger = logging.getLogger(__name__) @@ -41,15 +42,11 @@ def __init__(self, model_ids: list[int]): self._model_ids = model_ids self._models: Optional[list[Slice]] = None + @transaction(on_error=partial(on_error, reraise=ChartDeleteFailedError)) def run(self) -> None: self.validate() assert self._models - - try: - ChartDAO.delete(self._models) - except DAODeleteFailedError as ex: - logger.exception(ex.exception) - raise ChartDeleteFailedError() from ex + ChartDAO.delete(self._models) def validate(self) -> None: # Validate/populate model exists diff --git a/superset/commands/chart/importers/v1/utils.py b/superset/commands/chart/importers/v1/utils.py index 39ca49a5d5ff..35a7f6e2700f 100644 --- a/superset/commands/chart/importers/v1/utils.py +++ b/superset/commands/chart/importers/v1/utils.py @@ -77,7 +77,7 @@ def import_chart( if chart.id is None: db.session.flush() - if user := get_user(): + if (user := get_user()) and user not in chart.owners: chart.owners.append(user) return chart diff --git a/superset/commands/chart/update.py b/superset/commands/chart/update.py index 74b1c30aa83c..d6b212d5ce86 100644 --- a/superset/commands/chart/update.py +++ b/superset/commands/chart/update.py @@ -16,6 +16,7 @@ # under the License. import logging from datetime import datetime +from functools import partial from typing import Any, Optional from flask import g @@ -35,10 +36,10 @@ from superset.commands.utils import get_datasource_by_id, update_tags, validate_tags from superset.daos.chart import ChartDAO from superset.daos.dashboard import DashboardDAO -from superset.daos.exceptions import DAODeleteFailedError, DAOUpdateFailedError from superset.exceptions import SupersetSecurityException from superset.models.slice import Slice from superset.tags.models import ObjectType +from superset.utils.decorators import on_error, transaction logger = logging.getLogger(__name__) @@ -55,24 +56,20 @@ def __init__(self, model_id: int, data: dict[str, Any]): self._properties = data.copy() self._model: Optional[Slice] = None + @transaction(on_error=partial(on_error, reraise=ChartUpdateFailedError)) def run(self) -> Model: self.validate() assert self._model - try: - # Update tags - tags = self._properties.pop("tags", None) - if tags is not None: - update_tags(ObjectType.chart, self._model.id, self._model.tags, tags) - - if self._properties.get("query_context_generation") is None: - self._properties["last_saved_at"] = datetime.now() - self._properties["last_saved_by"] = g.user - chart = ChartDAO.update(self._model, self._properties) - except (DAOUpdateFailedError, DAODeleteFailedError) as ex: - logger.exception(ex.exception) - raise ChartUpdateFailedError() from ex - return chart + # Update tags + if (tags := self._properties.pop("tags", None)) is not None: + update_tags(ObjectType.chart, self._model.id, self._model.tags, tags) + + if self._properties.get("query_context_generation") is None: + self._properties["last_saved_at"] = datetime.now() + self._properties["last_saved_by"] = g.user + + return ChartDAO.update(self._model, self._properties) def validate(self) -> 
None: exceptions: list[ValidationError] = [] diff --git a/superset/commands/css/delete.py b/superset/commands/css/delete.py index b8362f6b464d..c6559eb06665 100644 --- a/superset/commands/css/delete.py +++ b/superset/commands/css/delete.py @@ -15,6 +15,7 @@ # specific language governing permissions and limitations # under the License. import logging +from functools import partial from typing import Optional from superset.commands.base import BaseCommand @@ -23,8 +24,8 @@ CssTemplateNotFoundError, ) from superset.daos.css import CssTemplateDAO -from superset.daos.exceptions import DAODeleteFailedError from superset.models.core import CssTemplate +from superset.utils.decorators import on_error, transaction logger = logging.getLogger(__name__) @@ -34,15 +35,11 @@ def __init__(self, model_ids: list[int]): self._model_ids = model_ids self._models: Optional[list[CssTemplate]] = None + @transaction(on_error=partial(on_error, reraise=CssTemplateDeleteFailedError)) def run(self) -> None: self.validate() assert self._models - - try: - CssTemplateDAO.delete(self._models) - except DAODeleteFailedError as ex: - logger.exception(ex.exception) - raise CssTemplateDeleteFailedError() from ex + CssTemplateDAO.delete(self._models) def validate(self) -> None: # Validate/populate model exists diff --git a/superset/commands/dashboard/create.py b/superset/commands/dashboard/create.py index 1745391238d7..469d3d81af25 100644 --- a/superset/commands/dashboard/create.py +++ b/superset/commands/dashboard/create.py @@ -15,6 +15,7 @@ # specific language governing permissions and limitations # under the License. import logging +from functools import partial from typing import Any, Optional from flask_appbuilder.models.sqla import Model @@ -28,23 +29,19 @@ ) from superset.commands.utils import populate_roles from superset.daos.dashboard import DashboardDAO -from superset.daos.exceptions import DAOCreateFailedError +from superset.utils.decorators import on_error, transaction logger = logging.getLogger(__name__) class CreateDashboardCommand(CreateMixin, BaseCommand): - def __init__(self, data: dict[str, Any]): + def __init__(self, data: dict[str, Any]) -> None: self._properties = data.copy() + @transaction(on_error=partial(on_error, reraise=DashboardCreateFailedError)) def run(self) -> Model: self.validate() - try: - dashboard = DashboardDAO.create(attributes=self._properties, commit=True) - except DAOCreateFailedError as ex: - logger.exception(ex.exception) - raise DashboardCreateFailedError() from ex - return dashboard + return DashboardDAO.create(attributes=self._properties) def validate(self) -> None: exceptions: list[ValidationError] = [] diff --git a/superset/commands/dashboard/delete.py b/superset/commands/dashboard/delete.py index 569d05dac74d..0135c4303f29 100644 --- a/superset/commands/dashboard/delete.py +++ b/superset/commands/dashboard/delete.py @@ -15,6 +15,7 @@ # specific language governing permissions and limitations # under the License. 
import logging +from functools import partial from typing import Optional from flask_babel import lazy_gettext as _ @@ -28,10 +29,10 @@ DashboardNotFoundError, ) from superset.daos.dashboard import DashboardDAO -from superset.daos.exceptions import DAODeleteFailedError from superset.daos.report import ReportScheduleDAO from superset.exceptions import SupersetSecurityException from superset.models.dashboard import Dashboard +from superset.utils.decorators import on_error, transaction logger = logging.getLogger(__name__) @@ -41,15 +42,11 @@ def __init__(self, model_ids: list[int]): self._model_ids = model_ids self._models: Optional[list[Dashboard]] = None + @transaction(on_error=partial(on_error, reraise=DashboardDeleteFailedError)) def run(self) -> None: self.validate() assert self._models - - try: - DashboardDAO.delete(self._models) - except DAODeleteFailedError as ex: - logger.exception(ex.exception) - raise DashboardDeleteFailedError() from ex + DashboardDAO.delete(self._models) def validate(self) -> None: # Validate/populate model exists diff --git a/superset/commands/dashboard/importers/v0.py b/superset/commands/dashboard/importers/v0.py index a9ee3e484e1c..99090e7d417f 100644 --- a/superset/commands/dashboard/importers/v0.py +++ b/superset/commands/dashboard/importers/v0.py @@ -36,6 +36,7 @@ convert_filter_scopes, copy_filter_scopes, ) +from superset.utils.decorators import transaction logger = logging.getLogger(__name__) @@ -311,7 +312,6 @@ def import_dashboards( for dashboard in data["dashboards"]: import_dashboard(dashboard, dataset_id_mapping, import_time=import_time) - db.session.commit() class ImportDashboardsCommand(BaseCommand): @@ -329,6 +329,7 @@ def __init__( self.contents = contents self.database_id = database_id + @transaction() def run(self) -> None: self.validate() diff --git a/superset/commands/dashboard/importers/v1/utils.py b/superset/commands/dashboard/importers/v1/utils.py index f10afd12bc9e..5e949093b8a8 100644 --- a/superset/commands/dashboard/importers/v1/utils.py +++ b/superset/commands/dashboard/importers/v1/utils.py @@ -188,7 +188,7 @@ def import_dashboard( if dashboard.id is None: db.session.flush() - if user := get_user(): + if (user := get_user()) and user not in dashboard.owners: dashboard.owners.append(user) return dashboard diff --git a/superset/commands/dashboard/permalink/create.py b/superset/commands/dashboard/permalink/create.py index 76b7b8e83453..7d08f78e9a9b 100644 --- a/superset/commands/dashboard/permalink/create.py +++ b/superset/commands/dashboard/permalink/create.py @@ -15,18 +15,22 @@ # specific language governing permissions and limitations # under the License. 
import logging +from functools import partial from sqlalchemy.exc import SQLAlchemyError -from superset import db from superset.commands.dashboard.permalink.base import BaseDashboardPermalinkCommand from superset.commands.key_value.upsert import UpsertKeyValueCommand from superset.daos.dashboard import DashboardDAO from superset.dashboards.permalink.exceptions import DashboardPermalinkCreateFailedError from superset.dashboards.permalink.types import DashboardPermalinkState -from superset.key_value.exceptions import KeyValueCodecEncodeException +from superset.key_value.exceptions import ( + KeyValueCodecEncodeException, + KeyValueUpsertFailedError, +) from superset.key_value.utils import encode_permalink_key, get_deterministic_uuid from superset.utils.core import get_user_id +from superset.utils.decorators import on_error, transaction logger = logging.getLogger(__name__) @@ -47,29 +51,33 @@ def __init__( self.dashboard_id = dashboard_id self.state = state + @transaction( + on_error=partial( + on_error, + catches=( + KeyValueCodecEncodeException, + KeyValueUpsertFailedError, + SQLAlchemyError, + ), + reraise=DashboardPermalinkCreateFailedError, + ), + ) def run(self) -> str: self.validate() - try: - dashboard = DashboardDAO.get_by_id_or_slug(self.dashboard_id) - value = { - "dashboardId": str(dashboard.uuid), - "state": self.state, - } - user_id = get_user_id() - key = UpsertKeyValueCommand( - resource=self.resource, - key=get_deterministic_uuid(self.salt, (user_id, value)), - value=value, - codec=self.codec, - ).run() - assert key.id # for type checks - db.session.commit() - return encode_permalink_key(key=key.id, salt=self.salt) - except KeyValueCodecEncodeException as ex: - raise DashboardPermalinkCreateFailedError(str(ex)) from ex - except SQLAlchemyError as ex: - logger.exception("Error running create command") - raise DashboardPermalinkCreateFailedError() from ex + dashboard = DashboardDAO.get_by_id_or_slug(self.dashboard_id) + value = { + "dashboardId": str(dashboard.uuid), + "state": self.state, + } + user_id = get_user_id() + key = UpsertKeyValueCommand( + resource=self.resource, + key=get_deterministic_uuid(self.salt, (user_id, value)), + value=value, + codec=self.codec, + ).run() + assert key.id # for type checks + return encode_permalink_key(key=key.id, salt=self.salt) def validate(self) -> None: pass diff --git a/superset/commands/dashboard/update.py b/superset/commands/dashboard/update.py index 890422602dd6..2effd7bd2ece 100644 --- a/superset/commands/dashboard/update.py +++ b/superset/commands/dashboard/update.py @@ -15,6 +15,7 @@ # specific language governing permissions and limitations # under the License. 
import logging +from functools import partial from typing import Any, Optional from flask_appbuilder.models.sqla import Model @@ -31,12 +32,11 @@ ) from superset.commands.utils import populate_roles, update_tags, validate_tags from superset.daos.dashboard import DashboardDAO -from superset.daos.exceptions import DAODeleteFailedError, DAOUpdateFailedError from superset.exceptions import SupersetSecurityException -from superset.extensions import db from superset.models.dashboard import Dashboard from superset.tags.models import ObjectType from superset.utils import json +from superset.utils.decorators import on_error, transaction logger = logging.getLogger(__name__) @@ -47,29 +47,22 @@ def __init__(self, model_id: int, data: dict[str, Any]): self._properties = data.copy() self._model: Optional[Dashboard] = None + @transaction(on_error=partial(on_error, reraise=DashboardUpdateFailedError)) def run(self) -> Model: self.validate() assert self._model - try: - # Update tags - tags = self._properties.pop("tags", None) - if tags is not None: - update_tags( - ObjectType.dashboard, self._model.id, self._model.tags, tags - ) + # Update tags + if (tags := self._properties.pop("tags", None)) is not None: + update_tags(ObjectType.dashboard, self._model.id, self._model.tags, tags) + + dashboard = DashboardDAO.update(self._model, self._properties) + if self._properties.get("json_metadata"): + DashboardDAO.set_dash_metadata( + dashboard, + data=json.loads(self._properties.get("json_metadata", "{}")), + ) - dashboard = DashboardDAO.update(self._model, self._properties, commit=False) - if self._properties.get("json_metadata"): - dashboard = DashboardDAO.set_dash_metadata( - dashboard, - data=json.loads(self._properties.get("json_metadata", "{}")), - commit=False, - ) - db.session.commit() - except (DAOUpdateFailedError, DAODeleteFailedError) as ex: - logger.exception(ex.exception) - raise DashboardUpdateFailedError() from ex return dashboard def validate(self) -> None: diff --git a/superset/commands/database/create.py b/superset/commands/database/create.py index e66e1110c8df..76dd6087be58 100644 --- a/superset/commands/database/create.py +++ b/superset/commands/database/create.py @@ -15,6 +15,7 @@ # specific language governing permissions and limitations # under the License. 
import logging +from functools import partial from typing import Any, Optional from flask import current_app @@ -39,11 +40,11 @@ ) from superset.commands.database.test_connection import TestConnectionDatabaseCommand from superset.daos.database import DatabaseDAO -from superset.daos.exceptions import DAOCreateFailedError from superset.databases.ssh_tunnel.models import SSHTunnel from superset.exceptions import SupersetErrorsException -from superset.extensions import db, event_logger, security_manager +from superset.extensions import event_logger, security_manager from superset.models.core import Database +from superset.utils.decorators import on_error, transaction logger = logging.getLogger(__name__) stats_logger = current_app.config["STATS_LOGGER"] @@ -53,6 +54,7 @@ class CreateDatabaseCommand(BaseCommand): def __init__(self, data: dict[str, Any]): self._properties = data.copy() + @transaction(on_error=partial(on_error, reraise=DatabaseCreateFailedError)) def run(self) -> Model: self.validate() @@ -96,8 +98,6 @@ def run(self) -> Model: database, ssh_tunnel_properties ).run() - db.session.commit() - # add catalog/schema permissions if database.db_engine_spec.supports_catalog: catalogs = database.get_all_catalog_names( @@ -121,14 +121,12 @@ def run(self) -> Model: except Exception: # pylint: disable=broad-except logger.warning("Error processing catalog '%s'", catalog) continue - except ( SSHTunnelInvalidError, SSHTunnelCreateFailedError, SSHTunnelingNotEnabledError, SSHTunnelDatabasePortError, ) as ex: - db.session.rollback() event_logger.log_with_context( action=f"db_creation_failed.{ex.__class__.__name__}.ssh_tunnel", engine=self._properties.get("sqlalchemy_uri", "").split(":")[0], @@ -136,11 +134,9 @@ def run(self) -> Model: # So we can show the original message raise except ( - DAOCreateFailedError, DatabaseInvalidError, Exception, ) as ex: - db.session.rollback() event_logger.log_with_context( action=f"db_creation_failed.{ex.__class__.__name__}", engine=database.db_engine_spec.__name__, @@ -198,6 +194,6 @@ def validate(self) -> None: raise exception def _create_database(self) -> Database: - database = DatabaseDAO.create(attributes=self._properties, commit=False) + database = DatabaseDAO.create(attributes=self._properties) database.set_sqlalchemy_uri(database.sqlalchemy_uri) return database diff --git a/superset/commands/database/delete.py b/superset/commands/database/delete.py index ce0775506c3a..bf499dac4ff4 100644 --- a/superset/commands/database/delete.py +++ b/superset/commands/database/delete.py @@ -15,6 +15,7 @@ # specific language governing permissions and limitations # under the License. 
import logging +from functools import partial from typing import Optional from flask_babel import lazy_gettext as _ @@ -27,9 +28,9 @@ DatabaseNotFoundError, ) from superset.daos.database import DatabaseDAO -from superset.daos.exceptions import DAODeleteFailedError from superset.daos.report import ReportScheduleDAO from superset.models.core import Database +from superset.utils.decorators import on_error, transaction logger = logging.getLogger(__name__) @@ -39,15 +40,11 @@ def __init__(self, model_id: int): self._model_id = model_id self._model: Optional[Database] = None + @transaction(on_error=partial(on_error, reraise=DatabaseDeleteFailedError)) def run(self) -> None: self.validate() assert self._model - - try: - DatabaseDAO.delete([self._model]) - except DAODeleteFailedError as ex: - logger.exception(ex.exception) - raise DatabaseDeleteFailedError() from ex + DatabaseDAO.delete([self._model]) def validate(self) -> None: # Validate/populate model exists diff --git a/superset/commands/database/ssh_tunnel/create.py b/superset/commands/database/ssh_tunnel/create.py index 40083b4b648a..89e607ba67ae 100644 --- a/superset/commands/database/ssh_tunnel/create.py +++ b/superset/commands/database/ssh_tunnel/create.py @@ -15,6 +15,7 @@ # specific language governing permissions and limitations # under the License. import logging +from functools import partial from typing import Any, Optional from flask_appbuilder.models.sqla import Model @@ -28,10 +29,10 @@ SSHTunnelRequiredFieldValidationError, ) from superset.daos.database import SSHTunnelDAO -from superset.daos.exceptions import DAOCreateFailedError from superset.databases.utils import make_url_safe from superset.extensions import event_logger from superset.models.core import Database +from superset.utils.decorators import on_error, transaction logger = logging.getLogger(__name__) @@ -44,6 +45,7 @@ def __init__(self, database: Database, data: dict[str, Any]): self._properties["database"] = database self._database = database + @transaction(on_error=partial(on_error, reraise=SSHTunnelCreateFailedError)) def run(self) -> Model: """ Create an SSH tunnel. @@ -53,11 +55,8 @@ def run(self) -> Model: :raises SSHTunnelInvalidError: If the configuration are invalid """ - try: - self.validate() - return SSHTunnelDAO.create(attributes=self._properties, commit=False) - except DAOCreateFailedError as ex: - raise SSHTunnelCreateFailedError() from ex + self.validate() + return SSHTunnelDAO.create(attributes=self._properties) def validate(self) -> None: # TODO(hughhh): check to make sure the server port is not localhost diff --git a/superset/commands/database/ssh_tunnel/delete.py b/superset/commands/database/ssh_tunnel/delete.py index b8919e6d7bae..8c742307aa8d 100644 --- a/superset/commands/database/ssh_tunnel/delete.py +++ b/superset/commands/database/ssh_tunnel/delete.py @@ -15,6 +15,7 @@ # specific language governing permissions and limitations # under the License. 
import logging +from functools import partial from typing import Optional from superset import is_feature_enabled @@ -25,8 +26,8 @@ SSHTunnelNotFoundError, ) from superset.daos.database import SSHTunnelDAO -from superset.daos.exceptions import DAODeleteFailedError from superset.databases.ssh_tunnel.models import SSHTunnel +from superset.utils.decorators import on_error, transaction logger = logging.getLogger(__name__) @@ -36,16 +37,13 @@ def __init__(self, model_id: int): self._model_id = model_id self._model: Optional[SSHTunnel] = None + @transaction(on_error=partial(on_error, reraise=SSHTunnelDeleteFailedError)) def run(self) -> None: if not is_feature_enabled("SSH_TUNNELING"): raise SSHTunnelingNotEnabledError() self.validate() assert self._model - - try: - SSHTunnelDAO.delete([self._model]) - except DAODeleteFailedError as ex: - raise SSHTunnelDeleteFailedError() from ex + SSHTunnelDAO.delete([self._model]) def validate(self) -> None: # Validate/populate model exists diff --git a/superset/commands/database/ssh_tunnel/update.py b/superset/commands/database/ssh_tunnel/update.py index d0dd14a5b237..b2fa416bd597 100644 --- a/superset/commands/database/ssh_tunnel/update.py +++ b/superset/commands/database/ssh_tunnel/update.py @@ -15,6 +15,7 @@ # specific language governing permissions and limitations # under the License. import logging +from functools import partial from typing import Any, Optional from flask_appbuilder.models.sqla import Model @@ -28,9 +29,9 @@ SSHTunnelUpdateFailedError, ) from superset.daos.database import SSHTunnelDAO -from superset.daos.exceptions import DAOUpdateFailedError from superset.databases.ssh_tunnel.models import SSHTunnel from superset.databases.utils import make_url_safe +from superset.utils.decorators import on_error, transaction logger = logging.getLogger(__name__) @@ -41,25 +42,23 @@ def __init__(self, model_id: int, data: dict[str, Any]): self._model_id = model_id self._model: Optional[SSHTunnel] = None + @transaction(on_error=partial(on_error, reraise=SSHTunnelUpdateFailedError)) def run(self) -> Optional[Model]: self.validate() - try: - if self._model is None: - return None - # unset password if private key is provided - if self._properties.get("private_key"): - self._properties["password"] = None + if self._model is None: + return None - # unset private key and password if password is provided - if self._properties.get("password"): - self._properties["private_key"] = None - self._properties["private_key_password"] = None + # unset password if private key is provided + if self._properties.get("private_key"): + self._properties["password"] = None - tunnel = SSHTunnelDAO.update(self._model, self._properties) - return tunnel - except DAOUpdateFailedError as ex: - raise SSHTunnelUpdateFailedError() from ex + # unset private key and password if password is provided + if self._properties.get("password"): + self._properties["private_key"] = None + self._properties["private_key_password"] = None + + return SSHTunnelDAO.update(self._model, self._properties) def validate(self) -> None: # Validate/populate model exists diff --git a/superset/commands/database/update.py b/superset/commands/database/update.py index 61b0d51ed826..28f895b2f632 100644 --- a/superset/commands/database/update.py +++ b/superset/commands/database/update.py @@ -18,6 +18,7 @@ from __future__ import annotations import logging +from functools import partial from typing import Any from flask_appbuilder.models.sqla import Model @@ -34,16 +35,14 @@ from 
superset.commands.database.ssh_tunnel.create import CreateSSHTunnelCommand from superset.commands.database.ssh_tunnel.delete import DeleteSSHTunnelCommand from superset.commands.database.ssh_tunnel.exceptions import ( - SSHTunnelError, SSHTunnelingNotEnabledError, ) from superset.commands.database.ssh_tunnel.update import UpdateSSHTunnelCommand from superset.daos.database import DatabaseDAO from superset.daos.dataset import DatasetDAO -from superset.daos.exceptions import DAOCreateFailedError, DAOUpdateFailedError from superset.databases.ssh_tunnel.models import SSHTunnel -from superset.extensions import db from superset.models.core import Database +from superset.utils.decorators import on_error, transaction logger = logging.getLogger(__name__) @@ -56,6 +55,7 @@ def __init__(self, model_id: int, data: dict[str, Any]): self._model_id = model_id self._model: Database | None = None + @transaction(on_error=partial(on_error, reraise=DatabaseUpdateFailedError)) def run(self) -> Model: self._model = DatabaseDAO.find_by_id(self._model_id) @@ -76,21 +76,10 @@ def run(self) -> Model: # since they're name based original_database_name = self._model.database_name - try: - database = DatabaseDAO.update( - self._model, - self._properties, - commit=False, - ) - database.set_sqlalchemy_uri(database.sqlalchemy_uri) - ssh_tunnel = self._handle_ssh_tunnel(database) - self._refresh_catalogs(database, original_database_name, ssh_tunnel) - except SSHTunnelError: # pylint: disable=try-except-raise - # allow exception to bubble for debugbing information - raise - except (DAOUpdateFailedError, DAOCreateFailedError) as ex: - raise DatabaseUpdateFailedError() from ex - + database = DatabaseDAO.update(self._model, self._properties) + database.set_sqlalchemy_uri(database.sqlalchemy_uri) + ssh_tunnel = self._handle_ssh_tunnel(database) + self._refresh_catalogs(database, original_database_name, ssh_tunnel) return database def _handle_ssh_tunnel(self, database: Database) -> SSHTunnel | None: @@ -101,7 +90,6 @@ def _handle_ssh_tunnel(self, database: Database) -> SSHTunnel | None: return None if not is_feature_enabled("SSH_TUNNELING"): - db.session.rollback() raise SSHTunnelingNotEnabledError() current_ssh_tunnel = DatabaseDAO.get_ssh_tunnel(database.id) @@ -131,13 +119,13 @@ def _get_catalog_names( This method captures a generic exception, since errors could potentially come from any of the 50+ database drivers we support. """ + try: return database.get_all_catalog_names( force=True, ssh_tunnel=ssh_tunnel, ) except Exception as ex: - db.session.rollback() raise DatabaseConnectionFailedError() from ex def _get_schema_names( @@ -152,6 +140,7 @@ def _get_schema_names( This method captures a generic exception, since errors could potentially come from any of the 50+ database drivers we support. """ + try: return database.get_all_schema_names( force=True, @@ -159,7 +148,6 @@ def _get_schema_names( ssh_tunnel=ssh_tunnel, ) except Exception as ex: - db.session.rollback() raise DatabaseConnectionFailedError() from ex def _refresh_catalogs( @@ -225,8 +213,6 @@ def _refresh_catalogs( schemas, ) - db.session.commit() - def _refresh_schemas( self, database: Database, diff --git a/superset/commands/database/uploaders/base.py b/superset/commands/database/uploaders/base.py index b113e9ebf45d..0e939ef4324d 100644 --- a/superset/commands/database/uploaders/base.py +++ b/superset/commands/database/uploaders/base.py @@ -16,11 +16,11 @@ # under the License. 
import logging from abc import abstractmethod +from functools import partial from typing import Any, Optional, TypedDict import pandas as pd from flask_babel import lazy_gettext as _ -from sqlalchemy.exc import SQLAlchemyError from werkzeug.datastructures import FileStorage from superset import db @@ -37,6 +37,7 @@ from superset.models.core import Database from superset.sql_parse import Table from superset.utils.core import get_user +from superset.utils.decorators import on_error, transaction from superset.views.database.validators import schema_allows_file_upload logger = logging.getLogger(__name__) @@ -144,6 +145,7 @@ def __init__( # pylint: disable=too-many-arguments self._file = file self._reader = reader + @transaction(on_error=partial(on_error, reraise=DatabaseUploadSaveMetadataFailed)) def run(self) -> None: self.validate() if not self._model: @@ -172,12 +174,6 @@ def run(self) -> None: sqla_table.fetch_metadata() - try: - db.session.commit() - except SQLAlchemyError as ex: - db.session.rollback() - raise DatabaseUploadSaveMetadataFailed() from ex - def validate(self) -> None: self._model = DatabaseDAO.find_by_id(self._model_id) if not self._model: diff --git a/superset/commands/dataset/columns/delete.py b/superset/commands/dataset/columns/delete.py index 4739c2520f88..821528de74d4 100644 --- a/superset/commands/dataset/columns/delete.py +++ b/superset/commands/dataset/columns/delete.py @@ -15,6 +15,7 @@ # specific language governing permissions and limitations # under the License. import logging +from functools import partial from typing import Optional from superset import security_manager @@ -26,8 +27,8 @@ ) from superset.connectors.sqla.models import TableColumn from superset.daos.dataset import DatasetColumnDAO, DatasetDAO -from superset.daos.exceptions import DAODeleteFailedError from superset.exceptions import SupersetSecurityException +from superset.utils.decorators import on_error, transaction logger = logging.getLogger(__name__) @@ -38,15 +39,11 @@ def __init__(self, dataset_id: int, model_id: int): self._model_id = model_id self._model: Optional[TableColumn] = None + @transaction(on_error=partial(on_error, reraise=DatasetColumnDeleteFailedError)) def run(self) -> None: self.validate() assert self._model - - try: - DatasetColumnDAO.delete([self._model]) - except DAODeleteFailedError as ex: - logger.exception(ex.exception) - raise DatasetColumnDeleteFailedError() from ex + DatasetColumnDAO.delete([self._model]) def validate(self) -> None: # Validate/populate model exists diff --git a/superset/commands/dataset/create.py b/superset/commands/dataset/create.py index b72c3ff46ebb..a2d81e548bfb 100644 --- a/superset/commands/dataset/create.py +++ b/superset/commands/dataset/create.py @@ -15,11 +15,11 @@ # specific language governing permissions and limitations # under the License. 
import logging +from functools import partial from typing import Any, Optional from flask_appbuilder.models.sqla import Model from marshmallow import ValidationError -from sqlalchemy.exc import SQLAlchemyError from superset.commands.base import BaseCommand, CreateMixin from superset.commands.dataset.exceptions import ( @@ -31,10 +31,10 @@ TableNotFoundValidationError, ) from superset.daos.dataset import DatasetDAO -from superset.daos.exceptions import DAOCreateFailedError from superset.exceptions import SupersetSecurityException -from superset.extensions import db, security_manager +from superset.extensions import security_manager from superset.sql_parse import Table +from superset.utils.decorators import on_error, transaction logger = logging.getLogger(__name__) @@ -43,19 +43,12 @@ class CreateDatasetCommand(CreateMixin, BaseCommand): def __init__(self, data: dict[str, Any]): self._properties = data.copy() + @transaction(on_error=partial(on_error, reraise=DatasetCreateFailedError)) def run(self) -> Model: self.validate() - try: - # Creates SqlaTable (Dataset) - dataset = DatasetDAO.create(attributes=self._properties, commit=False) - # Updates columns and metrics from the dataset - dataset.fetch_metadata(commit=False) - db.session.commit() - except (SQLAlchemyError, DAOCreateFailedError) as ex: - logger.warning(ex, exc_info=True) - db.session.rollback() - raise DatasetCreateFailedError() from ex + dataset = DatasetDAO.create(attributes=self._properties) + dataset.fetch_metadata() return dataset def validate(self) -> None: diff --git a/superset/commands/dataset/delete.py b/superset/commands/dataset/delete.py index 4b7e61ab4c11..27753062aa76 100644 --- a/superset/commands/dataset/delete.py +++ b/superset/commands/dataset/delete.py @@ -15,6 +15,7 @@ # specific language governing permissions and limitations # under the License. import logging +from functools import partial from typing import Optional from superset import security_manager @@ -26,8 +27,8 @@ ) from superset.connectors.sqla.models import SqlaTable from superset.daos.dataset import DatasetDAO -from superset.daos.exceptions import DAODeleteFailedError from superset.exceptions import SupersetSecurityException +from superset.utils.decorators import on_error, transaction logger = logging.getLogger(__name__) @@ -37,15 +38,11 @@ def __init__(self, model_ids: list[int]): self._model_ids = model_ids self._models: Optional[list[SqlaTable]] = None + @transaction(on_error=partial(on_error, reraise=DatasetDeleteFailedError)) def run(self) -> None: self.validate() assert self._models - - try: - DatasetDAO.delete(self._models) - except DAODeleteFailedError as ex: - logger.exception(ex.exception) - raise DatasetDeleteFailedError() from ex + DatasetDAO.delete(self._models) def validate(self) -> None: # Validate/populate model exists diff --git a/superset/commands/dataset/duplicate.py b/superset/commands/dataset/duplicate.py index efe4935e60af..8e82a7662f65 100644 --- a/superset/commands/dataset/duplicate.py +++ b/superset/commands/dataset/duplicate.py @@ -15,12 +15,12 @@ # specific language governing permissions and limitations # under the License. 
import logging +from functools import partial from typing import Any from flask_appbuilder.models.sqla import Model from flask_babel import gettext as __ from marshmallow import ValidationError -from sqlalchemy.exc import SQLAlchemyError from superset.commands.base import BaseCommand, CreateMixin from superset.commands.dataset.exceptions import ( @@ -32,12 +32,12 @@ from superset.commands.exceptions import DatasourceTypeInvalidError from superset.connectors.sqla.models import SqlaTable, SqlMetric, TableColumn from superset.daos.dataset import DatasetDAO -from superset.daos.exceptions import DAOCreateFailedError from superset.errors import ErrorLevel, SupersetError, SupersetErrorType from superset.exceptions import SupersetErrorException from superset.extensions import db from superset.models.core import Database from superset.sql_parse import ParsedQuery, Table +from superset.utils.decorators import on_error, transaction logger = logging.getLogger(__name__) @@ -47,66 +47,61 @@ def __init__(self, data: dict[str, Any]) -> None: self._base_model: SqlaTable = SqlaTable() self._properties = data.copy() + @transaction(on_error=partial(on_error, reraise=DatasetDuplicateFailedError)) def run(self) -> Model: self.validate() - try: - database_id = self._base_model.database_id - table_name = self._properties["table_name"] - owners = self._properties["owners"] - database = db.session.query(Database).get(database_id) - if not database: - raise SupersetErrorException( - SupersetError( - message=__("The database was not found."), - error_type=SupersetErrorType.DATABASE_NOT_FOUND_ERROR, - level=ErrorLevel.ERROR, - ), - status=404, - ) - table = SqlaTable(table_name=table_name, owners=owners) - table.database = database - table.schema = self._base_model.schema - table.template_params = self._base_model.template_params - table.normalize_columns = self._base_model.normalize_columns - table.always_filter_main_dttm = self._base_model.always_filter_main_dttm - table.is_sqllab_view = True - table.sql = ParsedQuery( - self._base_model.sql, - engine=database.db_engine_spec.engine, - ).stripped() - db.session.add(table) - cols = [] - for config_ in self._base_model.columns: - column_name = config_.column_name - col = TableColumn( - column_name=column_name, - verbose_name=config_.verbose_name, - expression=config_.expression, - filterable=True, - groupby=True, - is_dttm=config_.is_dttm, - type=config_.type, - description=config_.description, - ) - cols.append(col) - table.columns = cols - mets = [] - for config_ in self._base_model.metrics: - metric_name = config_.metric_name - met = SqlMetric( - metric_name=metric_name, - verbose_name=config_.verbose_name, - expression=config_.expression, - metric_type=config_.metric_type, - description=config_.description, - ) - mets.append(met) - table.metrics = mets - db.session.commit() - except (SQLAlchemyError, DAOCreateFailedError) as ex: - logger.warning(ex, exc_info=True) - db.session.rollback() - raise DatasetDuplicateFailedError() from ex + database_id = self._base_model.database_id + table_name = self._properties["table_name"] + owners = self._properties["owners"] + database = db.session.query(Database).get(database_id) + if not database: + raise SupersetErrorException( + SupersetError( + message=__("The database was not found."), + error_type=SupersetErrorType.DATABASE_NOT_FOUND_ERROR, + level=ErrorLevel.ERROR, + ), + status=404, + ) + table = SqlaTable(table_name=table_name, owners=owners) + table.database = database + table.schema = self._base_model.schema + 
table.template_params = self._base_model.template_params + table.normalize_columns = self._base_model.normalize_columns + table.always_filter_main_dttm = self._base_model.always_filter_main_dttm + table.is_sqllab_view = True + table.sql = ParsedQuery( + self._base_model.sql, + engine=database.db_engine_spec.engine, + ).stripped() + db.session.add(table) + cols = [] + for config_ in self._base_model.columns: + column_name = config_.column_name + col = TableColumn( + column_name=column_name, + verbose_name=config_.verbose_name, + expression=config_.expression, + filterable=True, + groupby=True, + is_dttm=config_.is_dttm, + type=config_.type, + description=config_.description, + ) + cols.append(col) + table.columns = cols + mets = [] + for config_ in self._base_model.metrics: + metric_name = config_.metric_name + met = SqlMetric( + metric_name=metric_name, + verbose_name=config_.verbose_name, + expression=config_.expression, + metric_type=config_.metric_type, + description=config_.description, + ) + mets.append(met) + table.metrics = mets return table def validate(self) -> None: diff --git a/superset/commands/dataset/importers/v0.py b/superset/commands/dataset/importers/v0.py index acfe4a2c9160..d6f7380cb5d1 100644 --- a/superset/commands/dataset/importers/v0.py +++ b/superset/commands/dataset/importers/v0.py @@ -34,6 +34,7 @@ ) from superset.models.core import Database from superset.utils import json +from superset.utils.decorators import transaction from superset.utils.dict_import_export import DATABASES_KEY logger = logging.getLogger(__name__) @@ -211,7 +212,6 @@ def import_from_dict(data: dict[str, Any], sync: Optional[list[str]] = None) -> logger.info("Importing %d %s", len(data.get(DATABASES_KEY, [])), DATABASES_KEY) for database in data.get(DATABASES_KEY, []): Database.import_from_dict(database, sync=sync) - db.session.commit() else: logger.info("Supplied object is not a dictionary.") @@ -240,10 +240,10 @@ def __init__( if kwargs.get("sync_metrics"): self.sync.append("metrics") + @transaction() def run(self) -> None: self.validate() - # TODO (betodealmeida): add rollback in case of error for file_name, config in self._configs.items(): logger.info("Importing dataset from file %s", file_name) if isinstance(config, dict): @@ -260,7 +260,6 @@ def run(self) -> None: ) dataset["database_id"] = database.id SqlaTable.import_from_dict(dataset, sync=self.sync) - db.session.commit() def validate(self) -> None: # ensure all files are YAML diff --git a/superset/commands/dataset/importers/v1/utils.py b/superset/commands/dataset/importers/v1/utils.py index da39be4721c0..1c508fe2522e 100644 --- a/superset/commands/dataset/importers/v1/utils.py +++ b/superset/commands/dataset/importers/v1/utils.py @@ -178,7 +178,7 @@ def import_dataset( if data_uri and (not table_exists or force_data): load_data(data_uri, dataset, dataset.database) - if user := get_user(): + if (user := get_user()) and user not in dataset.owners: dataset.owners.append(user) return dataset diff --git a/superset/commands/dataset/metrics/delete.py b/superset/commands/dataset/metrics/delete.py index b48668852caf..0a749295dc3d 100644 --- a/superset/commands/dataset/metrics/delete.py +++ b/superset/commands/dataset/metrics/delete.py @@ -15,6 +15,7 @@ # specific language governing permissions and limitations # under the License. 
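In the v0 importer above, the per-item db.session.commit() calls are gone and run() is wrapped in @transaction(), so a failure on any item should now roll back the whole import instead of leaving earlier items committed. A minimal sketch of that shape, where import_one is a hypothetical stand-in for Database.import_from_dict / SqlaTable.import_from_dict:

# Sketch only: import_one is hypothetical; the decorator is the one introduced
# by this patch (superset.utils.decorators.transaction).
from superset.utils.decorators import transaction


def import_one(config: dict) -> None:
    ...  # would add ORM objects to db.session, as import_from_dict does


@transaction()
def import_all(configs: dict[str, dict]) -> None:
    for config in configs.values():
        import_one(config)
    # no commit inside the loop: either every object is persisted or none is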
import logging +from functools import partial from typing import Optional from superset import security_manager @@ -26,8 +27,8 @@ ) from superset.connectors.sqla.models import SqlMetric from superset.daos.dataset import DatasetDAO, DatasetMetricDAO -from superset.daos.exceptions import DAODeleteFailedError from superset.exceptions import SupersetSecurityException +from superset.utils.decorators import on_error, transaction logger = logging.getLogger(__name__) @@ -38,15 +39,11 @@ def __init__(self, dataset_id: int, model_id: int): self._model_id = model_id self._model: Optional[SqlMetric] = None + @transaction(on_error=partial(on_error, reraise=DatasetMetricDeleteFailedError)) def run(self) -> None: self.validate() assert self._model - - try: - DatasetMetricDAO.delete([self._model]) - except DAODeleteFailedError as ex: - logger.exception(ex.exception) - raise DatasetMetricDeleteFailedError() from ex + DatasetMetricDAO.delete([self._model]) def validate(self) -> None: # Validate/populate model exists diff --git a/superset/commands/dataset/refresh.py b/superset/commands/dataset/refresh.py index 5976956d7ced..9605ac866a95 100644 --- a/superset/commands/dataset/refresh.py +++ b/superset/commands/dataset/refresh.py @@ -15,6 +15,7 @@ # specific language governing permissions and limitations # under the License. import logging +from functools import partial from typing import Optional from flask_appbuilder.models.sqla import Model @@ -29,6 +30,7 @@ from superset.connectors.sqla.models import SqlaTable from superset.daos.dataset import DatasetDAO from superset.exceptions import SupersetSecurityException +from superset.utils.decorators import on_error, transaction logger = logging.getLogger(__name__) @@ -38,16 +40,12 @@ def __init__(self, model_id: int): self._model_id = model_id self._model: Optional[SqlaTable] = None + @transaction(on_error=partial(on_error, reraise=DatasetRefreshFailedError)) def run(self) -> Model: self.validate() - if self._model: - try: - self._model.fetch_metadata() - return self._model - except Exception as ex: - logger.exception(ex) - raise DatasetRefreshFailedError() from ex - raise DatasetRefreshFailedError() + assert self._model + self._model.fetch_metadata() + return self._model def validate(self) -> None: # Validate/populate model exists diff --git a/superset/commands/dataset/update.py b/superset/commands/dataset/update.py index 2b521452436e..14d1c5ef4470 100644 --- a/superset/commands/dataset/update.py +++ b/superset/commands/dataset/update.py @@ -16,10 +16,12 @@ # under the License. 
import logging from collections import Counter +from functools import partial from typing import Any, Optional from flask_appbuilder.models.sqla import Model from marshmallow import ValidationError +from sqlalchemy.exc import SQLAlchemyError from superset import security_manager from superset.commands.base import BaseCommand, UpdateMixin @@ -39,9 +41,9 @@ ) from superset.connectors.sqla.models import SqlaTable from superset.daos.dataset import DatasetDAO -from superset.daos.exceptions import DAOUpdateFailedError from superset.exceptions import SupersetSecurityException from superset.sql_parse import Table +from superset.utils.decorators import on_error, transaction logger = logging.getLogger(__name__) @@ -59,19 +61,20 @@ def __init__( self.override_columns = override_columns self._properties["override_columns"] = override_columns + @transaction( + on_error=partial( + on_error, + catches=( + SQLAlchemyError, + ValueError, + ), + reraise=DatasetUpdateFailedError, + ) + ) def run(self) -> Model: self.validate() - if self._model: - try: - dataset = DatasetDAO.update( - self._model, - attributes=self._properties, - ) - return dataset - except DAOUpdateFailedError as ex: - logger.exception(ex.exception) - raise DatasetUpdateFailedError() from ex - raise DatasetUpdateFailedError() + assert self._model + return DatasetDAO.update(self._model, attributes=self._properties) def validate(self) -> None: exceptions: list[ValidationError] = [] diff --git a/superset/commands/explore/permalink/create.py b/superset/commands/explore/permalink/create.py index 731e0b5ce8a0..2128fa4b8c40 100644 --- a/superset/commands/explore/permalink/create.py +++ b/superset/commands/explore/permalink/create.py @@ -15,18 +15,22 @@ # specific language governing permissions and limitations # under the License. 
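UpdateDatasetCommand above is the first place the catches argument appears: only SQLAlchemyError and ValueError are translated into DatasetUpdateFailedError, and ValueError is included because DatasetDAO.update_columns (changed later in this diff) now raises a plain ValueError for an invalid python_date_format. A small demonstration of that policy, using the illustrative on_error sketched after the dataset create and delete commands earlier (the real helper may differ in detail):

# Demonstration of the catches/reraise policy; relies on the illustrative
# on_error sketch above, not on Superset's actual implementation.
from functools import partial

from sqlalchemy.exc import SQLAlchemyError

from superset.commands.dataset.exceptions import DatasetUpdateFailedError

handler = partial(
    on_error,
    catches=(SQLAlchemyError, ValueError),
    reraise=DatasetUpdateFailedError,
)

try:
    handler(ValueError("python_date_format is an invalid date/timestamp format."))
except DatasetUpdateFailedError:
    pass  # ValueError is listed in `catches`, so it is translated

try:
    handler(KeyError("unexpected"))
except KeyError:
    pass  # not listed in `catches`, so it propagates unchanged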
import logging +from functools import partial from typing import Any, Optional from sqlalchemy.exc import SQLAlchemyError -from superset import db from superset.commands.explore.permalink.base import BaseExplorePermalinkCommand from superset.commands.key_value.create import CreateKeyValueCommand from superset.explore.permalink.exceptions import ExplorePermalinkCreateFailedError from superset.explore.utils import check_access as check_chart_access -from superset.key_value.exceptions import KeyValueCodecEncodeException +from superset.key_value.exceptions import ( + KeyValueCodecEncodeException, + KeyValueCreateFailedError, +) from superset.key_value.utils import encode_permalink_key from superset.utils.core import DatasourceType +from superset.utils.decorators import on_error, transaction logger = logging.getLogger(__name__) @@ -37,35 +41,39 @@ def __init__(self, state: dict[str, Any]): self.datasource: str = state["formData"]["datasource"] self.state = state + @transaction( + on_error=partial( + on_error, + catches=( + KeyValueCodecEncodeException, + KeyValueCreateFailedError, + SQLAlchemyError, + ), + reraise=ExplorePermalinkCreateFailedError, + ), + ) def run(self) -> str: self.validate() - try: - d_id, d_type = self.datasource.split("__") - datasource_id = int(d_id) - datasource_type = DatasourceType(d_type) - check_chart_access(datasource_id, self.chart_id, datasource_type) - value = { - "chartId": self.chart_id, - "datasourceId": datasource_id, - "datasourceType": datasource_type.value, - "datasource": self.datasource, - "state": self.state, - } - command = CreateKeyValueCommand( - resource=self.resource, - value=value, - codec=self.codec, - ) - key = command.run() - if key.id is None: - raise ExplorePermalinkCreateFailedError("Unexpected missing key id") - db.session.commit() - return encode_permalink_key(key=key.id, salt=self.salt) - except KeyValueCodecEncodeException as ex: - raise ExplorePermalinkCreateFailedError(str(ex)) from ex - except SQLAlchemyError as ex: - logger.exception("Error running create command") - raise ExplorePermalinkCreateFailedError() from ex + d_id, d_type = self.datasource.split("__") + datasource_id = int(d_id) + datasource_type = DatasourceType(d_type) + check_chart_access(datasource_id, self.chart_id, datasource_type) + value = { + "chartId": self.chart_id, + "datasourceId": datasource_id, + "datasourceType": datasource_type.value, + "datasource": self.datasource, + "state": self.state, + } + command = CreateKeyValueCommand( + resource=self.resource, + value=value, + codec=self.codec, + ) + key = command.run() + if key.id is None: + raise ExplorePermalinkCreateFailedError("Unexpected missing key id") + return encode_permalink_key(key=key.id, salt=self.salt) def validate(self) -> None: pass diff --git a/superset/commands/importers/v1/__init__.py b/superset/commands/importers/v1/__init__.py index 25b8b8790f04..f90708acf51f 100644 --- a/superset/commands/importers/v1/__init__.py +++ b/superset/commands/importers/v1/__init__.py @@ -32,6 +32,7 @@ ) from superset.daos.base import BaseDAO from superset.models.core import Database # noqa: F401 +from superset.utils.decorators import transaction class ImportModelsCommand(BaseCommand): @@ -67,18 +68,15 @@ def _import(configs: dict[str, Any], overwrite: bool = False) -> None: def _get_uuids(cls) -> set[str]: return {str(model.uuid) for model in db.session.query(cls.dao.model_cls).all()} + @transaction() def run(self) -> None: self.validate() - # rollback to prevent partial imports try: self._import(self._configs, 
self.overwrite) - db.session.commit() except CommandException: - db.session.rollback() raise except Exception as ex: - db.session.rollback() raise self.import_error() from ex def validate(self) -> None: # noqa: F811 diff --git a/superset/commands/importers/v1/assets.py b/superset/commands/importers/v1/assets.py index 29a2dec17908..78a2251a293a 100644 --- a/superset/commands/importers/v1/assets.py +++ b/superset/commands/importers/v1/assets.py @@ -14,6 +14,7 @@ # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. +from functools import partial from typing import Any, Optional from marshmallow import Schema @@ -44,6 +45,7 @@ from superset.migrations.shared.native_filters import migrate_dashboard from superset.models.dashboard import dashboard_slices from superset.queries.saved_queries.schemas import ImportV1SavedQuerySchema +from superset.utils.decorators import on_error, transaction class ImportAssetsCommand(BaseCommand): @@ -153,16 +155,16 @@ def _import(configs: dict[str, Any]) -> None: if chart.viz_type == "filter_box": db.session.delete(chart) + @transaction( + on_error=partial( + on_error, + catches=(Exception,), + reraise=ImportFailedError, + ) + ) def run(self) -> None: self.validate() - - # rollback to prevent partial imports - try: - self._import(self._configs) - db.session.commit() - except Exception as ex: - db.session.rollback() - raise ImportFailedError() from ex + self._import(self._configs) def validate(self) -> None: exceptions: list[ValidationError] = [] diff --git a/superset/commands/importers/v1/examples.py b/superset/commands/importers/v1/examples.py index 6525031ce4f3..bcf6b5062fb9 100644 --- a/superset/commands/importers/v1/examples.py +++ b/superset/commands/importers/v1/examples.py @@ -43,6 +43,7 @@ from superset.models.dashboard import dashboard_slices from superset.utils.core import get_example_default_schema from superset.utils.database import get_example_database +from superset.utils.decorators import transaction class ImportExamplesCommand(ImportModelsCommand): @@ -62,19 +63,17 @@ def __init__(self, contents: dict[str, str], *args: Any, **kwargs: Any): super().__init__(contents, *args, **kwargs) self.force_data = kwargs.get("force_data", False) + @transaction() def run(self) -> None: self.validate() - # rollback to prevent partial imports try: self._import( self._configs, self.overwrite, self.force_data, ) - db.session.commit() except Exception as ex: - db.session.rollback() raise self.import_error() from ex @classmethod diff --git a/superset/commands/key_value/create.py b/superset/commands/key_value/create.py index 7308321e44a7..81b7c4c3d4a9 100644 --- a/superset/commands/key_value/create.py +++ b/superset/commands/key_value/create.py @@ -16,17 +16,17 @@ # under the License. 
import logging from datetime import datetime +from functools import partial from typing import Any, Optional, Union from uuid import UUID -from sqlalchemy.exc import SQLAlchemyError - from superset import db from superset.commands.base import BaseCommand from superset.key_value.exceptions import KeyValueCreateFailedError from superset.key_value.models import KeyValueEntry from superset.key_value.types import Key, KeyValueCodec, KeyValueResource from superset.utils.core import get_user_id +from superset.utils.decorators import on_error, transaction logger = logging.getLogger(__name__) @@ -62,6 +62,7 @@ def __init__( # pylint: disable=too-many-arguments self.key = key self.expires_on = expires_on + @transaction(on_error=partial(on_error, reraise=KeyValueCreateFailedError)) def run(self) -> Key: """ Persist the value @@ -69,11 +70,8 @@ def run(self) -> Key: :return: the key associated with the persisted value """ - try: - return self.create() - except SQLAlchemyError as ex: - db.session.rollback() - raise KeyValueCreateFailedError() from ex + + return self.create() def validate(self) -> None: pass @@ -98,6 +96,7 @@ def create(self) -> Key: entry.id = self.key except ValueError as ex: raise KeyValueCreateFailedError() from ex + db.session.add(entry) db.session.flush() return Key(id=entry.id, uuid=entry.uuid) diff --git a/superset/commands/key_value/delete.py b/superset/commands/key_value/delete.py index 37eb7087e6a2..a3fdf079c73c 100644 --- a/superset/commands/key_value/delete.py +++ b/superset/commands/key_value/delete.py @@ -15,17 +15,17 @@ # specific language governing permissions and limitations # under the License. import logging +from functools import partial from typing import Union from uuid import UUID -from sqlalchemy.exc import SQLAlchemyError - from superset import db from superset.commands.base import BaseCommand from superset.key_value.exceptions import KeyValueDeleteFailedError from superset.key_value.models import KeyValueEntry from superset.key_value.types import KeyValueResource from superset.key_value.utils import get_filter +from superset.utils.decorators import on_error, transaction logger = logging.getLogger(__name__) @@ -45,20 +45,19 @@ def __init__(self, resource: KeyValueResource, key: Union[int, UUID]): self.resource = resource self.key = key + @transaction(on_error=partial(on_error, reraise=KeyValueDeleteFailedError)) def run(self) -> bool: - try: - return self.delete() - except SQLAlchemyError as ex: - db.session.rollback() - raise KeyValueDeleteFailedError() from ex + return self.delete() def validate(self) -> None: pass def delete(self) -> bool: - filter_ = get_filter(self.resource, self.key) - if entry := db.session.query(KeyValueEntry).filter_by(**filter_).first(): + if ( + entry := db.session.query(KeyValueEntry) + .filter_by(**get_filter(self.resource, self.key)) + .first() + ): db.session.delete(entry) - db.session.flush() return True return False diff --git a/superset/commands/key_value/delete_expired.py b/superset/commands/key_value/delete_expired.py index 92d45683f222..54991c7531d2 100644 --- a/superset/commands/key_value/delete_expired.py +++ b/superset/commands/key_value/delete_expired.py @@ -16,15 +16,16 @@ # under the License. 
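The key-value create command above keeps a db.session.flush() even though the commit now happens in the @transaction() decorator: flushing emits the INSERT so the autogenerated id is available for the returned Key, while the commit-or-rollback decision stays with the decorator. A standalone SQLAlchemy illustration of that behaviour (plain SQLAlchemy, not Superset code):

# Standalone sketch: flush issues the INSERT and populates the primary key, but
# nothing is persisted unless the enclosing transaction is committed.
from sqlalchemy import Column, Integer, String, create_engine
from sqlalchemy.orm import Session, declarative_base

Base = declarative_base()


class Entry(Base):
    __tablename__ = "entry"
    id = Column(Integer, primary_key=True)
    value = Column(String(50))


engine = create_engine("sqlite://")
Base.metadata.create_all(engine)

with Session(engine) as session:
    entry = Entry(value="demo")
    session.add(entry)
    session.flush()      # INSERT is emitted; entry.id is now populated
    print(entry.id)      # usable before the transaction ends
    session.rollback()   # nothing is persisted unless commit() is called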
import logging from datetime import datetime +from functools import partial from sqlalchemy import and_ -from sqlalchemy.exc import SQLAlchemyError from superset import db from superset.commands.base import BaseCommand from superset.key_value.exceptions import KeyValueDeleteFailedError from superset.key_value.models import KeyValueEntry from superset.key_value.types import KeyValueResource +from superset.utils.decorators import on_error, transaction logger = logging.getLogger(__name__) @@ -41,12 +42,9 @@ def __init__(self, resource: KeyValueResource): """ self.resource = resource + @transaction(on_error=partial(on_error, reraise=KeyValueDeleteFailedError)) def run(self) -> None: - try: - self.delete_expired() - except SQLAlchemyError as ex: - db.session.rollback() - raise KeyValueDeleteFailedError() from ex + self.delete_expired() def validate(self) -> None: pass @@ -62,4 +60,3 @@ def delete_expired(self) -> None: ) .delete() ) - db.session.flush() diff --git a/superset/commands/key_value/update.py b/superset/commands/key_value/update.py index 098c9f860d1b..b6ffc22174f6 100644 --- a/superset/commands/key_value/update.py +++ b/superset/commands/key_value/update.py @@ -17,11 +17,10 @@ import logging from datetime import datetime +from functools import partial from typing import Any, Optional, Union from uuid import UUID -from sqlalchemy.exc import SQLAlchemyError - from superset import db from superset.commands.base import BaseCommand from superset.key_value.exceptions import KeyValueUpdateFailedError @@ -29,6 +28,7 @@ from superset.key_value.types import Key, KeyValueCodec, KeyValueResource from superset.key_value.utils import get_filter from superset.utils.core import get_user_id +from superset.utils.decorators import on_error, transaction logger = logging.getLogger(__name__) @@ -64,12 +64,9 @@ def __init__( # pylint: disable=too-many-arguments self.codec = codec self.expires_on = expires_on + @transaction(on_error=partial(on_error, reraise=KeyValueUpdateFailedError)) def run(self) -> Optional[Key]: - try: - return self.update() - except SQLAlchemyError as ex: - db.session.rollback() - raise KeyValueUpdateFailedError() from ex + return self.update() def validate(self) -> None: pass diff --git a/superset/commands/key_value/upsert.py b/superset/commands/key_value/upsert.py index 2c985530bf20..32918d9b1439 100644 --- a/superset/commands/key_value/upsert.py +++ b/superset/commands/key_value/upsert.py @@ -17,6 +17,7 @@ import logging from datetime import datetime +from functools import partial from typing import Any, Optional, Union from uuid import UUID @@ -33,6 +34,7 @@ from superset.key_value.types import Key, KeyValueCodec, KeyValueResource from superset.key_value.utils import get_filter from superset.utils.core import get_user_id +from superset.utils.decorators import on_error, transaction logger = logging.getLogger(__name__) @@ -68,27 +70,29 @@ def __init__( # pylint: disable=too-many-arguments self.codec = codec self.expires_on = expires_on + @transaction( + on_error=partial( + on_error, + catches=(KeyValueCreateFailedError, SQLAlchemyError), + reraise=KeyValueUpsertFailedError, + ), + ) def run(self) -> Key: - try: - return self.upsert() - except (KeyValueCreateFailedError, SQLAlchemyError) as ex: - db.session.rollback() - raise KeyValueUpsertFailedError() from ex + return self.upsert() def validate(self) -> None: pass def upsert(self) -> Key: - filter_ = get_filter(self.resource, self.key) - entry: KeyValueEntry = ( - db.session.query(KeyValueEntry).filter_by(**filter_).first() - ) - 
if entry: + if ( + entry := db.session.query(KeyValueEntry) + .filter_by(**get_filter(self.resource, self.key)) + .first() + ): entry.value = self.codec.encode(self.value) entry.expires_on = self.expires_on entry.changed_on = datetime.now() entry.changed_by_fk = get_user_id() - db.session.flush() return Key(entry.id, entry.uuid) return CreateKeyValueCommand( diff --git a/superset/commands/query/delete.py b/superset/commands/query/delete.py index 978f30c5c4a8..a93c4038abf4 100644 --- a/superset/commands/query/delete.py +++ b/superset/commands/query/delete.py @@ -15,6 +15,7 @@ # specific language governing permissions and limitations # under the License. import logging +from functools import partial from typing import Optional from superset.commands.base import BaseCommand @@ -22,9 +23,9 @@ SavedQueryDeleteFailedError, SavedQueryNotFoundError, ) -from superset.daos.exceptions import DAODeleteFailedError from superset.daos.query import SavedQueryDAO from superset.models.dashboard import Dashboard +from superset.utils.decorators import on_error, transaction logger = logging.getLogger(__name__) @@ -34,15 +35,11 @@ def __init__(self, model_ids: list[int]): self._model_ids = model_ids self._models: Optional[list[Dashboard]] = None + @transaction(on_error=partial(on_error, reraise=SavedQueryDeleteFailedError)) def run(self) -> None: self.validate() assert self._models - - try: - SavedQueryDAO.delete(self._models) - except DAODeleteFailedError as ex: - logger.exception(ex.exception) - raise SavedQueryDeleteFailedError() from ex + SavedQueryDAO.delete(self._models) def validate(self) -> None: # Validate/populate model exists diff --git a/superset/commands/report/create.py b/superset/commands/report/create.py index ed1889e8b332..2a67f640022d 100644 --- a/superset/commands/report/create.py +++ b/superset/commands/report/create.py @@ -15,6 +15,7 @@ # specific language governing permissions and limitations # under the License. import logging +from functools import partial from typing import Any, Optional from flask_babel import gettext as _ @@ -31,7 +32,6 @@ ReportScheduleNameUniquenessValidationError, ) from superset.daos.database import DatabaseDAO -from superset.daos.exceptions import DAOCreateFailedError from superset.daos.report import ReportScheduleDAO from superset.reports.models import ( ReportCreationMethod, @@ -40,6 +40,7 @@ ) from superset.reports.types import ReportScheduleExtra from superset.utils import json +from superset.utils.decorators import on_error, transaction logger = logging.getLogger(__name__) @@ -48,13 +49,10 @@ class CreateReportScheduleCommand(CreateMixin, BaseReportScheduleCommand): def __init__(self, data: dict[str, Any]): self._properties = data.copy() + @transaction(on_error=partial(on_error, reraise=ReportScheduleCreateFailedError)) def run(self) -> ReportSchedule: self.validate() - try: - return ReportScheduleDAO.create(attributes=self._properties) - except DAOCreateFailedError as ex: - logger.exception(ex.exception) - raise ReportScheduleCreateFailedError() from ex + return ReportScheduleDAO.create(attributes=self._properties) def validate(self) -> None: """ diff --git a/superset/commands/report/delete.py b/superset/commands/report/delete.py index 87ea4b99dd01..36e6711105c8 100644 --- a/superset/commands/report/delete.py +++ b/superset/commands/report/delete.py @@ -15,6 +15,7 @@ # specific language governing permissions and limitations # under the License. 
import logging +from functools import partial from typing import Optional from superset import security_manager @@ -24,10 +25,10 @@ ReportScheduleForbiddenError, ReportScheduleNotFoundError, ) -from superset.daos.exceptions import DAODeleteFailedError from superset.daos.report import ReportScheduleDAO from superset.exceptions import SupersetSecurityException from superset.reports.models import ReportSchedule +from superset.utils.decorators import on_error, transaction logger = logging.getLogger(__name__) @@ -37,15 +38,11 @@ def __init__(self, model_ids: list[int]): self._model_ids = model_ids self._models: Optional[list[ReportSchedule]] = None + @transaction(on_error=partial(on_error, reraise=ReportScheduleDeleteFailedError)) def run(self) -> None: self.validate() assert self._models - - try: - ReportScheduleDAO.delete(self._models) - except DAODeleteFailedError as ex: - logger.exception(ex.exception) - raise ReportScheduleDeleteFailedError() from ex + ReportScheduleDAO.delete(self._models) def validate(self) -> None: # Validate/populate model exists diff --git a/superset/commands/report/execute.py b/superset/commands/report/execute.py index 637898a7a0a5..c57828eac497 100644 --- a/superset/commands/report/execute.py +++ b/superset/commands/report/execute.py @@ -69,7 +69,7 @@ from superset.utils import json from superset.utils.core import HeaderDataType, override_user from superset.utils.csv import get_chart_csv_data, get_chart_dataframe -from superset.utils.decorators import logs_context +from superset.utils.decorators import logs_context, transaction from superset.utils.pdf import build_pdf_from_screenshots from superset.utils.screenshots import ChartScreenshot, DashboardScreenshot from superset.utils.urls import get_url_path @@ -120,7 +120,6 @@ def update_report_schedule(self, state: ReportState) -> None: self._report_schedule.last_state = state self._report_schedule.last_eval_dttm = datetime.utcnow() - db.session.commit() def create_log(self, error_message: Optional[str] = None) -> None: """ @@ -138,7 +137,7 @@ def create_log(self, error_message: Optional[str] = None) -> None: uuid=self._execution_id, ) db.session.add(log) - db.session.commit() + db.session.commit() # pylint: disable=consider-using-transaction def _get_url( self, @@ -690,6 +689,7 @@ def __init__( self._report_schedule = report_schedule self._scheduled_dttm = scheduled_dttm + @transaction() def run(self) -> None: for state_cls in self.states_cls: if (self._report_schedule.last_state is None and state_cls.initial) or ( @@ -718,6 +718,7 @@ def __init__(self, task_id: str, model_id: int, scheduled_dttm: datetime): self._scheduled_dttm = scheduled_dttm self._execution_id = UUID(task_id) + @transaction() def run(self) -> None: try: self.validate() diff --git a/superset/commands/report/log_prune.py b/superset/commands/report/log_prune.py index f14f7856a1e1..a780bf51e033 100644 --- a/superset/commands/report/log_prune.py +++ b/superset/commands/report/log_prune.py @@ -17,12 +17,14 @@ import logging from datetime import datetime, timedelta +from sqlalchemy.exc import SQLAlchemyError + from superset import db from superset.commands.base import BaseCommand from superset.commands.report.exceptions import ReportSchedulePruneLogError -from superset.daos.exceptions import DAODeleteFailedError from superset.daos.report import ReportScheduleDAO from superset.reports.models import ReportSchedule +from superset.utils.decorators import transaction logger = logging.getLogger(__name__) @@ -32,9 +34,7 @@ class 
AsyncPruneReportScheduleLogCommand(BaseCommand): Prunes logs from all report schedules """ - def __init__(self, worker_context: bool = True): - self._worker_context = worker_context - + @transaction() def run(self) -> None: self.validate() prune_errors = [] @@ -46,15 +46,15 @@ def run(self) -> None: ) try: row_count = ReportScheduleDAO.bulk_delete_logs( - report_schedule, from_date, commit=False + report_schedule, + from_date, ) - db.session.commit() logger.info( "Deleted %s logs for report schedule id: %s", str(row_count), str(report_schedule.id), ) - except DAODeleteFailedError as ex: + except SQLAlchemyError as ex: prune_errors.append(str(ex)) if prune_errors: raise ReportSchedulePruneLogError(";".join(prune_errors)) diff --git a/superset/commands/report/update.py b/superset/commands/report/update.py index ad54f44f0618..2aab3bd8c452 100644 --- a/superset/commands/report/update.py +++ b/superset/commands/report/update.py @@ -15,6 +15,7 @@ # specific language governing permissions and limitations # under the License. import logging +from functools import partial from typing import Any, Optional from flask_appbuilder.models.sqla import Model @@ -32,11 +33,11 @@ ReportScheduleUpdateFailedError, ) from superset.daos.database import DatabaseDAO -from superset.daos.exceptions import DAOUpdateFailedError from superset.daos.report import ReportScheduleDAO from superset.exceptions import SupersetSecurityException from superset.reports.models import ReportSchedule, ReportScheduleType, ReportState from superset.utils import json +from superset.utils.decorators import on_error, transaction logger = logging.getLogger(__name__) @@ -47,16 +48,10 @@ def __init__(self, model_id: int, data: dict[str, Any]): self._properties = data.copy() self._model: Optional[ReportSchedule] = None + @transaction(on_error=partial(on_error, reraise=ReportScheduleUpdateFailedError)) def run(self) -> Model: self.validate() - assert self._model - - try: - report_schedule = ReportScheduleDAO.update(self._model, self._properties) - except DAOUpdateFailedError as ex: - logger.exception(ex.exception) - raise ReportScheduleUpdateFailedError() from ex - return report_schedule + return ReportScheduleDAO.update(self._model, self._properties) def validate(self) -> None: """ diff --git a/superset/commands/security/create.py b/superset/commands/security/create.py index d70bbb7111a8..0288cf4d0b90 100644 --- a/superset/commands/security/create.py +++ b/superset/commands/security/create.py @@ -23,9 +23,9 @@ from superset.commands.exceptions import DatasourceNotFoundValidationError from superset.commands.utils import populate_roles from superset.connectors.sqla.models import SqlaTable -from superset.daos.exceptions import DAOCreateFailedError from superset.daos.security import RLSDAO from superset.extensions import db +from superset.utils.decorators import transaction logger = logging.getLogger(__name__) @@ -36,13 +36,10 @@ def __init__(self, data: dict[str, Any]): self._tables = self._properties.get("tables", []) self._roles = self._properties.get("roles", []) + @transaction() def run(self) -> Any: self.validate() - try: - return RLSDAO.create(attributes=self._properties) - except DAOCreateFailedError as ex: - logger.exception(ex.exception) - raise + return RLSDAO.create(attributes=self._properties) def validate(self) -> None: roles = populate_roles(self._roles) diff --git a/superset/commands/security/delete.py b/superset/commands/security/delete.py index 2c19c5f89b78..662474c27edc 100644 --- a/superset/commands/security/delete.py +++ 
b/superset/commands/security/delete.py @@ -16,15 +16,16 @@ # under the License. import logging +from functools import partial from superset.commands.base import BaseCommand from superset.commands.security.exceptions import ( RLSRuleNotFoundError, RuleDeleteFailedError, ) -from superset.daos.exceptions import DAODeleteFailedError from superset.daos.security import RLSDAO from superset.reports.models import ReportSchedule +from superset.utils.decorators import on_error, transaction logger = logging.getLogger(__name__) @@ -34,13 +35,10 @@ def __init__(self, model_ids: list[int]): self._model_ids = model_ids self._models: list[ReportSchedule] = [] + @transaction(on_error=partial(on_error, reraise=RuleDeleteFailedError)) def run(self) -> None: self.validate() - try: - RLSDAO.delete(self._models) - except DAODeleteFailedError as ex: - logger.exception(ex.exception) - raise RuleDeleteFailedError() from ex + RLSDAO.delete(self._models) def validate(self) -> None: # Validate/populate model exists diff --git a/superset/commands/security/update.py b/superset/commands/security/update.py index 54d7a66a2a23..fa17b249b47b 100644 --- a/superset/commands/security/update.py +++ b/superset/commands/security/update.py @@ -24,9 +24,9 @@ from superset.commands.security.exceptions import RLSRuleNotFoundError from superset.commands.utils import populate_roles from superset.connectors.sqla.models import RowLevelSecurityFilter, SqlaTable -from superset.daos.exceptions import DAOUpdateFailedError from superset.daos.security import RLSDAO from superset.extensions import db +from superset.utils.decorators import transaction logger = logging.getLogger(__name__) @@ -39,17 +39,11 @@ def __init__(self, model_id: int, data: dict[str, Any]): self._roles = self._properties.get("roles", []) self._model: Optional[RowLevelSecurityFilter] = None + @transaction() def run(self) -> Any: self.validate() assert self._model - - try: - rule = RLSDAO.update(self._model, self._properties) - except DAOUpdateFailedError as ex: - logger.exception(ex.exception) - raise - - return rule + return RLSDAO.update(self._model, self._properties) def validate(self) -> None: self._model = RLSDAO.find_by_id(int(self._model_id)) diff --git a/superset/commands/sql_lab/execute.py b/superset/commands/sql_lab/execute.py index 911424af51ce..0c3e33b52916 100644 --- a/superset/commands/sql_lab/execute.py +++ b/superset/commands/sql_lab/execute.py @@ -22,10 +22,11 @@ from typing import Any, TYPE_CHECKING from flask_babel import gettext as __ +from sqlalchemy.exc import SQLAlchemyError +from superset import db from superset.commands.base import BaseCommand from superset.common.db_query_status import QueryStatus -from superset.daos.exceptions import DAOCreateFailedError from superset.errors import SupersetErrorType from superset.exceptions import ( SupersetErrorException, @@ -41,6 +42,7 @@ ) from superset.sqllab.execution_context_convertor import ExecutionContextConvertor from superset.sqllab.limiting_factor import LimitingFactor +from superset.utils.decorators import transaction if TYPE_CHECKING: from superset.daos.database import DatabaseDAO @@ -90,6 +92,7 @@ def __init__( def validate(self) -> None: pass + @transaction() def run( # pylint: disable=too-many-statements,useless-suppression self, ) -> CommandResult: @@ -178,9 +181,22 @@ def _validate_query_db(cls, database: Database | None) -> None: ) def _save_new_query(self, query: Query) -> None: + """ + Saves the new SQL Lab query. 
+ + Committing within a transaction violates the "unit of work" construct, but is + necessary for async querying. The Celery task is defined within the confines + of another command and needs to read a previously committed state given the + `READ COMMITTED` isolation level. + + To mitigate said issue, ideally there would be a command to prepare said query + and another to execute it, either in a sync or async manner. + + :param query: The SQL Lab query + """ try: self._query_dao.create(query) - except DAOCreateFailedError as ex: + except SQLAlchemyError as ex: raise SqlLabException( self._execution_context, SupersetErrorType.GENERIC_DB_ENGINE_ERROR, @@ -189,6 +205,8 @@ def _save_new_query(self, query: Query) -> None: "Please contact an administrator for further assistance or try again.", ) from ex + db.session.commit() # pylint: disable=consider-using-transaction + def _validate_access(self, query: Query) -> None: try: self._access_validator.validate(query) diff --git a/superset/commands/tag/create.py b/superset/commands/tag/create.py index ea23b8d59da1..775250dc8172 100644 --- a/superset/commands/tag/create.py +++ b/superset/commands/tag/create.py @@ -15,16 +15,17 @@ # specific language governing permissions and limitations # under the License. import logging +from functools import partial from typing import Any -from superset import db, security_manager +from superset import security_manager from superset.commands.base import BaseCommand, CreateMixin from superset.commands.tag.exceptions import TagCreateFailedError, TagInvalidError from superset.commands.tag.utils import to_object_model, to_object_type -from superset.daos.exceptions import DAOCreateFailedError from superset.daos.tag import TagDAO from superset.exceptions import SupersetSecurityException from superset.tags.models import ObjectType, TagType +from superset.utils.decorators import on_error, transaction logger = logging.getLogger(__name__) @@ -35,20 +36,18 @@ def __init__(self, object_type: ObjectType, object_id: int, tags: list[str]): self._object_id = object_id self._tags = tags + @transaction(on_error=partial(on_error, reraise=TagCreateFailedError)) def run(self) -> None: self.validate() - try: - object_type = to_object_type(self._object_type) - if object_type is None: - raise TagCreateFailedError(f"invalid object type {self._object_type}") - TagDAO.create_custom_tagged_objects( - object_type=object_type, - object_id=self._object_id, - tag_names=self._tags, - ) - except DAOCreateFailedError as ex: - logger.exception(ex.exception) - raise TagCreateFailedError() from ex + object_type = to_object_type(self._object_type) + if object_type is None: + raise TagCreateFailedError(f"invalid object type {self._object_type}") + + TagDAO.create_custom_tagged_objects( + object_type=object_type, + object_id=self._object_id, + tag_names=self._tags, + ) def validate(self) -> None: exceptions = [] @@ -71,27 +70,20 @@ def __init__(self, data: dict[str, Any], bulk_create: bool = False): self._bulk_create = bulk_create self._skipped_tagged_objects: set[tuple[str, int]] = set() + @transaction(on_error=partial(on_error, reraise=TagCreateFailedError)) def run(self) -> tuple[set[tuple[str, int]], set[tuple[str, int]]]: self.validate() - try: - tag_name = self._properties["name"] - tag = TagDAO.get_by_name(tag_name.strip(), TagType.custom) - TagDAO.create_tag_relationship( - objects_to_tag=self._properties.get("objects_to_tag", []), - tag=tag, - bulk_create=self._bulk_create, - ) - - tag.description = self._properties.get("description", "") - - 
db.session.commit() - - return set(self._properties["objects_to_tag"]), self._skipped_tagged_objects + tag_name = self._properties["name"] + tag = TagDAO.get_by_name(tag_name.strip(), TagType.custom) + TagDAO.create_tag_relationship( + objects_to_tag=self._properties.get("objects_to_tag", []), + tag=tag, + bulk_create=self._bulk_create, + ) - except DAOCreateFailedError as ex: - logger.exception(ex.exception) - raise TagCreateFailedError() from ex + tag.description = self._properties.get("description", "") + return set(self._properties["objects_to_tag"]), self._skipped_tagged_objects def validate(self) -> None: exceptions = [] diff --git a/superset/commands/tag/delete.py b/superset/commands/tag/delete.py index c4f22390095d..89a2a5a5568d 100644 --- a/superset/commands/tag/delete.py +++ b/superset/commands/tag/delete.py @@ -15,6 +15,7 @@ # specific language governing permissions and limitations # under the License. import logging +from functools import partial from superset.commands.base import BaseCommand from superset.commands.tag.exceptions import ( @@ -25,9 +26,9 @@ TagNotFoundError, ) from superset.commands.tag.utils import to_object_type -from superset.daos.exceptions import DAODeleteFailedError from superset.daos.tag import TagDAO from superset.tags.models import ObjectType +from superset.utils.decorators import on_error, transaction from superset.views.base import DeleteMixin logger = logging.getLogger(__name__) @@ -39,18 +40,15 @@ def __init__(self, object_type: ObjectType, object_id: int, tag: str): self._object_id = object_id self._tag = tag + @transaction(on_error=partial(on_error, reraise=TaggedObjectDeleteFailedError)) def run(self) -> None: self.validate() - try: - object_type = to_object_type(self._object_type) - if object_type is None: - raise TaggedObjectDeleteFailedError( - f"invalid object type {self._object_type}" - ) - TagDAO.delete_tagged_object(object_type, self._object_id, self._tag) - except DAODeleteFailedError as ex: - logger.exception(ex.exception) - raise TaggedObjectDeleteFailedError() from ex + object_type = to_object_type(self._object_type) + if object_type is None: + raise TaggedObjectDeleteFailedError( + f"invalid object type {self._object_type}" + ) + TagDAO.delete_tagged_object(object_type, self._object_id, self._tag) def validate(self) -> None: exceptions = [] @@ -92,13 +90,10 @@ class DeleteTagsCommand(DeleteMixin, BaseCommand): def __init__(self, tags: list[str]): self._tags = tags + @transaction(on_error=partial(on_error, reraise=TagDeleteFailedError)) def run(self) -> None: self.validate() - try: - TagDAO.delete_tags(self._tags) - except DAODeleteFailedError as ex: - logger.exception(ex.exception) - raise TagDeleteFailedError() from ex + TagDAO.delete_tags(self._tags) def validate(self) -> None: exceptions = [] diff --git a/superset/commands/tag/update.py b/superset/commands/tag/update.py index 431bf93c4de8..fa5e125414cf 100644 --- a/superset/commands/tag/update.py +++ b/superset/commands/tag/update.py @@ -25,6 +25,7 @@ from superset.commands.tag.utils import to_object_type from superset.daos.tag import TagDAO from superset.tags.models import Tag +from superset.utils.decorators import transaction logger = logging.getLogger(__name__) @@ -35,18 +36,17 @@ def __init__(self, model_id: int, data: dict[str, Any]): self._properties = data.copy() self._model: Optional[Tag] = None + @transaction() def run(self) -> Model: self.validate() - if self._model: - self._model.name = self._properties["name"] - TagDAO.create_tag_relationship( - 
objects_to_tag=self._properties.get("objects_to_tag", []), - tag=self._model, - ) - self._model.description = self._properties.get("description") - - db.session.add(self._model) - db.session.commit() + assert self._model + self._model.name = self._properties["name"] + TagDAO.create_tag_relationship( + objects_to_tag=self._properties.get("objects_to_tag", []), + tag=self._model, + ) + self._model.description = self._properties.get("description") + db.session.add(self._model) return self._model diff --git a/superset/commands/temporary_cache/create.py b/superset/commands/temporary_cache/create.py index 7d61a7807462..642e812d0206 100644 --- a/superset/commands/temporary_cache/create.py +++ b/superset/commands/temporary_cache/create.py @@ -16,12 +16,12 @@ # under the License. import logging from abc import ABC, abstractmethod - -from sqlalchemy.exc import SQLAlchemyError +from functools import partial from superset.commands.base import BaseCommand from superset.commands.temporary_cache.exceptions import TemporaryCacheCreateFailedError from superset.commands.temporary_cache.parameters import CommandParameters +from superset.utils.decorators import on_error, transaction logger = logging.getLogger(__name__) @@ -30,12 +30,9 @@ class CreateTemporaryCacheCommand(BaseCommand, ABC): def __init__(self, cmd_params: CommandParameters): self._cmd_params = cmd_params + @transaction(on_error=partial(on_error, reraise=TemporaryCacheCreateFailedError)) def run(self) -> str: - try: - return self.create(self._cmd_params) - except SQLAlchemyError as ex: - logger.exception("Error running create command") - raise TemporaryCacheCreateFailedError() from ex + return self.create(self._cmd_params) def validate(self) -> None: pass diff --git a/superset/commands/temporary_cache/delete.py b/superset/commands/temporary_cache/delete.py index 1cc291dbf6e2..25cc25ec7ad4 100644 --- a/superset/commands/temporary_cache/delete.py +++ b/superset/commands/temporary_cache/delete.py @@ -16,12 +16,12 @@ # under the License. import logging from abc import ABC, abstractmethod - -from sqlalchemy.exc import SQLAlchemyError +from functools import partial from superset.commands.base import BaseCommand from superset.commands.temporary_cache.exceptions import TemporaryCacheDeleteFailedError from superset.commands.temporary_cache.parameters import CommandParameters +from superset.utils.decorators import on_error, transaction logger = logging.getLogger(__name__) @@ -30,12 +30,9 @@ class DeleteTemporaryCacheCommand(BaseCommand, ABC): def __init__(self, cmd_params: CommandParameters): self._cmd_params = cmd_params + @transaction(on_error=partial(on_error, reraise=TemporaryCacheDeleteFailedError)) def run(self) -> bool: - try: - return self.delete(self._cmd_params) - except SQLAlchemyError as ex: - logger.exception("Error running delete command") - raise TemporaryCacheDeleteFailedError() from ex + return self.delete(self._cmd_params) def validate(self) -> None: pass diff --git a/superset/commands/temporary_cache/update.py b/superset/commands/temporary_cache/update.py index 8daaae8618ba..88bbe18b852c 100644 --- a/superset/commands/temporary_cache/update.py +++ b/superset/commands/temporary_cache/update.py @@ -16,13 +16,13 @@ # under the License. 
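The _save_new_query docstring earlier in this diff (superset/commands/sql_lab/execute.py) explains why an explicit db.session.commit() survives inside a @transaction()-decorated command: the Celery worker reads from its own session, and under READ COMMITTED it can only see rows that have already been committed. A standalone SQLAlchemy illustration of that visibility rule, with a local Job model standing in for the SQL Lab Query:

# Standalone sketch of the cross-session visibility constraint; Job is a local
# stand-in, not a Superset model, and SQLite here stands in for a READ COMMITTED
# database.
import os
import tempfile

from sqlalchemy import Column, Integer, String, create_engine
from sqlalchemy.orm import Session, declarative_base

Base = declarative_base()


class Job(Base):
    __tablename__ = "job"
    id = Column(Integer, primary_key=True)
    sql = Column(String(100))


engine = create_engine("sqlite:///" + os.path.join(tempfile.mkdtemp(), "demo.db"))
Base.metadata.create_all(engine)

web_request = Session(engine)   # the session handling the HTTP request
worker = Session(engine)        # the Celery worker's separate session

job = Job(sql="SELECT 1")
web_request.add(job)
web_request.flush()                # id assigned, but the row is not committed
print(worker.get(Job, job.id))     # None: other sessions cannot see it yet

web_request.commit()               # the eager commit kept in _save_new_query
worker.rollback()                  # end the worker's read transaction
print(worker.get(Job, job.id))     # now visible to the worker's session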
import logging from abc import ABC, abstractmethod +from functools import partial from typing import Optional -from sqlalchemy.exc import SQLAlchemyError - from superset.commands.base import BaseCommand from superset.commands.temporary_cache.exceptions import TemporaryCacheUpdateFailedError from superset.commands.temporary_cache.parameters import CommandParameters +from superset.utils.decorators import on_error, transaction logger = logging.getLogger(__name__) @@ -34,12 +34,9 @@ def __init__( ): self._parameters = cmd_params + @transaction(on_error=partial(on_error, reraise=TemporaryCacheUpdateFailedError)) def run(self) -> Optional[str]: - try: - return self.update(self._parameters) - except SQLAlchemyError as ex: - logger.exception("Error running update command") - raise TemporaryCacheUpdateFailedError() from ex + return self.update(self._parameters) def validate(self) -> None: pass diff --git a/superset/connectors/sqla/models.py b/superset/connectors/sqla/models.py index 6d8d87a506c0..c38a0085a534 100644 --- a/superset/connectors/sqla/models.py +++ b/superset/connectors/sqla/models.py @@ -1768,11 +1768,10 @@ def get_sqla_table_object(self) -> Table: ) ) - def fetch_metadata(self, commit: bool = True) -> MetadataResult: + def fetch_metadata(self) -> MetadataResult: """ Fetches the metadata for the table and merges it in - :param commit: should the changes be committed or not. :return: Tuple with lists of added, removed and modified column names. """ new_columns = self.external_metadata() @@ -1850,8 +1849,6 @@ def fetch_metadata(self, commit: bool = True) -> MetadataResult: config["SQLA_TABLE_MUTATOR"](self) db.session.merge(self) - if commit: - db.session.commit() return results @classmethod diff --git a/superset/daos/base.py b/superset/daos/base.py index 889a0780f642..e393034062b8 100644 --- a/superset/daos/base.py +++ b/superset/daos/base.py @@ -21,13 +21,8 @@ from flask_appbuilder.models.filters import BaseFilter from flask_appbuilder.models.sqla import Model from flask_appbuilder.models.sqla.interface import SQLAInterface -from sqlalchemy.exc import SQLAlchemyError, StatementError +from sqlalchemy.exc import StatementError -from superset.daos.exceptions import ( - DAOCreateFailedError, - DAODeleteFailedError, - DAOUpdateFailedError, -) from superset.extensions import db T = TypeVar("T", bound=Model) @@ -127,15 +122,12 @@ def create( cls, item: T | None = None, attributes: dict[str, Any] | None = None, - commit: bool = True, ) -> T: """ Create an object from the specified item and/or attributes. :param item: The object to create :param attributes: The attributes associated with the object to create - :param commit: Whether to commit the transaction - :raises DAOCreateFailedError: If the creation failed """ if not item: @@ -145,15 +137,7 @@ def create( for key, value in attributes.items(): setattr(item, key, value) - try: - db.session.add(item) - - if commit: - db.session.commit() - except SQLAlchemyError as ex: # pragma: no cover - db.session.rollback() - raise DAOCreateFailedError(exception=ex) from ex - + db.session.add(item) return item # type: ignore @classmethod @@ -161,15 +145,12 @@ def update( cls, item: T | None = None, attributes: dict[str, Any] | None = None, - commit: bool = True, ) -> T: """ Update an object from the specified item and/or attributes. 
:param item: The object to update :param attributes: The attributes associated with the object to update - :param commit: Whether to commit the transaction - :raises DAOUpdateFailedError: If the updating failed """ if not item: @@ -179,19 +160,13 @@ def update( for key, value in attributes.items(): setattr(item, key, value) - try: - db.session.merge(item) - - if commit: - db.session.commit() - except SQLAlchemyError as ex: # pragma: no cover - db.session.rollback() - raise DAOUpdateFailedError(exception=ex) from ex + if item not in db.session: + return db.session.merge(item) return item # type: ignore @classmethod - def delete(cls, items: list[T], commit: bool = True) -> None: + def delete(cls, items: list[T]) -> None: """ Delete the specified items including their associated relationships. @@ -204,17 +179,8 @@ def delete(cls, items: list[T], commit: bool = True) -> None: post-deletion logic. :param items: The items to delete - :param commit: Whether to commit the transaction - :raises DAODeleteFailedError: If the deletion failed :see: https://docs.sqlalchemy.org/en/latest/orm/queryguide/dml.html """ - try: - for item in items: - db.session.delete(item) - - if commit: - db.session.commit() - except SQLAlchemyError as ex: - db.session.rollback() - raise DAODeleteFailedError(exception=ex) from ex + for item in items: + db.session.delete(item) diff --git a/superset/daos/chart.py b/superset/daos/chart.py index 844b36b6b3d4..35afb7f7a91b 100644 --- a/superset/daos/chart.py +++ b/superset/daos/chart.py @@ -62,7 +62,6 @@ def add_favorite(chart: Slice) -> None: dttm=datetime.now(), ) ) - db.session.commit() @staticmethod def remove_favorite(chart: Slice) -> None: @@ -77,4 +76,3 @@ def remove_favorite(chart: Slice) -> None: ) if fav: db.session.delete(fav) - db.session.commit() diff --git a/superset/daos/dashboard.py b/superset/daos/dashboard.py index 6c973639b73e..8196c197b248 100644 --- a/superset/daos/dashboard.py +++ b/superset/daos/dashboard.py @@ -179,8 +179,7 @@ def set_dash_metadata( dashboard: Dashboard, data: dict[Any, Any], old_to_new_slice_ids: dict[int, int] | None = None, - commit: bool = False, - ) -> Dashboard: + ) -> None: new_filter_scopes = {} md = dashboard.params_dict @@ -265,10 +264,6 @@ def set_dash_metadata( md["cross_filters_enabled"] = data.get("cross_filters_enabled", True) dashboard.json_metadata = json.dumps(md) - if commit: - db.session.commit() - return dashboard - @staticmethod def favorited_ids(dashboards: list[Dashboard]) -> list[FavStar]: ids = [dash.id for dash in dashboards] @@ -321,7 +316,6 @@ def copy_dashboard( dash.params = original_dash.params cls.set_dash_metadata(dash, metadata, old_to_new_slice_ids) db.session.add(dash) - db.session.commit() return dash @staticmethod @@ -336,7 +330,6 @@ def add_favorite(dashboard: Dashboard) -> None: dttm=datetime.now(), ) ) - db.session.commit() @staticmethod def remove_favorite(dashboard: Dashboard) -> None: @@ -351,7 +344,6 @@ def remove_favorite(dashboard: Dashboard) -> None: ) if fav: db.session.delete(fav) - db.session.commit() class EmbeddedDashboardDAO(BaseDAO[EmbeddedDashboard]): @@ -369,7 +361,6 @@ def upsert(dashboard: Dashboard, allowed_domains: list[str]) -> EmbeddedDashboar ) embedded.allow_domain_list = ",".join(allowed_domains) dashboard.embedded = [embedded] - db.session.commit() return embedded @classmethod @@ -377,7 +368,6 @@ def create( cls, item: EmbeddedDashboardDAO | None = None, attributes: dict[str, Any] | None = None, - commit: bool = True, ) -> Any: """ Use EmbeddedDashboardDAO.upsert() 
instead. diff --git a/superset/daos/database.py b/superset/daos/database.py index 15fc03710aa7..06b429bb6bf1 100644 --- a/superset/daos/database.py +++ b/superset/daos/database.py @@ -42,7 +42,6 @@ def update( cls, item: Database | None = None, attributes: dict[str, Any] | None = None, - commit: bool = True, ) -> Database: """ Unmask ``encrypted_extra`` before updating. @@ -60,7 +59,7 @@ def update( attributes["encrypted_extra"], ) - return super().update(item, attributes, commit) + return super().update(item, attributes) @staticmethod def validate_uniqueness(database_name: str) -> bool: @@ -174,7 +173,6 @@ def update( cls, item: SSHTunnel | None = None, attributes: dict[str, Any] | None = None, - commit: bool = True, ) -> SSHTunnel: """ Unmask ``password``, ``private_key`` and ``private_key_password`` before updating. @@ -190,7 +188,7 @@ def update( attributes.pop("id", None) attributes = unmask_password_info(attributes, item) - return super().update(item, attributes, commit) + return super().update(item, attributes) class DatabaseUserOAuth2TokensDAO(BaseDAO[DatabaseUserOAuth2Tokens]): diff --git a/superset/daos/dataset.py b/superset/daos/dataset.py index 21c5ae1d0faf..af1b705d6610 100644 --- a/superset/daos/dataset.py +++ b/superset/daos/dataset.py @@ -25,7 +25,6 @@ from superset.connectors.sqla.models import SqlaTable, SqlMetric, TableColumn from superset.daos.base import BaseDAO -from superset.daos.exceptions import DAOUpdateFailedError from superset.extensions import db from superset.models.core import Database from superset.models.dashboard import Dashboard @@ -171,7 +170,6 @@ def update( cls, item: SqlaTable | None = None, attributes: dict[str, Any] | None = None, - commit: bool = True, ) -> SqlaTable: """ Updates a Dataset model on the metadata DB @@ -182,21 +180,19 @@ def update( cls.update_columns( item, attributes.pop("columns"), - commit=commit, override_columns=bool(attributes.get("override_columns")), ) if "metrics" in attributes: - cls.update_metrics(item, attributes.pop("metrics"), commit=commit) + cls.update_metrics(item, attributes.pop("metrics")) - return super().update(item, attributes, commit=commit) + return super().update(item, attributes) @classmethod def update_columns( cls, model: SqlaTable, property_columns: list[dict[str, Any]], - commit: bool = True, override_columns: bool = False, ) -> None: """ @@ -217,7 +213,7 @@ def update_columns( if not DatasetDAO.validate_python_date_format( column["python_date_format"] ): - raise DAOUpdateFailedError( + raise ValueError( "python_date_format is an invalid date/timestamp format." 
) @@ -266,15 +262,11 @@ def update_columns( ) ).delete(synchronize_session="fetch") - if commit: - db.session.commit() - @classmethod def update_metrics( cls, model: SqlaTable, property_metrics: list[dict[str, Any]], - commit: bool = True, ) -> None: """ Creates/updates and/or deletes a list of metrics, based on a @@ -317,9 +309,6 @@ def update_metrics( ) ).delete(synchronize_session="fetch") - if commit: - db.session.commit() - @classmethod def find_dataset_column(cls, dataset_id: int, column_id: int) -> TableColumn | None: # We want to apply base dataset filters diff --git a/superset/daos/exceptions.py b/superset/daos/exceptions.py index 6fdd5a80d2c6..ebd20fee631a 100644 --- a/superset/daos/exceptions.py +++ b/superset/daos/exceptions.py @@ -23,30 +23,6 @@ class DAOException(SupersetException): """ -class DAOCreateFailedError(DAOException): - """ - DAO Create failed - """ - - message = "Create failed" - - -class DAOUpdateFailedError(DAOException): - """ - DAO Update failed - """ - - message = "Update failed" - - -class DAODeleteFailedError(DAOException): - """ - DAO Delete failed - """ - - message = "Delete failed" - - class DatasourceTypeNotSupportedError(DAOException): """ DAO datasource query source type is not supported diff --git a/superset/daos/query.py b/superset/daos/query.py index ea7c82cc34db..55287ebd9fff 100644 --- a/superset/daos/query.py +++ b/superset/daos/query.py @@ -53,7 +53,6 @@ def update_saved_query_exec_info(query_id: int) -> None: for saved_query in related_saved_queries: saved_query.rows = query.rows saved_query.last_run = datetime.now() - db.session.commit() @staticmethod def save_metadata(query: Query, payload: dict[str, Any]) -> None: @@ -97,7 +96,6 @@ def stop_query(client_id: str) -> None: query.status = QueryStatus.STOPPED query.end_time = now_as_float() - db.session.commit() class SavedQueryDAO(BaseDAO[SavedQuery]): diff --git a/superset/daos/report.py b/superset/daos/report.py index 4662f325878d..8cf305c13f26 100644 --- a/superset/daos/report.py +++ b/superset/daos/report.py @@ -20,10 +20,7 @@ from datetime import datetime from typing import Any -from sqlalchemy.exc import SQLAlchemyError - from superset.daos.base import BaseDAO -from superset.daos.exceptions import DAODeleteFailedError from superset.extensions import db from superset.reports.filters import ReportScheduleFilter from superset.reports.models import ( @@ -137,15 +134,12 @@ def create( cls, item: ReportSchedule | None = None, attributes: dict[str, Any] | None = None, - commit: bool = True, ) -> ReportSchedule: """ Create a report schedule with nested recipients. :param item: The object to create :param attributes: The attributes associated with the object to create - :param commit: Whether to commit the transaction - :raises: DAOCreateFailedError: If the creation failed """ # TODO(john-bodley): Determine why we need special handling for recipients. @@ -165,22 +159,19 @@ def create( for recipient in recipients ] - return super().create(item, attributes, commit) + return super().create(item, attributes) @classmethod def update( cls, item: ReportSchedule | None = None, attributes: dict[str, Any] | None = None, - commit: bool = True, ) -> ReportSchedule: """ Update a report schedule with nested recipients. :param item: The object to update :param attributes: The attributes associated with the object to update - :param commit: Whether to commit the transaction - :raises: DAOUpdateFailedError: If the update failed """ # TODO(john-bodley): Determine why we need special handling for recipients. 
@@ -200,7 +191,7 @@ def update( for recipient in recipients ] - return super().update(item, attributes, commit) + return super().update(item, attributes) @staticmethod def find_active() -> list[ReportSchedule]: @@ -283,23 +274,12 @@ def find_last_error_notification( return last_error_email_log if not report_from_last_email else None @staticmethod - def bulk_delete_logs( - model: ReportSchedule, - from_date: datetime, - commit: bool = True, - ) -> int | None: - try: - row_count = ( - db.session.query(ReportExecutionLog) - .filter( - ReportExecutionLog.report_schedule == model, - ReportExecutionLog.end_dttm < from_date, - ) - .delete(synchronize_session="fetch") + def bulk_delete_logs(model: ReportSchedule, from_date: datetime) -> int | None: + return ( + db.session.query(ReportExecutionLog) + .filter( + ReportExecutionLog.report_schedule == model, + ReportExecutionLog.end_dttm < from_date, ) - if commit: - db.session.commit() - return row_count - except SQLAlchemyError as ex: - db.session.rollback() - raise DAODeleteFailedError(str(ex)) from ex + .delete(synchronize_session="fetch") + ) diff --git a/superset/daos/tag.py b/superset/daos/tag.py index 46a1d2538f16..b155cf15c152 100644 --- a/superset/daos/tag.py +++ b/superset/daos/tag.py @@ -19,12 +19,11 @@ from typing import Any, Optional from flask import g -from sqlalchemy.exc import SQLAlchemyError +from sqlalchemy.exc import NoResultFound from superset.commands.tag.exceptions import TagNotFoundError from superset.commands.tag.utils import to_object_type from superset.daos.base import BaseDAO -from superset.daos.exceptions import DAODeleteFailedError from superset.exceptions import MissingUserContextException from superset.extensions import db from superset.models.dashboard import Dashboard @@ -75,7 +74,6 @@ def create_custom_tagged_objects( ) db.session.add_all(tagged_objects) - db.session.commit() @staticmethod def delete_tagged_object( @@ -86,9 +84,7 @@ def delete_tagged_object( """ tag = TagDAO.find_by_name(tag_name.strip()) if not tag: - raise DAODeleteFailedError( - message=f"Tag with name {tag_name} does not exist." - ) + raise NoResultFound(message=f"Tag with name {tag_name} does not exist.") tagged_object = db.session.query(TaggedObject).filter( TaggedObject.tag_id == tag.id, @@ -96,17 +92,13 @@ def delete_tagged_object( TaggedObject.object_id == object_id, ) if not tagged_object: - raise DAODeleteFailedError( + raise NoResultFound( message=f'Tagged object with object_id: {object_id} \ object_type: {object_type} \ and tag name: "{tag_name}" could not be found' ) - try: - db.session.delete(tagged_object.one()) - db.session.commit() - except SQLAlchemyError as ex: # pragma: no cover - db.session.rollback() - raise DAODeleteFailedError(exception=ex) from ex + + db.session.delete(tagged_object.one()) @staticmethod def delete_tags(tag_names: list[str]) -> None: @@ -117,18 +109,12 @@ def delete_tags(tag_names: list[str]) -> None: for name in tag_names: tag_name = name.strip() if not TagDAO.find_by_name(tag_name): - raise DAODeleteFailedError( - message=f"Tag with name {tag_name} does not exist." 
- ) + raise NoResultFound(message=f"Tag with name {tag_name} does not exist.") tags_to_delete.append(tag_name) tag_objects = db.session.query(Tag).filter(Tag.name.in_(tags_to_delete)) + for tag in tag_objects: - try: - db.session.delete(tag) - db.session.commit() - except SQLAlchemyError as ex: # pragma: no cover - db.session.rollback() - raise DAODeleteFailedError(exception=ex) from ex + db.session.delete(tag) @staticmethod def get_by_name(name: str, type_: TagType = TagType.custom) -> Tag: @@ -283,21 +269,10 @@ def favorite_tag_by_id_for_current_user( # pylint: disable=invalid-name ) -> None: """ Marks a specific tag as a favorite for the current user. - This function will find the tag by the provided id, - create a new UserFavoriteTag object that represents - the user's preference, add that object to the database - session, and commit the session. It uses the currently - authenticated user from the global 'g' object. - Args: - tag_id: The id of the tag that is to be marked as - favorite. - Raises: - Any exceptions raised by the find_by_id function, - the UserFavoriteTag constructor, or the database session's - add and commit methods will propagate up to the caller. - Returns: - None. + + :param tag_id: The id of the tag that is to be marked as favorite """ + tag = TagDAO.find_by_id(tag_id) user = g.user @@ -307,26 +282,13 @@ def favorite_tag_by_id_for_current_user( # pylint: disable=invalid-name raise TagNotFoundError() tag.users_favorited.append(user) - db.session.commit() @staticmethod def remove_user_favorite_tag(tag_id: int) -> None: """ Removes a tag from the current user's favorite tags. - This function will find the tag by the provided id and remove the tag - from the user's list of favorite tags. It uses the currently authenticated - user from the global 'g' object. - - Args: - tag_id: The id of the tag that is to be removed from the favorite tags. - - Raises: - Any exceptions raised by the find_by_id function, the database session's - commit method will propagate up to the caller. - - Returns: - None. 
+ :param tag_id: The id of the tag that is to be removed from the favorite tags """ tag = TagDAO.find_by_id(tag_id) user = g.user @@ -338,9 +300,6 @@ def remove_user_favorite_tag(tag_id: int) -> None: tag.users_favorited.remove(user) - # Commit to save the changes - db.session.commit() - @staticmethod def favorited_ids(tags: list[Tag]) -> list[int]: """ @@ -424,5 +383,4 @@ def create_tag_relationship( object_id, tag.name, ) - db.session.add_all(tagged_objects) diff --git a/superset/daos/user.py b/superset/daos/user.py index cc6696cbdcc7..90a9b2bd2f6e 100644 --- a/superset/daos/user.py +++ b/superset/daos/user.py @@ -40,4 +40,3 @@ def set_avatar_url(user: User, url: str) -> None: attrs = UserAttribute(avatar_url=url, user_id=user.id) user.extra_attributes = [attrs] db.session.add(attrs) - db.session.commit() diff --git a/superset/dashboards/api.py b/superset/dashboards/api.py index 3fe557a6843b..823bfdfa8cc8 100644 --- a/superset/dashboards/api.py +++ b/superset/dashboards/api.py @@ -32,7 +32,7 @@ from werkzeug.wrappers import Response as WerkzeugResponse from werkzeug.wsgi import FileWrapper -from superset import is_feature_enabled, thumbnail_cache +from superset import db, is_feature_enabled, thumbnail_cache from superset.charts.schemas import ChartEntityResponseSchema from superset.commands.dashboard.create import CreateDashboardCommand from superset.commands.dashboard.delete import DeleteDashboardCommand @@ -1314,7 +1314,13 @@ def set_embedded(self, dashboard: Dashboard) -> Response: """ try: body = self.embedded_config_schema.load(request.json) - embedded = EmbeddedDashboardDAO.upsert(dashboard, body["allowed_domains"]) + + with db.session.begin_nested(): + embedded = EmbeddedDashboardDAO.upsert( + dashboard, + body["allowed_domains"], + ) + result = self.embedded_response_schema.dump(embedded) return self.response(200, result=result) except ValidationError as error: diff --git a/superset/databases/api.py b/superset/databases/api.py index 2c0aff8da03d..3a672eb7662b 100644 --- a/superset/databases/api.py +++ b/superset/databases/api.py @@ -1410,7 +1410,7 @@ def oauth2(self) -> FlaskResponse: database_id=state["database_id"], ) if existing: - DatabaseUserOAuth2TokensDAO.delete([existing], commit=True) + DatabaseUserOAuth2TokensDAO.delete([existing]) # store tokens expiration = datetime.now() + timedelta(seconds=token_response["expires_in"]) @@ -1422,7 +1422,6 @@ def oauth2(self) -> FlaskResponse: "access_token_expiration": expiration, "refresh_token": token_response.get("refresh_token"), }, - commit=True, ) # return blank page that closes itself diff --git a/superset/db_engine_specs/gsheets.py b/superset/db_engine_specs/gsheets.py index e876aca8defd..fd5ec6722ba0 100644 --- a/superset/db_engine_specs/gsheets.py +++ b/superset/db_engine_specs/gsheets.py @@ -455,4 +455,4 @@ def df_to_sql( # pylint: disable=too-many-locals catalog[table.table] = spreadsheet_url database.extra = json.dumps(extra) db.session.add(database) - db.session.commit() + db.session.commit() # pylint: disable=consider-using-transaction diff --git a/superset/db_engine_specs/hive.py b/superset/db_engine_specs/hive.py index 519618aaa668..e3cf128b7a2c 100644 --- a/superset/db_engine_specs/hive.py +++ b/superset/db_engine_specs/hive.py @@ -408,7 +408,7 @@ def handle_cursor( # pylint: disable=too-many-locals logger.info("Query %s: [%s] %s", str(query_id), str(job_id), l) last_log_line = len(log_lines) if needs_commit: - db.session.commit() + db.session.commit() # pylint: disable=consider-using-transaction if sleep_interval 
:= current_app.config.get("HIVE_POLL_INTERVAL"): logger.warning( "HIVE_POLL_INTERVAL is deprecated and will be removed in 3.0. Please use DB_POLL_INTERVAL_SECONDS instead" diff --git a/superset/db_engine_specs/impala.py b/superset/db_engine_specs/impala.py index 62360e77bbd1..ea74df83164f 100644 --- a/superset/db_engine_specs/impala.py +++ b/superset/db_engine_specs/impala.py @@ -151,7 +151,7 @@ def handle_cursor(cls, cursor: Any, query: Query) -> None: needs_commit = True if needs_commit: - db.session.commit() + db.session.commit() # pylint: disable=consider-using-transaction sleep_interval = current_app.config["DB_POLL_INTERVAL_SECONDS"].get( cls.engine, 5 ) diff --git a/superset/db_engine_specs/presto.py b/superset/db_engine_specs/presto.py index 5e0b433e1e11..fbd0eff48447 100644 --- a/superset/db_engine_specs/presto.py +++ b/superset/db_engine_specs/presto.py @@ -14,7 +14,7 @@ # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. -# pylint: disable=too-many-lines +# pylint: disable=consider-using-transaction,too-many-lines from __future__ import annotations import contextlib diff --git a/superset/db_engine_specs/trino.py b/superset/db_engine_specs/trino.py index ce0e03be7750..143276bdc3dc 100644 --- a/superset/db_engine_specs/trino.py +++ b/superset/db_engine_specs/trino.py @@ -14,6 +14,7 @@ # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. +# pylint: disable=consider-using-transaction from __future__ import annotations import contextlib diff --git a/superset/examples/bart_lines.py b/superset/examples/bart_lines.py index 8b3b31522652..c1a0897eb3f7 100644 --- a/superset/examples/bart_lines.py +++ b/superset/examples/bart_lines.py @@ -65,5 +65,4 @@ def load_bart_lines(only_metadata: bool = False, force: bool = False) -> None: tbl.description = "BART lines" tbl.database = database tbl.filter_select_enabled = True - db.session.commit() tbl.fetch_metadata() diff --git a/superset/examples/birth_names.py b/superset/examples/birth_names.py index 229734057ceb..81e31e741655 100644 --- a/superset/examples/birth_names.py +++ b/superset/examples/birth_names.py @@ -111,8 +111,6 @@ def load_birth_names( _set_table_metadata(obj, database) _add_table_metrics(obj) - db.session.commit() - slices, _ = create_slices(obj) create_dashboard(slices) @@ -844,5 +842,4 @@ def create_dashboard(slices: list[Slice]) -> Dashboard: dash.dashboard_title = "USA Births Names" dash.position_json = json.dumps(pos, indent=4) dash.slug = "births" - db.session.commit() return dash diff --git a/superset/examples/country_map.py b/superset/examples/country_map.py index 1741219470ac..53f4a0b874ff 100644 --- a/superset/examples/country_map.py +++ b/superset/examples/country_map.py @@ -88,7 +88,6 @@ def load_country_map_data(only_metadata: bool = False, force: bool = False) -> N if not any(col.metric_name == "avg__2004" for col in obj.metrics): col = str(column("2004").compile(db.engine)) obj.metrics.append(SqlMetric(metric_name="avg__2004", expression=f"AVG({col})")) - db.session.commit() obj.fetch_metadata() tbl = obj diff --git a/superset/examples/css_templates.py b/superset/examples/css_templates.py index 2f67d2e1faac..91bb54c15775 100644 --- a/superset/examples/css_templates.py +++ b/superset/examples/css_templates.py @@ -52,7 +52,6 @@ def load_css_templates() -> None: """ ) obj.css = css - db.session.commit() obj = 
db.session.query(CssTemplate).filter_by(template_name="Courier Black").first() if not obj: @@ -97,4 +96,3 @@ def load_css_templates() -> None: """ ) obj.css = css - db.session.commit() diff --git a/superset/examples/deck.py b/superset/examples/deck.py index b0cb65b03fc2..931924dd0879 100644 --- a/superset/examples/deck.py +++ b/superset/examples/deck.py @@ -541,4 +541,3 @@ def load_deck_dash() -> None: # pylint: disable=too-many-statements dash.dashboard_title = title dash.slug = slug dash.slices = slices - db.session.commit() diff --git a/superset/examples/energy.py b/superset/examples/energy.py index 98b444f9db2f..d7e46ec5d8c3 100644 --- a/superset/examples/energy.py +++ b/superset/examples/energy.py @@ -14,8 +14,6 @@ # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. -"""Loads datasets, dashboards and slices in a new superset instance""" - import textwrap import pandas as pd @@ -79,7 +77,6 @@ def load_energy( SqlMetric(metric_name="sum__value", expression=f"SUM({col})") ) - db.session.commit() tbl.fetch_metadata() slc = Slice( diff --git a/superset/examples/flights.py b/superset/examples/flights.py index 4db029519fd8..f8659c24d07f 100644 --- a/superset/examples/flights.py +++ b/superset/examples/flights.py @@ -66,6 +66,5 @@ def load_flights(only_metadata: bool = False, force: bool = False) -> None: tbl.description = "Random set of flights in the US" tbl.database = database tbl.filter_select_enabled = True - db.session.commit() tbl.fetch_metadata() print("Done loading table!") diff --git a/superset/examples/helpers.py b/superset/examples/helpers.py index b865e2dfca93..4cc9a47b2700 100644 --- a/superset/examples/helpers.py +++ b/superset/examples/helpers.py @@ -14,8 +14,6 @@ # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. 
-"""Loads datasets, dashboards and slices in a new superset instance""" - import os from typing import Any @@ -62,7 +60,6 @@ def merge_slice(slc: Slice) -> None: if o: db.session.delete(o) db.session.add(slc) - db.session.commit() def get_slice_json(defaults: dict[Any, Any], **kwargs: Any) -> str: diff --git a/superset/examples/long_lat.py b/superset/examples/long_lat.py index 4f8de31453c1..5afb65f6fd91 100644 --- a/superset/examples/long_lat.py +++ b/superset/examples/long_lat.py @@ -97,7 +97,6 @@ def load_long_lat_data(only_metadata: bool = False, force: bool = False) -> None obj.main_dttm_col = "datetime" obj.database = database obj.filter_select_enabled = True - db.session.commit() obj.fetch_metadata() tbl = obj diff --git a/superset/examples/misc_dashboard.py b/superset/examples/misc_dashboard.py index 825dc6352c8e..4a7079e2cddc 100644 --- a/superset/examples/misc_dashboard.py +++ b/superset/examples/misc_dashboard.py @@ -140,4 +140,3 @@ def load_misc_dashboard() -> None: dash.position_json = json.dumps(pos, indent=4) dash.slug = DASH_SLUG dash.slices = slices - db.session.commit() diff --git a/superset/examples/multiformat_time_series.py b/superset/examples/multiformat_time_series.py index 979be10686f5..9cfe44c1994c 100644 --- a/superset/examples/multiformat_time_series.py +++ b/superset/examples/multiformat_time_series.py @@ -102,7 +102,6 @@ def load_multiformat_time_series( # pylint: disable=too-many-locals col.python_date_format = dttm_and_expr[0] col.database_expression = dttm_and_expr[1] col.is_dttm = True - db.session.commit() obj.fetch_metadata() tbl = obj diff --git a/superset/examples/paris.py b/superset/examples/paris.py index 990aa01ca6c3..928e2294072a 100644 --- a/superset/examples/paris.py +++ b/superset/examples/paris.py @@ -62,5 +62,4 @@ def load_paris_iris_geojson(only_metadata: bool = False, force: bool = False) -> tbl.description = "Map of Paris" tbl.database = database tbl.filter_select_enabled = True - db.session.commit() tbl.fetch_metadata() diff --git a/superset/examples/random_time_series.py b/superset/examples/random_time_series.py index ec232995fa2e..10ece826b6a1 100644 --- a/superset/examples/random_time_series.py +++ b/superset/examples/random_time_series.py @@ -14,7 +14,6 @@ # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. - import pandas as pd from sqlalchemy import DateTime, inspect, String @@ -72,7 +71,6 @@ def load_random_time_series_data( obj.main_dttm_col = "ds" obj.database = database obj.filter_select_enabled = True - db.session.commit() obj.fetch_metadata() tbl = obj diff --git a/superset/examples/sf_population_polygons.py b/superset/examples/sf_population_polygons.py index 4fa59db721a6..b8d5527ed247 100644 --- a/superset/examples/sf_population_polygons.py +++ b/superset/examples/sf_population_polygons.py @@ -64,5 +64,4 @@ def load_sf_population_polygons( tbl.description = "Population density of San Francisco" tbl.database = database tbl.filter_select_enabled = True - db.session.commit() tbl.fetch_metadata() diff --git a/superset/examples/supported_charts_dashboard.py b/superset/examples/supported_charts_dashboard.py index 49141eb73cf6..c605bf88cc57 100644 --- a/superset/examples/supported_charts_dashboard.py +++ b/superset/examples/supported_charts_dashboard.py @@ -14,9 +14,7 @@ # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. 
- # pylint: disable=too-many-lines - import textwrap from sqlalchemy import inspect @@ -1274,4 +1272,3 @@ def load_supported_charts_dashboard() -> None: dash.dashboard_title = "Supported Charts Dashboard" dash.position_json = json.dumps(pos, indent=2) dash.slug = DASH_SLUG - db.session.commit() diff --git a/superset/examples/tabbed_dashboard.py b/superset/examples/tabbed_dashboard.py index bbc11e77306a..b44c2a6d2be9 100644 --- a/superset/examples/tabbed_dashboard.py +++ b/superset/examples/tabbed_dashboard.py @@ -14,8 +14,6 @@ # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. -"""Loads datasets, dashboards and slices in a new superset instance""" - import textwrap from superset import db @@ -558,4 +556,3 @@ def load_tabbed_dashboard(_: bool = False) -> None: dash.slices = slices dash.dashboard_title = "Tabbed Dashboard" dash.slug = slug - db.session.commit() diff --git a/superset/examples/world_bank.py b/superset/examples/world_bank.py index afbb6a994a83..a9c06dfa2942 100644 --- a/superset/examples/world_bank.py +++ b/superset/examples/world_bank.py @@ -14,8 +14,6 @@ # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. -"""Loads datasets, dashboards and slices in a new superset instance""" - import os import pandas as pd @@ -41,7 +39,7 @@ from superset.utils.core import DatasourceType -def load_world_bank_health_n_pop( # pylint: disable=too-many-locals, too-many-statements +def load_world_bank_health_n_pop( # pylint: disable=too-many-locals only_metadata: bool = False, force: bool = False, sample: bool = False, @@ -110,7 +108,6 @@ def load_world_bank_health_n_pop( # pylint: disable=too-many-locals, too-many-s SqlMetric(metric_name=metric, expression=f"{aggr_func}({col})") ) - db.session.commit() tbl.fetch_metadata() slices = create_slices(tbl) @@ -134,7 +131,6 @@ def load_world_bank_health_n_pop( # pylint: disable=too-many-locals, too-many-s dash.position_json = json.dumps(pos, indent=4) dash.slug = slug dash.slices = slices - db.session.commit() def create_slices(tbl: BaseDatasource) -> list[Slice]: diff --git a/superset/extensions/metastore_cache.py b/superset/extensions/metastore_cache.py index 7b4e39677e48..1c89e8459774 100644 --- a/superset/extensions/metastore_cache.py +++ b/superset/extensions/metastore_cache.py @@ -22,7 +22,6 @@ from flask import current_app, Flask, has_app_context from flask_caching import BaseCache -from superset import db from superset.key_value.exceptions import KeyValueCreateFailedError from superset.key_value.types import ( KeyValueCodec, @@ -95,7 +94,6 @@ def set(self, key: str, value: Any, timeout: Optional[int] = None) -> bool: codec=self.codec, expires_on=self._get_expiry(timeout), ).run() - db.session.commit() return True def add(self, key: str, value: Any, timeout: Optional[int] = None) -> bool: @@ -111,7 +109,6 @@ def add(self, key: str, value: Any, timeout: Optional[int] = None) -> bool: key=self.get_key(key), expires_on=self._get_expiry(timeout), ).run() - db.session.commit() return True except KeyValueCreateFailedError: return False @@ -136,6 +133,4 @@ def delete(self, key: str) -> Any: # pylint: disable=import-outside-toplevel from superset.commands.key_value.delete import DeleteKeyValueCommand - ret = DeleteKeyValueCommand(resource=RESOURCE, key=self.get_key(key)).run() - db.session.commit() - return ret + return DeleteKeyValueCommand(resource=RESOURCE, 
key=self.get_key(key)).run() diff --git a/superset/extensions/pylint.py b/superset/extensions/pylint.py index 1cf9821f4460..5925f180b785 100644 --- a/superset/extensions/pylint.py +++ b/superset/extensions/pylint.py @@ -56,5 +56,22 @@ def visit_importfrom(self, node: nodes.ImportFrom) -> None: self.add_message("disallowed-import", node=node) +class TransactionChecker(BaseChecker): + name = "consider-using-transaction" + msgs = { + "W0001": ( + 'Consider using the @transaction decorator when defining a "unit of work"', + "consider-using-transaction", + "Used when an explicit commit or rollback call is detected", + ), + } + + def visit_call(self, node: nodes.Call) -> None: + if isinstance(node.func, nodes.Attribute): + if node.func.attrname in ("commit", "rollback"): + self.add_message("consider-using-transaction", node=node) + + def register(linter: PyLinter) -> None: linter.register_checker(JSONLibraryImportChecker(linter)) + linter.register_checker(TransactionChecker(linter)) diff --git a/superset/initialization/__init__.py b/superset/initialization/__init__.py index a98b8c94892d..f074eaf293d5 100644 --- a/superset/initialization/__init__.py +++ b/superset/initialization/__init__.py @@ -56,6 +56,7 @@ from superset.superset_typing import FlaskResponse from superset.tags.core import register_sqla_event_listeners from superset.utils.core import is_test, pessimistic_connection_handling +from superset.utils.decorators import transaction from superset.utils.log import DBEventLogger, get_event_logger_from_cfg_value if TYPE_CHECKING: @@ -513,6 +514,7 @@ def configure_cache(self) -> None: def configure_feature_flags(self) -> None: feature_flag_manager.init_app(self.superset_app) + @transaction() def configure_fab(self) -> None: if self.config["SILENCE_FAB"]: logging.getLogger("flask_appbuilder").setLevel(logging.ERROR) diff --git a/superset/key_value/shared_entries.py b/superset/key_value/shared_entries.py index f472838d2e09..130313157a53 100644 --- a/superset/key_value/shared_entries.py +++ b/superset/key_value/shared_entries.py @@ -18,7 +18,6 @@ from typing import Any, Optional from uuid import uuid3 -from superset import db from superset.key_value.types import JsonKeyValueCodec, KeyValueResource, SharedKey from superset.key_value.utils import get_uuid_namespace, random_key @@ -46,7 +45,6 @@ def set_shared_value(key: SharedKey, value: Any) -> None: key=uuid_key, codec=CODEC, ).run() - db.session.commit() def get_permalink_salt(key: SharedKey) -> str: diff --git a/superset/models/dashboard.py b/superset/models/dashboard.py index c2048f2a556c..28d8aacc7bed 100644 --- a/superset/models/dashboard.py +++ b/superset/models/dashboard.py @@ -83,7 +83,7 @@ def copy_dashboard(_mapper: Mapper, _connection: Connection, target: Dashboard) user_id=target.id, welcome_dashboard_id=dashboard.id ) session.add(extra_attributes) - session.commit() + session.commit() # pylint: disable=consider-using-transaction sqla.event.listen(User, "after_insert", copy_dashboard) diff --git a/superset/queries/api.py b/superset/queries/api.py index 0695946fe07f..67afd8a81763 100644 --- a/superset/queries/api.py +++ b/superset/queries/api.py @@ -231,8 +231,8 @@ def get_updated_since(self, **kwargs: Any) -> FlaskResponse: backoff.constant, Exception, interval=1, - on_backoff=lambda details: db.session.rollback(), - on_giveup=lambda details: db.session.rollback(), + on_backoff=lambda details: db.session.rollback(), # pylint: disable=consider-using-transaction + on_giveup=lambda details: db.session.rollback(), # pylint: 
disable=consider-using-transaction max_tries=5, ) @requires_json diff --git a/superset/row_level_security/api.py b/superset/row_level_security/api.py index 86956683cb15..077d55ff4ebb 100644 --- a/superset/row_level_security/api.py +++ b/superset/row_level_security/api.py @@ -23,6 +23,7 @@ from flask_appbuilder.models.sqla.interface import SQLAInterface from flask_babel import ngettext from marshmallow import ValidationError +from sqlalchemy.exc import SQLAlchemyError from superset.commands.exceptions import ( DatasourceNotFoundValidationError, @@ -34,7 +35,6 @@ from superset.commands.security.update import UpdateRLSRuleCommand from superset.connectors.sqla.models import RowLevelSecurityFilter from superset.constants import MODEL_API_RW_METHOD_PERMISSION_MAP, RouteMethod -from superset.daos.exceptions import DAOCreateFailedError, DAOUpdateFailedError from superset.extensions import event_logger from superset.row_level_security.schemas import ( get_delete_ids_schema, @@ -205,7 +205,7 @@ def post(self) -> Response: exc_info=True, ) return self.response_422(message=str(ex)) - except DAOCreateFailedError as ex: + except SQLAlchemyError as ex: logger.error( "Error creating RLS rule %s: %s", self.__class__.__name__, @@ -291,7 +291,7 @@ def put(self, pk: int) -> Response: exc_info=True, ) return self.response_422(message=str(ex)) - except DAOUpdateFailedError as ex: + except SQLAlchemyError as ex: logger.error( "Error updating RLS rule %s: %s", self.__class__.__name__, diff --git a/superset/security/manager.py b/superset/security/manager.py index ea2ee5ef83a1..b4bc0c6103de 100644 --- a/superset/security/manager.py +++ b/superset/security/manager.py @@ -1019,7 +1019,6 @@ def clean_perms(self) -> None: ) if deleted_count := pvms.delete(): logger.info("Deleted %i faulty permissions", deleted_count) - self.get_session.commit() def sync_role_definitions(self) -> None: """ @@ -1045,7 +1044,6 @@ def sync_role_definitions(self) -> None: self.auth_role_public, merge=True, ) - self.create_missing_perms() self.clean_perms() @@ -1119,7 +1117,6 @@ def copy_role( ): role_from_permissions.append(permission_view) role_to.permissions = role_from_permissions - self.get_session.commit() def set_role( self, @@ -1140,7 +1137,6 @@ def set_role( permission_view for permission_view in pvms if pvm_check(permission_view) ] role.permissions = role_pvms - self.get_session.commit() def _is_admin_only(self, pvm: PermissionView) -> bool: """ diff --git a/superset/sql_lab.py b/superset/sql_lab.py index cb2cbe455cce..9712ab47ab42 100644 --- a/superset/sql_lab.py +++ b/superset/sql_lab.py @@ -14,6 +14,7 @@ # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. 
+# pylint: disable=consider-using-transaction import dataclasses import logging import uuid @@ -127,6 +128,7 @@ def handle_query_error( def get_query_backoff_handler(details: dict[Any, Any]) -> None: + print(details) query_id = details["kwargs"]["query_id"] logger.error( "Query with id `%s` could not be retrieved", str(query_id), exc_info=True diff --git a/superset/sqllab/sql_json_executer.py b/superset/sqllab/sql_json_executer.py index fde73aef0a86..ac9968ed6b46 100644 --- a/superset/sqllab/sql_json_executer.py +++ b/superset/sqllab/sql_json_executer.py @@ -90,6 +90,7 @@ def execute( rendered_query: str, log_params: dict[str, Any] | None, ) -> SqlJsonExecutionStatus: + print(">>> execute <<<") query_id = execution_context.query.id try: data = self._get_sql_results_with_timeout( @@ -101,6 +102,7 @@ def execute( raise except Exception as ex: logger.exception("Query %i failed unexpectedly", query_id) + print(str(ex)) raise SupersetGenericDBErrorException( utils.error_msg_from_exception(ex) ) from ex @@ -112,6 +114,7 @@ def execute( [SupersetError(**params) for params in data["errors"]] # type: ignore ) # old string-only error message + print(data) raise SupersetGenericDBErrorException(data["error"]) # type: ignore return SqlJsonExecutionStatus.HAS_RESULTS diff --git a/superset/tags/models.py b/superset/tags/models.py index ba859f519bf4..31975c3e8e88 100644 --- a/superset/tags/models.py +++ b/superset/tags/models.py @@ -14,6 +14,7 @@ # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. +# pylint: disable=consider-using-transaction from __future__ import annotations import enum diff --git a/superset/tasks/celery_app.py b/superset/tasks/celery_app.py index 4d36917be0bb..5a0963ccd544 100644 --- a/superset/tasks/celery_app.py +++ b/superset/tasks/celery_app.py @@ -62,7 +62,7 @@ def teardown( # pylint: disable=unused-argument if flask_app.config.get("SQLALCHEMY_COMMIT_ON_TEARDOWN"): if not isinstance(retval, Exception): - db.session.commit() + db.session.commit() # pylint: disable=consider-using-transaction if not flask_app.config.get("CELERY_ALWAYS_EAGER"): db.session.remove() diff --git a/superset/utils/database.py b/superset/utils/database.py index 073e58ffda6f..719e7f2d772c 100644 --- a/superset/utils/database.py +++ b/superset/utils/database.py @@ -54,13 +54,12 @@ def get_or_create_db( ) db.session.add(database) database.set_sqlalchemy_uri(sqlalchemy_uri) - db.session.commit() # todo: it's a bad idea to do an update in a get/create function if database and database.sqlalchemy_uri_decrypted != sqlalchemy_uri: database.set_sqlalchemy_uri(sqlalchemy_uri) - db.session.commit() + db.session.flush() return database @@ -80,4 +79,4 @@ def remove_database(database: Database) -> None: from superset import db db.session.delete(database) - db.session.commit() + db.session.flush() diff --git a/superset/utils/decorators.py b/superset/utils/decorators.py index 3900bdd4156a..844a8f063c1b 100644 --- a/superset/utils/decorators.py +++ b/superset/utils/decorators.py @@ -20,10 +20,12 @@ import time from collections.abc import Iterator from contextlib import contextmanager +from functools import wraps from typing import Any, Callable, TYPE_CHECKING from uuid import UUID from flask import current_app, g, Response +from sqlalchemy.exc import SQLAlchemyError from superset.utils import core as utils from superset.utils.dates import now_as_float @@ -207,3 +209,64 @@ def suppress_logging( yield finally: 
target_logger.setLevel(original_level) + + +def on_error( + ex: Exception, + catches: tuple[type[Exception], ...] = (SQLAlchemyError,), + reraise: type[Exception] | None = SQLAlchemyError, +) -> None: + """ + Default error handler whenever any exception is caught during a SQLAlchemy nested + transaction. + + :param ex: The source exception + :param catches: The exception types the handler catches + :param reraise: The exception type the handler raises after catching + :raises Exception: If the exception is not swallowed + """ + + if isinstance(ex, catches): + if hasattr(ex, "exception"): + logger.exception(ex.exception) + + if reraise: + raise reraise() from ex + else: + raise ex + + +def transaction( # pylint: disable=redefined-outer-name + on_error: Callable[..., Any] | None = on_error, +) -> Callable[..., Any]: + """ + Perform a "unit of work". + + Note ideally this would leverage SQLAlchemy's nested transaction, however this + proved rather complicated, likely due to many architectural facets, and thus has + been left for a follow up exercise. + + :param on_error: Callback invoked when an exception is caught + :see: https://github.com/apache/superset/issues/25108 + """ + + def decorate(func: Callable[..., Any]) -> Callable[..., Any]: + @wraps(func) + def wrapped(*args: Any, **kwargs: Any) -> Any: + from superset import db # pylint: disable=import-outside-toplevel + + try: + result = func(*args, **kwargs) + db.session.commit() # pylint: disable=consider-using-transaction + return result + except Exception as ex: + db.session.rollback() # pylint: disable=consider-using-transaction + + if on_error: + return on_error(ex) + + raise + + return wrapped + + return decorate diff --git a/superset/utils/lock.py b/superset/utils/lock.py index 3cd3c8ead53a..4723b57fa1b0 100644 --- a/superset/utils/lock.py +++ b/superset/utils/lock.py @@ -24,7 +24,6 @@ from datetime import datetime, timedelta from typing import Any, cast, TypeVar, Union -from superset import db from superset.exceptions import CreateKeyValueDistributedLockFailedException from superset.key_value.exceptions import KeyValueCreateFailedError from superset.key_value.types import JsonKeyValueCodec, KeyValueResource @@ -72,7 +71,6 @@ def KeyValueDistributedLock( # pylint: disable=invalid-name store. :param namespace: The namespace for which the lock is to be acquired. - :type namespace: str :param kwargs: Additional keyword arguments. :yields: A unique identifier (UUID) for the acquired lock (the KV key). :raises CreateKeyValueDistributedLockFailedException: If the lock is taken. 
@@ -93,12 +91,10 @@ def KeyValueDistributedLock( # pylint: disable=invalid-name value=True, expires_on=datetime.now() + LOCK_EXPIRATION, ).run() - db.session.commit() yield key DeleteKeyValueCommand(resource=KeyValueResource.LOCK, key=key).run() - db.session.commit() logger.debug("Removed lock on namespace %s for key %s", namespace, key) except KeyValueCreateFailedError as ex: raise CreateKeyValueDistributedLockFailedException( diff --git a/superset/utils/log.py b/superset/utils/log.py index 4b9ebb50b989..71c552883307 100644 --- a/superset/utils/log.py +++ b/superset/utils/log.py @@ -403,7 +403,7 @@ def log( # pylint: disable=too-many-arguments,too-many-locals logs.append(log) try: db.session.bulk_save_objects(logs) - db.session.commit() + db.session.commit() # pylint: disable=consider-using-transaction except SQLAlchemyError as ex: logging.error("DBEventLogger failed to log event(s)") logging.exception(ex) diff --git a/superset/views/base.py b/superset/views/base.py index dc90b0772897..b836c5076c4e 100644 --- a/superset/views/base.py +++ b/superset/views/base.py @@ -63,6 +63,7 @@ app as superset_app, appbuilder, conf, + db, get_feature_flags, is_feature_enabled, security_manager, @@ -698,7 +699,7 @@ def _delete(self: BaseView, primary_key: int) -> None: if view_menu: security_manager.get_session.delete(view_menu) - security_manager.get_session.commit() + db.session.commit() # pylint: disable=consider-using-transaction flash(*self.datamodel.message) self.update_redirect() diff --git a/superset/views/core.py b/superset/views/core.py index 5f76b05a7881..75a04dedc7a8 100755 --- a/superset/views/core.py +++ b/superset/views/core.py @@ -619,10 +619,12 @@ def save_or_overwrite_slice( if action == "saveas" and slice_add_perm: ChartDAO.create(slc) + db.session.commit() # pylint: disable=consider-using-transaction msg = _("Chart [{}] has been saved").format(slc.slice_name) flash(msg, "success") elif action == "overwrite" and slice_overwrite_perm: ChartDAO.update(slc) + db.session.commit() # pylint: disable=consider-using-transaction msg = _("Chart [{}] has been overwritten").format(slc.slice_name) flash(msg, "success") @@ -676,7 +678,7 @@ def save_or_overwrite_slice( if dash and slc not in dash.slices: dash.slices.append(slc) - db.session.commit() + db.session.commit() # pylint: disable=consider-using-transaction response = { "can_add": slice_add_perm, diff --git a/superset/views/dashboard/views.py b/superset/views/dashboard/views.py index 2e88b4acd02b..8a419fcb26f9 100644 --- a/superset/views/dashboard/views.py +++ b/superset/views/dashboard/views.py @@ -122,7 +122,7 @@ def new(self) -> FlaskResponse: owners=[g.user], ) db.session.add(new_dashboard) - db.session.commit() + db.session.commit() # pylint: disable=consider-using-transaction return redirect(f"/superset/dashboard/{new_dashboard.id}/?edit=true") @expose("//embedded") diff --git a/superset/views/datasource/views.py b/superset/views/datasource/views.py index 89907df000fa..377579cf05d1 100644 --- a/superset/views/datasource/views.py +++ b/superset/views/datasource/views.py @@ -116,7 +116,7 @@ def save(self) -> FlaskResponse: ) orm_datasource.update_from_object(datasource_dict) data = orm_datasource.data - db.session.commit() + db.session.commit() # pylint: disable=consider-using-transaction return self.json_response(sanitize_datasource_data(data)) diff --git a/superset/views/key_value.py b/superset/views/key_value.py index 3ba53073c704..69a5314c5fb5 100644 --- a/superset/views/key_value.py +++ b/superset/views/key_value.py @@ -48,7 +48,7 @@ 
def store(self) -> FlaskResponse: value = request.form.get("data") obj = models.KeyValue(value=value) db.session.add(obj) - db.session.commit() + db.session.commit() # pylint: disable=consider-using-transaction except Exception as ex: # pylint: disable=broad-except return json_error_response(utils.error_msg_from_exception(ex)) return Response(json.dumps({"id": obj.id}), status=200) diff --git a/superset/views/sql_lab/views.py b/superset/views/sql_lab/views.py index 693299118d08..3ec366726747 100644 --- a/superset/views/sql_lab/views.py +++ b/superset/views/sql_lab/views.py @@ -14,6 +14,7 @@ # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. +# pylint: disable=consider-using-transaction import logging from flask import request, Response @@ -272,6 +273,5 @@ def expanded(self, table_schema_id: int) -> FlaskResponse: .filter_by(id=table_schema_id) .update({"expanded": payload}) ) - db.session.commit() response = json.dumps({"id": table_schema_id, "expanded": payload}) return json_success(response) diff --git a/tests/integration_tests/base_tests.py b/tests/integration_tests/base_tests.py index 77633d65642e..0e407b86573d 100644 --- a/tests/integration_tests/base_tests.py +++ b/tests/integration_tests/base_tests.py @@ -203,8 +203,7 @@ def temporary_user( previous_g_user = g.user if hasattr(g, "user") else None try: if login: - resp = self.login(username=temp_user.username) - print(resp) + self.login(username=temp_user.username) else: g.user = temp_user yield temp_user diff --git a/tests/integration_tests/charts/api_tests.py b/tests/integration_tests/charts/api_tests.py index 6d25fe81905a..a9af7c12b399 100644 --- a/tests/integration_tests/charts/api_tests.py +++ b/tests/integration_tests/charts/api_tests.py @@ -1266,7 +1266,6 @@ def test_admin_gets_filtered_energy_slices(self): assert rv.status_code == 200 assert data["count"] > 0 for chart in data["result"]: - print(chart) assert ( "energy" in " ".join( diff --git a/tests/integration_tests/charts/data/api_tests.py b/tests/integration_tests/charts/data/api_tests.py index 58cfd9d494cb..56b0a9a793b0 100644 --- a/tests/integration_tests/charts/data/api_tests.py +++ b/tests/integration_tests/charts/data/api_tests.py @@ -1211,6 +1211,9 @@ def test_chart_data_cache_no_login(self, cache_loader): """ Chart data cache API: Test chart data async cache request (no login) """ + if get_example_database().backend == "presto": + return + app._got_first_request = False async_query_manager_factory.init_app(app) self.logout() diff --git a/tests/integration_tests/conftest.py b/tests/integration_tests/conftest.py index f180da9aed8b..537c1c882e0c 100644 --- a/tests/integration_tests/conftest.py +++ b/tests/integration_tests/conftest.py @@ -124,10 +124,6 @@ def setup_sample_data() -> Any: with app.app_context(): setup_presto_if_needed() - from superset.cli.test import load_test_users_run - - load_test_users_run() - from superset.examples.css_templates import load_css_templates load_css_templates() diff --git a/tests/integration_tests/core_tests.py b/tests/integration_tests/core_tests.py index 9166d549588c..44b7ef26e64c 100644 --- a/tests/integration_tests/core_tests.py +++ b/tests/integration_tests/core_tests.py @@ -814,7 +814,7 @@ def set(self): mock_cache.return_value = MockCache() rv = self.client.get("/superset/explore_json/data/valid-cache-key") - self.assertEqual(rv.status_code, 401) + self.assertEqual(rv.status_code, 403) def test_explore_json_data_invalid_cache_key(self): 
self.login(ADMIN_USERNAME) diff --git a/tests/integration_tests/dashboard_tests.py b/tests/integration_tests/dashboard_tests.py index 1852adba48af..bee8de7a5e06 100644 --- a/tests/integration_tests/dashboard_tests.py +++ b/tests/integration_tests/dashboard_tests.py @@ -186,7 +186,11 @@ def test_dashboard_with_created_by_can_be_accessed_by_public_users(self): # Cleanup self.revoke_public_access_to_table(table) - @pytest.mark.usefixtures("load_energy_table_with_slice", "load_dashboard") + @pytest.mark.usefixtures( + "public_role_like_gamma", + "load_energy_table_with_slice", + "load_dashboard", + ) def test_users_can_list_published_dashboard(self): self.login(ALPHA_USERNAME) resp = self.get_resp("/api/v1/dashboard/") diff --git a/tests/integration_tests/dashboards/commands_tests.py b/tests/integration_tests/dashboards/commands_tests.py index 06edd6c6d0f1..334e0425cf1f 100644 --- a/tests/integration_tests/dashboards/commands_tests.py +++ b/tests/integration_tests/dashboards/commands_tests.py @@ -592,7 +592,6 @@ def test_import_v1_dashboard_multiple(self, mock_g): } command = v1.ImportDashboardsCommand(contents, overwrite=True) command.run() - command.run() new_num_dashboards = db.session.query(Dashboard).count() assert new_num_dashboards == num_dashboards + 1 diff --git a/tests/integration_tests/databases/api_tests.py b/tests/integration_tests/databases/api_tests.py index d4a1ac08c21c..8d0cd0810f8b 100644 --- a/tests/integration_tests/databases/api_tests.py +++ b/tests/integration_tests/databases/api_tests.py @@ -281,7 +281,6 @@ def test_create_database(self): "server_cert": None, "extra": json.dumps(extra), } - uri = "api/v1/database/" rv = self.client.post(uri, json=database_data) response = json.loads(rv.data.decode("utf-8")) @@ -713,7 +712,6 @@ def test_cascade_delete_ssh_tunnel( "sqlalchemy_uri": example_db.sqlalchemy_uri_decrypted, "ssh_tunnel": ssh_tunnel_properties, } - uri = "api/v1/database/" rv = self.client.post(uri, json=database_data) response = json.loads(rv.data.decode("utf-8")) @@ -923,7 +921,6 @@ def test_create_database_invalid_configuration_method(self): "server_cert": None, "extra": json.dumps(extra), } - uri = "api/v1/database/" rv = self.client.post(uri, json=database_data) response = json.loads(rv.data.decode("utf-8")) diff --git a/tests/integration_tests/datasets/api_tests.py b/tests/integration_tests/datasets/api_tests.py index 59277a5bb6dd..37de6e87c27a 100644 --- a/tests/integration_tests/datasets/api_tests.py +++ b/tests/integration_tests/datasets/api_tests.py @@ -26,17 +26,13 @@ import pytest import yaml from sqlalchemy import inspect +from sqlalchemy.exc import SQLAlchemyError from sqlalchemy.orm import joinedload from sqlalchemy.sql import func from superset import app # noqa: F401 from superset.commands.dataset.exceptions import DatasetCreateFailedError from superset.connectors.sqla.models import SqlaTable, SqlMetric, TableColumn -from superset.daos.exceptions import ( - DAOCreateFailedError, - DAODeleteFailedError, - DAOUpdateFailedError, -) from superset.extensions import db, security_manager from superset.models.core import Database from superset.models.slice import Slice @@ -197,7 +193,6 @@ def test_user_gets_all_datasets(self): def count_datasets(): uri = "api/v1/chart/" rv = self.client.get(uri, "get_list") - print(rv.data) self.assertEqual(rv.status_code, 200) data = rv.get_json() return data["count"] @@ -879,7 +874,7 @@ def test_create_dataset_sqlalchemy_error(self, mock_dao_create): Dataset API: Test create dataset sqlalchemy error """ - 
mock_dao_create.side_effect = DAOCreateFailedError() + mock_dao_create.side_effect = SQLAlchemyError() self.login(ADMIN_USERNAME) main_db = get_main_database() dataset_data = { @@ -1487,7 +1482,7 @@ def test_update_dataset_sqlalchemy_error(self, mock_dao_update): Dataset API: Test update dataset sqlalchemy error """ - mock_dao_update.side_effect = DAOUpdateFailedError() + mock_dao_update.side_effect = SQLAlchemyError() dataset = self.insert_default_dataset() self.login(ADMIN_USERNAME) @@ -1551,7 +1546,7 @@ def test_delete_dataset_sqlalchemy_error(self, mock_dao_delete): Dataset API: Test delete dataset sqlalchemy error """ - mock_dao_delete.side_effect = DAODeleteFailedError() + mock_dao_delete.side_effect = SQLAlchemyError() dataset = self.insert_default_dataset() self.login(ADMIN_USERNAME) @@ -1620,7 +1615,7 @@ def test_delete_dataset_column_fail(self, mock_dao_delete): Dataset API: Test delete dataset column """ - mock_dao_delete.side_effect = DAODeleteFailedError() + mock_dao_delete.side_effect = SQLAlchemyError() dataset = self.get_fixture_datasets()[0] column_id = dataset.columns[0].id self.login(ADMIN_USERNAME) @@ -1692,7 +1687,7 @@ def test_delete_dataset_metric_fail(self, mock_dao_delete): Dataset API: Test delete dataset metric """ - mock_dao_delete.side_effect = DAODeleteFailedError() + mock_dao_delete.side_effect = SQLAlchemyError() dataset = self.get_fixture_datasets()[0] column_id = dataset.metrics[0].id self.login(ADMIN_USERNAME) diff --git a/tests/integration_tests/datasource_tests.py b/tests/integration_tests/datasource_tests.py index 718b6d2d9835..aaad26b85d72 100644 --- a/tests/integration_tests/datasource_tests.py +++ b/tests/integration_tests/datasource_tests.py @@ -88,7 +88,6 @@ def test_external_metadata_for_physical_table(self): ) def test_always_filter_main_dttm(self): - self.login(ADMIN_USERNAME) database = get_example_database() sql = f"SELECT DATE() as default_dttm, DATE() as additional_dttm, 1 as metric;" # noqa: F541 @@ -363,7 +362,6 @@ def test_save(self): elif k == "owners": self.assertEqual([o["id"] for o in resp[k]], datasource_post["owners"]) else: - print(k) self.assertEqual(resp[k], datasource_post[k]) def test_save_default_endpoint_validation_success(self): diff --git a/tests/integration_tests/embedded/api_tests.py b/tests/integration_tests/embedded/api_tests.py index 533f1311d3d6..64afaa178496 100644 --- a/tests/integration_tests/embedded/api_tests.py +++ b/tests/integration_tests/embedded/api_tests.py @@ -44,6 +44,7 @@ def test_get_embedded_dashboard(self): self.login(ADMIN_USERNAME) self.dash = db.session.query(Dashboard).filter_by(slug="births").first() self.embedded = EmbeddedDashboardDAO.upsert(self.dash, []) + db.session.flush() uri = f"api/v1/{self.resource_name}/{self.embedded.uuid}" response = self.client.get(uri) self.assert200(response) diff --git a/tests/integration_tests/embedded/dao_tests.py b/tests/integration_tests/embedded/dao_tests.py index e1f72feb89db..eed161581fe7 100644 --- a/tests/integration_tests/embedded/dao_tests.py +++ b/tests/integration_tests/embedded/dao_tests.py @@ -34,17 +34,21 @@ def test_upsert(self): dash = db.session.query(Dashboard).filter_by(slug="world_health").first() assert not dash.embedded EmbeddedDashboardDAO.upsert(dash, ["test.example.com"]) + db.session.flush() assert dash.embedded self.assertEqual(dash.embedded[0].allowed_domains, ["test.example.com"]) original_uuid = dash.embedded[0].uuid self.assertIsNotNone(original_uuid) EmbeddedDashboardDAO.upsert(dash, []) + db.session.flush() 
self.assertEqual(dash.embedded[0].allowed_domains, []) self.assertEqual(dash.embedded[0].uuid, original_uuid) @pytest.mark.usefixtures("load_world_bank_dashboard_with_slices") def test_get_by_uuid(self): dash = db.session.query(Dashboard).filter_by(slug="world_health").first() - uuid = str(EmbeddedDashboardDAO.upsert(dash, ["test.example.com"]).uuid) + EmbeddedDashboardDAO.upsert(dash, ["test.example.com"]) + db.session.flush() + uuid = str(dash.embedded[0].uuid) embedded = EmbeddedDashboardDAO.find_by_id(uuid) self.assertIsNotNone(embedded) diff --git a/tests/integration_tests/embedded/test_view.py b/tests/integration_tests/embedded/test_view.py index 7fcfcdba9ff0..f4d5ae692556 100644 --- a/tests/integration_tests/embedded/test_view.py +++ b/tests/integration_tests/embedded/test_view.py @@ -44,6 +44,7 @@ def test_get_embedded_dashboard(client: FlaskClient[Any]): # noqa: F811 dash = db.session.query(Dashboard).filter_by(slug="births").first() embedded = EmbeddedDashboardDAO.upsert(dash, []) + db.session.flush() uri = f"embedded/{embedded.uuid}" response = client.get(uri) assert response.status_code == 200 @@ -57,6 +58,7 @@ def test_get_embedded_dashboard(client: FlaskClient[Any]): # noqa: F811 def test_get_embedded_dashboard_referrer_not_allowed(client: FlaskClient[Any]): # noqa: F811 dash = db.session.query(Dashboard).filter_by(slug="births").first() embedded = EmbeddedDashboardDAO.upsert(dash, ["test.example.com"]) + db.session.flush() uri = f"embedded/{embedded.uuid}" response = client.get(uri) assert response.status_code == 403 diff --git a/tests/integration_tests/fixtures/unicode_dashboard.py b/tests/integration_tests/fixtures/unicode_dashboard.py index e68e8f079944..970845783058 100644 --- a/tests/integration_tests/fixtures/unicode_dashboard.py +++ b/tests/integration_tests/fixtures/unicode_dashboard.py @@ -114,7 +114,8 @@ def _create_and_commit_unicode_slice(table: SqlaTable, title: str): def _cleanup(dash: Dashboard, slice_name: str) -> None: db.session.delete(dash) - if slice_name: - slice = db.session.query(Slice).filter_by(slice_name=slice_name).one_or_none() + if slice_name and ( + slice := db.session.query(Slice).filter_by(slice_name=slice_name).one_or_none() + ): db.session.delete(slice) db.session.commit() diff --git a/tests/integration_tests/security/row_level_security_tests.py b/tests/integration_tests/security/row_level_security_tests.py index 2c8a13a71f4d..71bb1484e033 100644 --- a/tests/integration_tests/security/row_level_security_tests.py +++ b/tests/integration_tests/security/row_level_security_tests.py @@ -215,8 +215,6 @@ def test_model_view_rls_add_name_unique(self): }, ) self.assertEqual(rv.status_code, 422) - data = json.loads(rv.data.decode("utf-8")) - assert "Create failed" in data["message"] @pytest.mark.usefixtures("create_dataset") def test_model_view_rls_add_tables_required(self): diff --git a/tests/integration_tests/sqla_models_tests.py b/tests/integration_tests/sqla_models_tests.py index f5569b1c8391..86fffee1ec89 100644 --- a/tests/integration_tests/sqla_models_tests.py +++ b/tests/integration_tests/sqla_models_tests.py @@ -543,8 +543,7 @@ def test_fetch_metadata_for_updated_virtual_table(self): # make sure the columns have been mapped properly assert len(table.columns) == 4 - with db.session.no_autoflush: - table.fetch_metadata(commit=False) + table.fetch_metadata() # assert that the removed column has been dropped and # the physical and calculated columns are present diff --git a/tests/integration_tests/sqllab_tests.py 
b/tests/integration_tests/sqllab_tests.py index a36cb8a8ec35..829854d96681 100644 --- a/tests/integration_tests/sqllab_tests.py +++ b/tests/integration_tests/sqllab_tests.py @@ -73,7 +73,6 @@ class TestSqlLab(SupersetTestCase): def run_some_queries(self): db.session.query(Query).delete() - db.session.commit() self.run_sql(QUERY_1, client_id="client_id_1", username="admin") self.run_sql(QUERY_2, client_id="client_id_2", username="admin") self.run_sql(QUERY_3, client_id="client_id_3", username="gamma_sqllab") diff --git a/tests/integration_tests/superset_test_config.py b/tests/integration_tests/superset_test_config.py index 04472bfc2464..0935714c5427 100644 --- a/tests/integration_tests/superset_test_config.py +++ b/tests/integration_tests/superset_test_config.py @@ -95,6 +95,7 @@ def GET_FEATURE_FLAGS_FUNC(ff): FAB_ROLES = {"TestRole": [["Security", "menu_access"], ["List Users", "menu_access"]]} +PUBLIC_ROLE_LIKE = "Gamma" AUTH_ROLE_PUBLIC = "Public" EMAIL_NOTIFICATIONS = False REDIS_HOST = os.environ.get("REDIS_HOST", "localhost") # noqa: F405 diff --git a/tests/integration_tests/tags/dao_tests.py b/tests/integration_tests/tags/dao_tests.py index b06e22054ec6..8a6ba6e5f4b3 100644 --- a/tests/integration_tests/tags/dao_tests.py +++ b/tests/integration_tests/tags/dao_tests.py @@ -18,7 +18,6 @@ from operator import and_ from unittest.mock import patch # noqa: F401 import pytest -from superset.daos.exceptions import DAOCreateFailedError, DAOException # noqa: F401 from superset.models.slice import Slice from superset.models.sql_lab import SavedQuery # noqa: F401 from superset.daos.tag import TagDAO @@ -188,6 +187,7 @@ def test_get_objects_from_tag(self): TaggedObject.object_type == ObjectType.chart, ), ) + .join(Tag, TaggedObject.tag_id == Tag.id) .distinct(Slice.id) .count() ) @@ -200,6 +200,7 @@ def test_get_objects_from_tag(self): TaggedObject.object_type == ObjectType.dashboard, ), ) + .join(Tag, TaggedObject.tag_id == Tag.id) .distinct(Dashboard.id) .count() + num_charts diff --git a/tests/unit_tests/commands/databases/create_test.py b/tests/unit_tests/commands/databases/create_test.py index 405238827d5c..09d5744afd53 100644 --- a/tests/unit_tests/commands/databases/create_test.py +++ b/tests/unit_tests/commands/databases/create_test.py @@ -29,7 +29,6 @@ def database_with_catalog(mocker: MockerFixture) -> MagicMock: """ Mock a database with catalogs and schemas. """ - mocker.patch("superset.commands.database.create.db") mocker.patch("superset.commands.database.create.TestConnectionDatabaseCommand") database = mocker.MagicMock() @@ -53,7 +52,6 @@ def database_without_catalog(mocker: MockerFixture) -> MagicMock: """ Mock a database without catalogs. """ - mocker.patch("superset.commands.database.create.db") mocker.patch("superset.commands.database.create.TestConnectionDatabaseCommand") database = mocker.MagicMock() diff --git a/tests/unit_tests/commands/databases/update_test.py b/tests/unit_tests/commands/databases/update_test.py index 300efb62e7d3..37500d521420 100644 --- a/tests/unit_tests/commands/databases/update_test.py +++ b/tests/unit_tests/commands/databases/update_test.py @@ -29,8 +29,6 @@ def database_with_catalog(mocker: MockerFixture) -> MagicMock: """ Mock a database with catalogs and schemas. 
""" - mocker.patch("superset.commands.database.update.db") - database = mocker.MagicMock() database.database_name = "my_db" database.db_engine_spec.__name__ = "test_engine" @@ -50,8 +48,6 @@ def database_without_catalog(mocker: MockerFixture) -> MagicMock: """ Mock a database without catalogs. """ - mocker.patch("superset.commands.database.update.db") - database = mocker.MagicMock() database.database_name = "my_db" database.db_engine_spec.__name__ = "test_engine" diff --git a/tests/unit_tests/dao/tag_test.py b/tests/unit_tests/dao/tag_test.py index d50e7d8a28e0..7662393d4fc4 100644 --- a/tests/unit_tests/dao/tag_test.py +++ b/tests/unit_tests/dao/tag_test.py @@ -22,7 +22,6 @@ def test_user_favorite_tag(mocker): from superset.daos.tag import TagDAO # Mock the behavior of TagDAO and g - mock_session = mocker.patch("superset.daos.tag.db.session") mock_TagDAO = mocker.patch( "superset.daos.tag.TagDAO" ) # Replace with the actual path to TagDAO @@ -40,14 +39,11 @@ def test_user_favorite_tag(mocker): # Check that users_favorited was updated correctly assert mock_TagDAO.find_by_id().users_favorited == [mock_g.user] - mock_session.commit.assert_called_once() - def test_remove_user_favorite_tag(mocker): from superset.daos.tag import TagDAO # Mock the behavior of TagDAO and g - mock_session = mocker.patch("superset.daos.tag.db.session") mock_TagDAO = mocker.patch("superset.daos.tag.TagDAO") mock_tag = mocker.MagicMock(users_favorited=[]) mock_TagDAO.find_by_id.return_value = mock_tag @@ -68,9 +64,6 @@ def test_remove_user_favorite_tag(mocker): # Check that users_favorited no longer contains the user assert mock_user not in mock_tag.users_favorited - # Check that the db.session.was committed - mock_session.commit.assert_called_once() - def test_remove_user_favorite_tag_no_user(mocker): from superset.daos.tag import TagDAO diff --git a/tests/unit_tests/dao/user_test.py b/tests/unit_tests/dao/user_test.py index a2a74a55497c..bf65c51121fa 100644 --- a/tests/unit_tests/dao/user_test.py +++ b/tests/unit_tests/dao/user_test.py @@ -90,4 +90,3 @@ def test_set_avatar_url_without_existing_attributes(mock_db_session): assert len(user.extra_attributes) == 1 assert user.extra_attributes[0].avatar_url == new_url mock_db_session.add.assert_called() # New attribute should be added - mock_db_session.commit.assert_called() diff --git a/tests/unit_tests/databases/api_test.py b/tests/unit_tests/databases/api_test.py index 488378f7ca3c..f4534d216b9b 100644 --- a/tests/unit_tests/databases/api_test.py +++ b/tests/unit_tests/databases/api_test.py @@ -115,7 +115,7 @@ def test_post_with_uuid( payload = response.json assert payload["result"]["uuid"] == "7c1b7880-a59d-47cd-8bf1-f1eb8d2863cb" - database = db.session.query(Database).one() + database = session.query(Database).one() assert database.uuid == UUID("7c1b7880-a59d-47cd-8bf1-f1eb8d2863cb") diff --git a/tests/unit_tests/databases/ssh_tunnel/commands/create_test.py b/tests/unit_tests/databases/ssh_tunnel/commands/create_test.py index b20578784d03..9b9393d3a735 100644 --- a/tests/unit_tests/databases/ssh_tunnel/commands/create_test.py +++ b/tests/unit_tests/databases/ssh_tunnel/commands/create_test.py @@ -36,7 +36,7 @@ def test_create_ssh_tunnel_command() -> None: ) properties = { - "database_id": database.id, + "database": database, "server_address": "123.132.123.1", "server_port": "3005", "username": "foo", diff --git a/tests/unit_tests/databases/ssh_tunnel/dao_tests.py b/tests/unit_tests/databases/ssh_tunnel/dao_tests.py index 1456f7fd801c..a24a94ec36d1 100644 --- 
a/tests/unit_tests/databases/ssh_tunnel/dao_tests.py +++ b/tests/unit_tests/databases/ssh_tunnel/dao_tests.py @@ -31,7 +31,6 @@ def test_create_ssh_tunnel(): "username": "foo", "password": "bar", }, - commit=False, ) assert result is not None diff --git a/tests/unit_tests/security/manager_test.py b/tests/unit_tests/security/manager_test.py index a0ec87c52eb9..e35513f2ff8a 100644 --- a/tests/unit_tests/security/manager_test.py +++ b/tests/unit_tests/security/manager_test.py @@ -413,7 +413,6 @@ def test_raise_for_access_chart_owner( owners=[alpha], ) session.add(slice) - session.flush() with override_user(alpha): sm.raise_for_access( diff --git a/tests/unit_tests/utils/lock_tests.py b/tests/unit_tests/utils/lock_tests.py index aa231bb0cf8f..4c9121fe3874 100644 --- a/tests/unit_tests/utils/lock_tests.py +++ b/tests/unit_tests/utils/lock_tests.py @@ -22,8 +22,8 @@ import pytest from freezegun import freeze_time -from sqlalchemy.orm import Session, sessionmaker +from superset import db from superset.exceptions import CreateKeyValueDistributedLockFailedException from superset.key_value.types import JsonKeyValueCodec from superset.utils.lock import get_key, KeyValueDistributedLock @@ -32,56 +32,51 @@ OTHER_KEY = get_key("ns2", a=1, b=2) -def _get_lock(key: UUID, session: Session) -> Any: +def _get_lock(key: UUID) -> Any: from superset.key_value.models import KeyValueEntry - entry = session.query(KeyValueEntry).filter_by(uuid=key).first() + entry = db.session.query(KeyValueEntry).filter_by(uuid=key).first() if entry is None or entry.is_expired(): return None return JsonKeyValueCodec().decode(entry.value) -def _get_other_session() -> Session: - # This session is used to simulate what another worker will find in the metastore - # during the locking process. - from superset import db - - bind = db.session.get_bind() - SessionMaker = sessionmaker(bind=bind) - return SessionMaker() - - def test_key_value_distributed_lock_happy_path() -> None: """ Test successfully acquiring and returning the distributed lock. + + Note we use a nested transaction to ensure that the cleanup from the outer context + manager is correctly invoked, otherwise a partial rollback would occur leaving the + database in a fractured state. 
""" - session = _get_other_session() with freeze_time("2021-01-01"): - assert _get_lock(MAIN_KEY, session) is None + assert _get_lock(MAIN_KEY) is None + with KeyValueDistributedLock("ns", a=1, b=2) as key: assert key == MAIN_KEY - assert _get_lock(key, session) is True - assert _get_lock(OTHER_KEY, session) is None - with pytest.raises(CreateKeyValueDistributedLockFailedException): - with KeyValueDistributedLock("ns", a=1, b=2): - pass + assert _get_lock(key) is True + assert _get_lock(OTHER_KEY) is None + + with db.session.begin_nested(): + with pytest.raises(CreateKeyValueDistributedLockFailedException): + with KeyValueDistributedLock("ns", a=1, b=2): + pass - assert _get_lock(MAIN_KEY, session) is None + assert _get_lock(MAIN_KEY) is None def test_key_value_distributed_lock_expired() -> None: """ Test expiration of the distributed lock """ - session = _get_other_session() - with freeze_time("2021-01-01T"): - assert _get_lock(MAIN_KEY, session) is None + with freeze_time("2021-01-01"): + assert _get_lock(MAIN_KEY) is None with KeyValueDistributedLock("ns", a=1, b=2): - assert _get_lock(MAIN_KEY, session) is True - with freeze_time("2022-01-01T"): - assert _get_lock(MAIN_KEY, session) is None + assert _get_lock(MAIN_KEY) is True + with freeze_time("2022-01-01"): + assert _get_lock(MAIN_KEY) is None - assert _get_lock(MAIN_KEY, session) is None + assert _get_lock(MAIN_KEY) is None From e274925b2a975165b4c22e87860075c90d7e4832 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=C4=90=E1=BB=97=20Tr=E1=BB=8Dng=20H=E1=BA=A3i?= <41283691+hainenber@users.noreply.github.com> Date: Sat, 29 Jun 2024 02:45:38 +0700 Subject: [PATCH 15/31] chore(frontend): remove obsolete ESLint rules in tests (#29405) Signed-off-by: hainenber --- superset-frontend/src/SqlLab/actions/sqlLab.test.js | 1 - superset-frontend/src/explore/actions/exploreActions.test.js | 1 - .../src/explore/components/controls/CheckboxControl.test.tsx | 1 - .../FilterControl/AdhocFilterControl/AdhocFilterControl.test.jsx | 1 - .../AdhocFilterEditPopover/AdhocFilterEditPopover.test.jsx | 1 - .../AdhocFilterEditPopoverSimpleTabContent.test.tsx | 1 - .../AdhocMetricEditPopover/AdhocMetricEditPopover.test.jsx | 1 - .../components/controls/MetricControl/AdhocMetricOption.test.jsx | 1 - .../controls/MetricControl/FilterDefinitionOption.test.jsx | 1 - .../controls/MetricControl/MetricDefinitionValue.test.jsx | 1 - .../components/controls/MetricControl/MetricsControl.test.jsx | 1 - .../src/explore/components/controls/SelectControl.test.jsx | 1 - .../src/explore/components/controls/TextAreaControl.test.jsx | 1 - .../src/explore/components/controls/ViewportControl.test.jsx | 1 - 14 files changed, 14 deletions(-) diff --git a/superset-frontend/src/SqlLab/actions/sqlLab.test.js b/superset-frontend/src/SqlLab/actions/sqlLab.test.js index 11ab424512b0..f889d81ffb71 100644 --- a/superset-frontend/src/SqlLab/actions/sqlLab.test.js +++ b/superset-frontend/src/SqlLab/actions/sqlLab.test.js @@ -16,7 +16,6 @@ * specific language governing permissions and limitations * under the License. 
*/ -/* eslint no-unused-expressions: 0 */ import sinon from 'sinon'; import fetchMock from 'fetch-mock'; import configureMockStore from 'redux-mock-store'; diff --git a/superset-frontend/src/explore/actions/exploreActions.test.js b/superset-frontend/src/explore/actions/exploreActions.test.js index 12bbdce4e7df..77c204cf3fa1 100644 --- a/superset-frontend/src/explore/actions/exploreActions.test.js +++ b/superset-frontend/src/explore/actions/exploreActions.test.js @@ -16,7 +16,6 @@ * specific language governing permissions and limitations * under the License. */ -/* eslint-disable no-unused-expressions */ import { defaultState } from 'src/explore/store'; import exploreReducer from 'src/explore/reducers/exploreReducer'; import * as actions from 'src/explore/actions/exploreActions'; diff --git a/superset-frontend/src/explore/components/controls/CheckboxControl.test.tsx b/superset-frontend/src/explore/components/controls/CheckboxControl.test.tsx index 84ba56822524..9ad8f891e1cc 100644 --- a/superset-frontend/src/explore/components/controls/CheckboxControl.test.tsx +++ b/superset-frontend/src/explore/components/controls/CheckboxControl.test.tsx @@ -16,7 +16,6 @@ * specific language governing permissions and limitations * under the License. */ -/* eslint-disable no-unused-expressions */ import { render, screen } from 'spec/helpers/testing-library'; import { ThemeProvider, supersetTheme } from '@superset-ui/core'; import CheckboxControl from 'src/explore/components/controls/CheckboxControl'; diff --git a/superset-frontend/src/explore/components/controls/FilterControl/AdhocFilterControl/AdhocFilterControl.test.jsx b/superset-frontend/src/explore/components/controls/FilterControl/AdhocFilterControl/AdhocFilterControl.test.jsx index ffd2d176018c..55f2ea7018a2 100644 --- a/superset-frontend/src/explore/components/controls/FilterControl/AdhocFilterControl/AdhocFilterControl.test.jsx +++ b/superset-frontend/src/explore/components/controls/FilterControl/AdhocFilterControl/AdhocFilterControl.test.jsx @@ -16,7 +16,6 @@ * specific language governing permissions and limitations * under the License. */ -/* eslint-disable no-unused-expressions */ import sinon from 'sinon'; import { shallow } from 'enzyme'; import { supersetTheme } from '@superset-ui/core'; diff --git a/superset-frontend/src/explore/components/controls/FilterControl/AdhocFilterEditPopover/AdhocFilterEditPopover.test.jsx b/superset-frontend/src/explore/components/controls/FilterControl/AdhocFilterEditPopover/AdhocFilterEditPopover.test.jsx index 594c20fd83be..3a8e8d0745ce 100644 --- a/superset-frontend/src/explore/components/controls/FilterControl/AdhocFilterEditPopover/AdhocFilterEditPopover.test.jsx +++ b/superset-frontend/src/explore/components/controls/FilterControl/AdhocFilterEditPopover/AdhocFilterEditPopover.test.jsx @@ -16,7 +16,6 @@ * specific language governing permissions and limitations * under the License. 
*/ -/* eslint-disable no-unused-expressions */ import sinon from 'sinon'; import { shallow } from 'enzyme'; import Button from 'src/components/Button'; diff --git a/superset-frontend/src/explore/components/controls/FilterControl/AdhocFilterEditPopoverSimpleTabContent/AdhocFilterEditPopoverSimpleTabContent.test.tsx b/superset-frontend/src/explore/components/controls/FilterControl/AdhocFilterEditPopoverSimpleTabContent/AdhocFilterEditPopoverSimpleTabContent.test.tsx index 05f1ad0fc785..1e352cc38d3a 100644 --- a/superset-frontend/src/explore/components/controls/FilterControl/AdhocFilterEditPopoverSimpleTabContent/AdhocFilterEditPopoverSimpleTabContent.test.tsx +++ b/superset-frontend/src/explore/components/controls/FilterControl/AdhocFilterEditPopoverSimpleTabContent/AdhocFilterEditPopoverSimpleTabContent.test.tsx @@ -16,7 +16,6 @@ * specific language governing permissions and limitations * under the License. */ -/* eslint-disable no-unused-expressions */ import * as redux from 'react-redux'; import sinon from 'sinon'; import { shallow } from 'enzyme'; diff --git a/superset-frontend/src/explore/components/controls/MetricControl/AdhocMetricEditPopover/AdhocMetricEditPopover.test.jsx b/superset-frontend/src/explore/components/controls/MetricControl/AdhocMetricEditPopover/AdhocMetricEditPopover.test.jsx index e51c14f2a6f9..06b8b8a9eeb9 100644 --- a/superset-frontend/src/explore/components/controls/MetricControl/AdhocMetricEditPopover/AdhocMetricEditPopover.test.jsx +++ b/superset-frontend/src/explore/components/controls/MetricControl/AdhocMetricEditPopover/AdhocMetricEditPopover.test.jsx @@ -16,7 +16,6 @@ * specific language governing permissions and limitations * under the License. */ -/* eslint-disable no-unused-expressions */ import sinon from 'sinon'; import { shallow } from 'enzyme'; import { FormItem } from 'src/components/Form'; diff --git a/superset-frontend/src/explore/components/controls/MetricControl/AdhocMetricOption.test.jsx b/superset-frontend/src/explore/components/controls/MetricControl/AdhocMetricOption.test.jsx index bf1b501ac172..1ef070de8119 100644 --- a/superset-frontend/src/explore/components/controls/MetricControl/AdhocMetricOption.test.jsx +++ b/superset-frontend/src/explore/components/controls/MetricControl/AdhocMetricOption.test.jsx @@ -16,7 +16,6 @@ * specific language governing permissions and limitations * under the License. */ -/* eslint-disable no-unused-expressions */ import sinon from 'sinon'; import { shallow } from 'enzyme'; diff --git a/superset-frontend/src/explore/components/controls/MetricControl/FilterDefinitionOption.test.jsx b/superset-frontend/src/explore/components/controls/MetricControl/FilterDefinitionOption.test.jsx index fb920cb7c630..a1bbc1cb14db 100644 --- a/superset-frontend/src/explore/components/controls/MetricControl/FilterDefinitionOption.test.jsx +++ b/superset-frontend/src/explore/components/controls/MetricControl/FilterDefinitionOption.test.jsx @@ -16,7 +16,6 @@ * specific language governing permissions and limitations * under the License. 
*/ -/* eslint-disable no-unused-expressions */ import { render, screen } from 'spec/helpers/testing-library'; import FilterDefinitionOption from 'src/explore/components/controls/MetricControl/FilterDefinitionOption'; import { AGGREGATES } from 'src/explore/constants'; diff --git a/superset-frontend/src/explore/components/controls/MetricControl/MetricDefinitionValue.test.jsx b/superset-frontend/src/explore/components/controls/MetricControl/MetricDefinitionValue.test.jsx index 259220bb4bc0..3b51fcc5083a 100644 --- a/superset-frontend/src/explore/components/controls/MetricControl/MetricDefinitionValue.test.jsx +++ b/superset-frontend/src/explore/components/controls/MetricControl/MetricDefinitionValue.test.jsx @@ -16,7 +16,6 @@ * specific language governing permissions and limitations * under the License. */ -/* eslint-disable no-unused-expressions */ import { shallow } from 'enzyme'; import { AGGREGATES } from 'src/explore/constants'; diff --git a/superset-frontend/src/explore/components/controls/MetricControl/MetricsControl.test.jsx b/superset-frontend/src/explore/components/controls/MetricControl/MetricsControl.test.jsx index 1be8b2aa97c3..bfc7349107a2 100644 --- a/superset-frontend/src/explore/components/controls/MetricControl/MetricsControl.test.jsx +++ b/superset-frontend/src/explore/components/controls/MetricControl/MetricsControl.test.jsx @@ -16,7 +16,6 @@ * specific language governing permissions and limitations * under the License. */ -/* eslint-disable no-unused-expressions */ import sinon from 'sinon'; import { shallow } from 'enzyme'; diff --git a/superset-frontend/src/explore/components/controls/SelectControl.test.jsx b/superset-frontend/src/explore/components/controls/SelectControl.test.jsx index 4b38ef72b1b4..d86c334be17b 100644 --- a/superset-frontend/src/explore/components/controls/SelectControl.test.jsx +++ b/superset-frontend/src/explore/components/controls/SelectControl.test.jsx @@ -16,7 +16,6 @@ * specific language governing permissions and limitations * under the License. */ -/* eslint-disable no-unused-expressions */ import sinon from 'sinon'; import { shallow } from 'enzyme'; import { Select as SelectComponent } from 'src/components'; diff --git a/superset-frontend/src/explore/components/controls/TextAreaControl.test.jsx b/superset-frontend/src/explore/components/controls/TextAreaControl.test.jsx index e77a69310211..402c69934d71 100644 --- a/superset-frontend/src/explore/components/controls/TextAreaControl.test.jsx +++ b/superset-frontend/src/explore/components/controls/TextAreaControl.test.jsx @@ -16,7 +16,6 @@ * specific language governing permissions and limitations * under the License. */ -/* eslint-disable no-unused-expressions */ import sinon from 'sinon'; import { styledMount as mount } from 'spec/helpers/theming'; import { TextAreaEditor } from 'src/components/AsyncAceEditor'; diff --git a/superset-frontend/src/explore/components/controls/ViewportControl.test.jsx b/superset-frontend/src/explore/components/controls/ViewportControl.test.jsx index d74612a6108c..282e39f6b1ec 100644 --- a/superset-frontend/src/explore/components/controls/ViewportControl.test.jsx +++ b/superset-frontend/src/explore/components/controls/ViewportControl.test.jsx @@ -16,7 +16,6 @@ * specific language governing permissions and limitations * under the License. 
*/ -/* eslint-disable no-unused-expressions */ import { styledMount as mount } from 'spec/helpers/theming'; import Popover from 'src/components/Popover'; From 5c9352fae4cdd7f0a107815102c9086e711867f4 Mon Sep 17 00:00:00 2001 From: ari-jane <123119632+ari-jane@users.noreply.github.com> Date: Fri, 28 Jun 2024 21:46:06 +0200 Subject: [PATCH 16/31] docs: update INTHEWILD.md with bluquist (#29399) --- RESOURCES/INTHEWILD.md | 1 + 1 file changed, 1 insertion(+) diff --git a/RESOURCES/INTHEWILD.md b/RESOURCES/INTHEWILD.md index e39c82ae9c0b..a99bd399b84c 100644 --- a/RESOURCES/INTHEWILD.md +++ b/RESOURCES/INTHEWILD.md @@ -166,6 +166,7 @@ Join our growing community! ### HR / Staffing - [Swile](https://www.swile.co/) [@PaoloTerzi] - [Symmetrics](https://www.symmetrics.fyi) +- [bluquist](https://bluquist.com/) ### Government - [City of Ann Arbor, MI](https://www.a2gov.org/) [@sfirke] From ba405bacbfa0bbd9bfd8089147ecaddd9a2ec962 Mon Sep 17 00:00:00 2001 From: Shubhendra Kushwaha Date: Sun, 30 Jun 2024 17:38:15 +0530 Subject: [PATCH 17/31] docs: Update INTHEWILD.md with Aveti Learning (#29413) --- RESOURCES/INTHEWILD.md | 1 + 1 file changed, 1 insertion(+) diff --git a/RESOURCES/INTHEWILD.md b/RESOURCES/INTHEWILD.md index a99bd399b84c..da918ef52a36 100644 --- a/RESOURCES/INTHEWILD.md +++ b/RESOURCES/INTHEWILD.md @@ -136,6 +136,7 @@ Join our growing community! - [Zaihang](http://www.zaih.com/) ### Education +- [Aveti Learning](https://avetilearning.com/) [@TheShubhendra] - [Brilliant.org](https://brilliant.org/) - [Platzi.com](https://platzi.com/) - [Sunbird](https://www.sunbird.org/) [@eksteporg] From 179cf269c643c91a804eaf07ba3a66a8fcf65386 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Hex=20Caf=C3=A9?= <157834442+hexcafe@users.noreply.github.com> Date: Mon, 1 Jul 2024 16:59:25 +0800 Subject: [PATCH 18/31] fix: SQL label missing for non-group-by queries (#29420) --- superset/models/helpers.py | 4 +++- tests/integration_tests/model_tests.py | 23 +++++++++++++++++++++++ 2 files changed, 26 insertions(+), 1 deletion(-) diff --git a/superset/models/helpers.py b/superset/models/helpers.py index 7b211f98b11a..b841426ff717 100644 --- a/superset/models/helpers.py +++ b/superset/models/helpers.py @@ -1683,7 +1683,9 @@ def get_sqla_query( # pylint: disable=too-many-arguments,too-many-locals,too-ma select_exprs.append( self.convert_tbl_column_to_sqla_col( - columns_by_name[selected], template_processor=template_processor + columns_by_name[selected], + template_processor=template_processor, + label=_column_label, ) if isinstance(selected, str) and selected in columns_by_name else self.make_sqla_column_compatible( diff --git a/tests/integration_tests/model_tests.py b/tests/integration_tests/model_tests.py index 458168009be1..fb22f40fb221 100644 --- a/tests/integration_tests/model_tests.py +++ b/tests/integration_tests/model_tests.py @@ -556,6 +556,29 @@ def test_query_with_non_existent_metrics(self): self.assertTrue("Metric 'invalid' does not exist", context.exception) + def test_query_label_without_group_by(self): + tbl = self.get_table(name="birth_names") + query_obj = dict( + groupby=[], + columns=[ + "gender", + { + "label": "Given Name", + "sqlExpression": "name", + "expressionType": "SQL", + }, + ], + filter=[], + is_timeseries=False, + granularity=None, + from_dttm=None, + to_dttm=None, + extras={}, + ) + + sql = tbl.get_query_str(query_obj) + self.assertRegex(sql, r'name AS ["`]?Given Name["`]?') + @pytest.mark.usefixtures("load_birth_names_dashboard_with_slices") def 
test_data_for_slices_with_no_query_context(self): tbl = self.get_table(name="birth_names") From 42773b9b1c35cc38f53768a961a6b01156872326 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 1 Jul 2024 10:02:32 -0600 Subject: [PATCH 19/31] chore(deps): bump stream from 0.0.2 to 0.0.3 in /docs (#29431) Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- docs/package.json | 2 +- docs/yarn.lock | 20 ++++++++++---------- 2 files changed, 11 insertions(+), 11 deletions(-) diff --git a/docs/package.json b/docs/package.json index 102cd80a77b3..946715940ffd 100644 --- a/docs/package.json +++ b/docs/package.json @@ -41,7 +41,7 @@ "react-dom": "^18.3.1", "react-github-btn": "^1.4.0", "react-svg-pan-zoom": "^3.12.1", - "stream": "^0.0.2", + "stream": "^0.0.3", "swagger-ui-react": "^5.17.14", "url-loader": "^4.1.1" }, diff --git a/docs/yarn.lock b/docs/yarn.lock index c522e53040bf..f6e2cbff6304 100644 --- a/docs/yarn.lock +++ b/docs/yarn.lock @@ -3924,6 +3924,11 @@ common-path-prefix@^3.0.0: resolved "https://registry.yarnpkg.com/common-path-prefix/-/common-path-prefix-3.0.0.tgz#7d007a7e07c58c4b4d5f433131a19141b29f11e0" integrity sha512-QE33hToZseCH3jS0qN96O/bSh3kaw/h+Tq7ngyY9eWDUnTlTNUyqfqvCXioLe5Na5jFsL78ra/wuBU4iuEgd4w== +component-emitter@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/component-emitter/-/component-emitter-2.0.0.tgz#3a137dfe66fcf2efe3eab7cb7d5f51741b3620c6" + integrity sha512-4m5s3Me2xxlVKG9PkZpQqHQR7bgpnN7joDMJ4yvVkVXngjoITG76IaZmzmywSeRTeTpc6N6r3H3+KyUurV8OYw== + compressible@~2.0.16: version "2.0.18" resolved "https://registry.yarnpkg.com/compressible/-/compressible-2.0.18.tgz#af53cca6b070d4c3c0750fbd77286a6d7cc46fba" @@ -4566,11 +4571,6 @@ electron-to-chromium@^1.4.668: resolved "https://registry.yarnpkg.com/electron-to-chromium/-/electron-to-chromium-1.4.752.tgz#99227455547c8254488e3dab7d316c34a2c067b8" integrity sha512-P3QJreYI/AUTcfBVrC4zy9KvnZWekViThgQMX/VpJ+IsOBbcX5JFpORM4qWapwWQ+agb2nYAOyn/4PMXOk0m2Q== -emitter-component@^1.1.1: - version "1.1.2" - resolved "https://registry.yarnpkg.com/emitter-component/-/emitter-component-1.1.2.tgz#d65af5833dc7c682fd0ade35f902d16bc4bad772" - integrity sha512-QdXO3nXOzZB4pAjM0n6ZE+R9/+kPpECA/XSELIcc54NeYVnBqIk+4DFiBgK+8QbV3mdvTG6nedl7dTYgO+5wDw== - emoji-regex@^8.0.0: version "8.0.0" resolved "https://registry.yarnpkg.com/emoji-regex/-/emoji-regex-8.0.0.tgz#e818fd69ce5ccfcb404594f842963bf53164cc37" @@ -9465,12 +9465,12 @@ std-env@^3.0.1: resolved "https://registry.yarnpkg.com/std-env/-/std-env-3.7.0.tgz#c9f7386ced6ecf13360b6c6c55b8aaa4ef7481d2" integrity sha512-JPbdCEQLj1w5GilpiHAx3qJvFndqybBysA3qUOnznweH4QbNYUsW/ea8QzSrnh0vNsezMMw5bcVool8lM0gwzg== -stream@^0.0.2: - version "0.0.2" - resolved "https://registry.yarnpkg.com/stream/-/stream-0.0.2.tgz#7f5363f057f6592c5595f00bc80a27f5cec1f0ef" - integrity sha512-gCq3NDI2P35B2n6t76YJuOp7d6cN/C7Rt0577l91wllh0sY9ZBuw9KaSGqH/b0hzn3CWWJbpbW0W0WvQ1H/Q7g== +stream@^0.0.3: + version "0.0.3" + resolved "https://registry.yarnpkg.com/stream/-/stream-0.0.3.tgz#3f3934a900a561ce3e2b9ffbd2819cead32699d9" + integrity sha512-aMsbn7VKrl4A2T7QAQQbzgN7NVc70vgF5INQrBXqn4dCXN1zy3L9HGgLO5s7PExmdrzTJ8uR/27aviW8or8/+A== dependencies: - emitter-component "^1.1.1" + component-emitter "^2.0.0" string-convert@^0.2.0: version "0.2.1" From cfe29406e566fdf2aff3b7d03bcd748ad8fb637e Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> 
Date: Mon, 1 Jul 2024 10:02:49 -0600 Subject: [PATCH 20/31] chore(deps-dev): bump typescript from 5.4.5 to 5.5.2 in /docs (#29432) Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- docs/package.json | 2 +- docs/yarn.lock | 8 ++++---- 2 files changed, 5 insertions(+), 5 deletions(-) diff --git a/docs/package.json b/docs/package.json index 946715940ffd..225bfb42520f 100644 --- a/docs/package.json +++ b/docs/package.json @@ -49,7 +49,7 @@ "@docusaurus/module-type-aliases": "^3.4.0", "@docusaurus/tsconfig": "^3.4.0", "@types/react": "^18.3.3", - "typescript": "^5.4.5", + "typescript": "^5.5.2", "webpack": "^5.91.0" }, "browserslist": { diff --git a/docs/yarn.lock b/docs/yarn.lock index f6e2cbff6304..70743d4e4734 100644 --- a/docs/yarn.lock +++ b/docs/yarn.lock @@ -9893,10 +9893,10 @@ types-ramda@^0.30.0: dependencies: ts-toolbelt "^9.6.0" -typescript@^5.4.5: - version "5.4.5" - resolved "https://registry.yarnpkg.com/typescript/-/typescript-5.4.5.tgz#42ccef2c571fdbd0f6718b1d1f5e6e5ef006f611" - integrity sha512-vcI4UpRgg81oIRUFwR0WSIHKt11nJ7SAVlYNIu+QpqeyXP+gpQJy/Z4+F0aGxSE4MqwjyXvW/TzgkLAx2AGHwQ== +typescript@^5.5.2: + version "5.5.2" + resolved "https://registry.yarnpkg.com/typescript/-/typescript-5.5.2.tgz#c26f023cb0054e657ce04f72583ea2d85f8d0507" + integrity sha512-NcRtPEOsPFFWjobJEtfihkLCZCXZt/os3zf8nTxjVH3RvTSxjrCamJpbExGvYOF+tFHc3pA65qpdwPbzjohhew== undici-types@~5.26.4: version "5.26.5" From 5aac1b59b1bbe4f65e4f0014f93b864e6da8b0dc Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 1 Jul 2024 10:03:39 -0600 Subject: [PATCH 21/31] chore(deps): bump rehype-raw from 6.1.1 to 7.0.0 in /superset-frontend (#29433) Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- superset-frontend/package-lock.json | 1643 +++++++++++++---- .../packages/superset-ui-core/package.json | 2 +- 2 files changed, 1236 insertions(+), 409 deletions(-) diff --git a/superset-frontend/package-lock.json b/superset-frontend/package-lock.json index 03cb16269688..caed62bb309e 100644 --- a/superset-frontend/package-lock.json +++ b/superset-frontend/package-lock.json @@ -23483,12 +23483,6 @@ "resolved": "https://registry.npmjs.org/@types/parse-json/-/parse-json-4.0.0.tgz", "integrity": "sha512-//oorEZjL6sbPcKUaCdIGlIUeH26mgzimjBB77G6XRgnDl/L5wOnpyBGRe/Mmf5CVW3PwEBE1NjiMZ/ssFh4wA==" }, - "node_modules/@types/parse5": { - "version": "6.0.3", - "resolved": "https://registry.npmjs.org/@types/parse5/-/parse5-6.0.3.tgz", - "integrity": "sha512-SuT16Q1K51EAVPz1K29DJ/sXjhSQ0zjvsypYJ6tlwVsRV9jwW5Adq2ch8Dq8kDBCkYnELS7N7VNCSB5nC56t/g==", - "license": "MIT" - }, "node_modules/@types/prettier": { "version": "2.7.3", "resolved": "https://registry.npmjs.org/@types/prettier/-/prettier-2.7.3.tgz", @@ -32777,6 +32771,18 @@ "node": ">= 4.2.1" } }, + "node_modules/devlop": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/devlop/-/devlop-1.1.0.tgz", + "integrity": "sha512-RWmIqhcFf1lRYBvNmr7qTNuyCt/7/ns2jbpp1+PalgE/rDQcBT0fioSMUpJ93irlUhC5hrg4cYqe6U+0ImW0rA==", + "dependencies": { + "dequal": "^2.0.0" + }, + "funding": { + "type": "github", + "url": "https://github.com/sponsors/wooorm" + } + }, "node_modules/devtools-protocol": { "version": "0.0.1232444", "resolved": "https://registry.npmjs.org/devtools-protocol/-/devtools-protocol-0.0.1232444.tgz", @@ -38681,11 +38687,264 @@ "node": ">= 0.4" } }, + "node_modules/hast-util-from-parse5": 
{ + "version": "8.0.1", + "resolved": "https://registry.npmjs.org/hast-util-from-parse5/-/hast-util-from-parse5-8.0.1.tgz", + "integrity": "sha512-Er/Iixbc7IEa7r/XLtuG52zoqn/b3Xng/w6aZQ0xGVxzhw5xUFxcRqdPzP6yFi/4HBYRaifaI5fQ1RH8n0ZeOQ==", + "dependencies": { + "@types/hast": "^3.0.0", + "@types/unist": "^3.0.0", + "devlop": "^1.0.0", + "hastscript": "^8.0.0", + "property-information": "^6.0.0", + "vfile": "^6.0.0", + "vfile-location": "^5.0.0", + "web-namespaces": "^2.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/hast-util-from-parse5/node_modules/@types/hast": { + "version": "3.0.4", + "resolved": "https://registry.npmjs.org/@types/hast/-/hast-3.0.4.tgz", + "integrity": "sha512-WPs+bbQw5aCj+x6laNGWLH3wviHtoCv/P3+otBhbOhJgG8qtpdAMlTCxLtsTWA7LH1Oh/bFCHsBn0TPS5m30EQ==", + "dependencies": { + "@types/unist": "*" + } + }, + "node_modules/hast-util-from-parse5/node_modules/@types/unist": { + "version": "3.0.2", + "resolved": "https://registry.npmjs.org/@types/unist/-/unist-3.0.2.tgz", + "integrity": "sha512-dqId9J8K/vGi5Zr7oo212BGii5m3q5Hxlkwy3WpYuKPklmBEvsbMYYyLxAQpSffdLl/gdW0XUpKWFvYmyoWCoQ==" + }, + "node_modules/hast-util-from-parse5/node_modules/comma-separated-tokens": { + "version": "2.0.3", + "resolved": "https://registry.npmjs.org/comma-separated-tokens/-/comma-separated-tokens-2.0.3.tgz", + "integrity": "sha512-Fu4hJdvzeylCfQPp9SGWidpzrMs7tTrlu6Vb8XGaRGck8QSNZJJp538Wrb60Lax4fPwR64ViY468OIUTbRlGZg==", + "funding": { + "type": "github", + "url": "https://github.com/sponsors/wooorm" + } + }, + "node_modules/hast-util-from-parse5/node_modules/hast-util-parse-selector": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/hast-util-parse-selector/-/hast-util-parse-selector-4.0.0.tgz", + "integrity": "sha512-wkQCkSYoOGCRKERFWcxMVMOcYE2K1AaNLU8DXS9arxnLOUEWbOXKXiJUNzEpqZ3JOKpnha3jkFrumEjVliDe7A==", + "dependencies": { + "@types/hast": "^3.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/hast-util-from-parse5/node_modules/hastscript": { + "version": "8.0.0", + "resolved": "https://registry.npmjs.org/hastscript/-/hastscript-8.0.0.tgz", + "integrity": "sha512-dMOtzCEd3ABUeSIISmrETiKuyydk1w0pa+gE/uormcTpSYuaNJPbX1NU3JLyscSLjwAQM8bWMhhIlnCqnRvDTw==", + "dependencies": { + "@types/hast": "^3.0.0", + "comma-separated-tokens": "^2.0.0", + "hast-util-parse-selector": "^4.0.0", + "property-information": "^6.0.0", + "space-separated-tokens": "^2.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/hast-util-from-parse5/node_modules/property-information": { + "version": "6.5.0", + "resolved": "https://registry.npmjs.org/property-information/-/property-information-6.5.0.tgz", + "integrity": "sha512-PgTgs/BlvHxOu8QuEN7wi5A0OmXaBcHpmCSTehcs6Uuu9IkDIEo13Hy7n898RHfrQ49vKCoGeWZSaAK01nwVig==", + "funding": { + "type": "github", + "url": "https://github.com/sponsors/wooorm" + } + }, + "node_modules/hast-util-from-parse5/node_modules/space-separated-tokens": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/space-separated-tokens/-/space-separated-tokens-2.0.2.tgz", + "integrity": "sha512-PEGlAwrG8yXGXRjW32fGbg66JAlOAwbObuqVoJpv/mRgoWDQfgH1wDPvtzWyUSNAXBGSk8h755YDbbcEy3SH2Q==", + "funding": { + "type": "github", + "url": "https://github.com/sponsors/wooorm" + } + }, + "node_modules/hast-util-from-parse5/node_modules/unist-util-stringify-position": { + "version": 
"4.0.0", + "resolved": "https://registry.npmjs.org/unist-util-stringify-position/-/unist-util-stringify-position-4.0.0.tgz", + "integrity": "sha512-0ASV06AAoKCDkS2+xw5RXJywruurpbC4JZSm7nr7MOt1ojAzvyyaO+UxZf18j8FCF6kmzCZKcAgN/yu2gm2XgQ==", + "dependencies": { + "@types/unist": "^3.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/hast-util-from-parse5/node_modules/vfile": { + "version": "6.0.1", + "resolved": "https://registry.npmjs.org/vfile/-/vfile-6.0.1.tgz", + "integrity": "sha512-1bYqc7pt6NIADBJ98UiG0Bn/CHIVOoZ/IyEkqIruLg0mE1BKzkOXY2D6CSqQIcKqgadppE5lrxgWXJmXd7zZJw==", + "dependencies": { + "@types/unist": "^3.0.0", + "unist-util-stringify-position": "^4.0.0", + "vfile-message": "^4.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/hast-util-from-parse5/node_modules/vfile-message": { + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/vfile-message/-/vfile-message-4.0.2.tgz", + "integrity": "sha512-jRDZ1IMLttGj41KcZvlrYAaI3CfqpLpfpf+Mfig13viT6NKvRzWZ+lXz0Y5D60w6uJIBAOGq9mSHf0gktF0duw==", + "dependencies": { + "@types/unist": "^3.0.0", + "unist-util-stringify-position": "^4.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, "node_modules/hast-util-parse-selector": { "version": "2.2.1", "resolved": "https://registry.npmjs.org/hast-util-parse-selector/-/hast-util-parse-selector-2.2.1.tgz", "integrity": "sha512-Xyh0v+nHmQvrOqop2Jqd8gOdyQtE8sIP9IQf7mlVDqp924W4w/8Liuguk2L2qei9hARnQSG2m+wAOCxM7npJVw==" }, + "node_modules/hast-util-raw": { + "version": "9.0.4", + "resolved": "https://registry.npmjs.org/hast-util-raw/-/hast-util-raw-9.0.4.tgz", + "integrity": "sha512-LHE65TD2YiNsHD3YuXcKPHXPLuYh/gjp12mOfU8jxSrm1f/yJpsb0F/KKljS6U9LJoP0Ux+tCe8iJ2AsPzTdgA==", + "dependencies": { + "@types/hast": "^3.0.0", + "@types/unist": "^3.0.0", + "@ungap/structured-clone": "^1.0.0", + "hast-util-from-parse5": "^8.0.0", + "hast-util-to-parse5": "^8.0.0", + "html-void-elements": "^3.0.0", + "mdast-util-to-hast": "^13.0.0", + "parse5": "^7.0.0", + "unist-util-position": "^5.0.0", + "unist-util-visit": "^5.0.0", + "vfile": "^6.0.0", + "web-namespaces": "^2.0.0", + "zwitch": "^2.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/hast-util-raw/node_modules/@types/hast": { + "version": "3.0.4", + "resolved": "https://registry.npmjs.org/@types/hast/-/hast-3.0.4.tgz", + "integrity": "sha512-WPs+bbQw5aCj+x6laNGWLH3wviHtoCv/P3+otBhbOhJgG8qtpdAMlTCxLtsTWA7LH1Oh/bFCHsBn0TPS5m30EQ==", + "dependencies": { + "@types/unist": "*" + } + }, + "node_modules/hast-util-raw/node_modules/@types/unist": { + "version": "3.0.2", + "resolved": "https://registry.npmjs.org/@types/unist/-/unist-3.0.2.tgz", + "integrity": "sha512-dqId9J8K/vGi5Zr7oo212BGii5m3q5Hxlkwy3WpYuKPklmBEvsbMYYyLxAQpSffdLl/gdW0XUpKWFvYmyoWCoQ==" + }, + "node_modules/hast-util-raw/node_modules/unist-util-is": { + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/unist-util-is/-/unist-util-is-6.0.0.tgz", + "integrity": "sha512-2qCTHimwdxLfz+YzdGfkqNlH0tLi9xjTnHddPmJwtIG9MGsdbutfTc4P+haPD7l7Cjxf/WZj+we5qfVPvvxfYw==", + "dependencies": { + "@types/unist": "^3.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/hast-util-raw/node_modules/unist-util-position": { + "version": "5.0.0", + "resolved": 
"https://registry.npmjs.org/unist-util-position/-/unist-util-position-5.0.0.tgz", + "integrity": "sha512-fucsC7HjXvkB5R3kTCO7kUjRdrS0BJt3M/FPxmHMBOm8JQi2BsHAHFsy27E0EolP8rp0NzXsJ+jNPyDWvOJZPA==", + "dependencies": { + "@types/unist": "^3.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/hast-util-raw/node_modules/unist-util-stringify-position": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/unist-util-stringify-position/-/unist-util-stringify-position-4.0.0.tgz", + "integrity": "sha512-0ASV06AAoKCDkS2+xw5RXJywruurpbC4JZSm7nr7MOt1ojAzvyyaO+UxZf18j8FCF6kmzCZKcAgN/yu2gm2XgQ==", + "dependencies": { + "@types/unist": "^3.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/hast-util-raw/node_modules/unist-util-visit": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/unist-util-visit/-/unist-util-visit-5.0.0.tgz", + "integrity": "sha512-MR04uvD+07cwl/yhVuVWAtw+3GOR/knlL55Nd/wAdblk27GCVt3lqpTivy/tkJcZoNPzTwS1Y+KMojlLDhoTzg==", + "dependencies": { + "@types/unist": "^3.0.0", + "unist-util-is": "^6.0.0", + "unist-util-visit-parents": "^6.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/hast-util-raw/node_modules/unist-util-visit-parents": { + "version": "6.0.1", + "resolved": "https://registry.npmjs.org/unist-util-visit-parents/-/unist-util-visit-parents-6.0.1.tgz", + "integrity": "sha512-L/PqWzfTP9lzzEa6CKs0k2nARxTdZduw3zyh8d2NVBnsyvHjSX4TWse388YrrQKbvI8w20fGjGlhgT96WwKykw==", + "dependencies": { + "@types/unist": "^3.0.0", + "unist-util-is": "^6.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/hast-util-raw/node_modules/vfile": { + "version": "6.0.1", + "resolved": "https://registry.npmjs.org/vfile/-/vfile-6.0.1.tgz", + "integrity": "sha512-1bYqc7pt6NIADBJ98UiG0Bn/CHIVOoZ/IyEkqIruLg0mE1BKzkOXY2D6CSqQIcKqgadppE5lrxgWXJmXd7zZJw==", + "dependencies": { + "@types/unist": "^3.0.0", + "unist-util-stringify-position": "^4.0.0", + "vfile-message": "^4.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/hast-util-raw/node_modules/vfile-message": { + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/vfile-message/-/vfile-message-4.0.2.tgz", + "integrity": "sha512-jRDZ1IMLttGj41KcZvlrYAaI3CfqpLpfpf+Mfig13viT6NKvRzWZ+lXz0Y5D60w6uJIBAOGq9mSHf0gktF0duw==", + "dependencies": { + "@types/unist": "^3.0.0", + "unist-util-stringify-position": "^4.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, "node_modules/hast-util-sanitize": { "version": "5.0.1", "resolved": "https://registry.npmjs.org/hast-util-sanitize/-/hast-util-sanitize-5.0.1.tgz", @@ -38800,6 +39059,59 @@ "inline-style-parser": "0.1.1" } }, + "node_modules/hast-util-to-parse5": { + "version": "8.0.0", + "resolved": "https://registry.npmjs.org/hast-util-to-parse5/-/hast-util-to-parse5-8.0.0.tgz", + "integrity": "sha512-3KKrV5ZVI8if87DVSi1vDeByYrkGzg4mEfeu4alwgmmIeARiBLKCZS2uw5Gb6nU9x9Yufyj3iudm6i7nl52PFw==", + "dependencies": { + "@types/hast": "^3.0.0", + "comma-separated-tokens": "^2.0.0", + "devlop": "^1.0.0", + "property-information": "^6.0.0", + "space-separated-tokens": "^2.0.0", + "web-namespaces": "^2.0.0", + "zwitch": "^2.0.0" + }, + "funding": { + "type": "opencollective", + "url": 
"https://opencollective.com/unified" + } + }, + "node_modules/hast-util-to-parse5/node_modules/@types/hast": { + "version": "3.0.4", + "resolved": "https://registry.npmjs.org/@types/hast/-/hast-3.0.4.tgz", + "integrity": "sha512-WPs+bbQw5aCj+x6laNGWLH3wviHtoCv/P3+otBhbOhJgG8qtpdAMlTCxLtsTWA7LH1Oh/bFCHsBn0TPS5m30EQ==", + "dependencies": { + "@types/unist": "*" + } + }, + "node_modules/hast-util-to-parse5/node_modules/comma-separated-tokens": { + "version": "2.0.3", + "resolved": "https://registry.npmjs.org/comma-separated-tokens/-/comma-separated-tokens-2.0.3.tgz", + "integrity": "sha512-Fu4hJdvzeylCfQPp9SGWidpzrMs7tTrlu6Vb8XGaRGck8QSNZJJp538Wrb60Lax4fPwR64ViY468OIUTbRlGZg==", + "funding": { + "type": "github", + "url": "https://github.com/sponsors/wooorm" + } + }, + "node_modules/hast-util-to-parse5/node_modules/property-information": { + "version": "6.5.0", + "resolved": "https://registry.npmjs.org/property-information/-/property-information-6.5.0.tgz", + "integrity": "sha512-PgTgs/BlvHxOu8QuEN7wi5A0OmXaBcHpmCSTehcs6Uuu9IkDIEo13Hy7n898RHfrQ49vKCoGeWZSaAK01nwVig==", + "funding": { + "type": "github", + "url": "https://github.com/sponsors/wooorm" + } + }, + "node_modules/hast-util-to-parse5/node_modules/space-separated-tokens": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/space-separated-tokens/-/space-separated-tokens-2.0.2.tgz", + "integrity": "sha512-PEGlAwrG8yXGXRjW32fGbg66JAlOAwbObuqVoJpv/mRgoWDQfgH1wDPvtzWyUSNAXBGSk8h755YDbbcEy3SH2Q==", + "funding": { + "type": "github", + "url": "https://github.com/sponsors/wooorm" + } + }, "node_modules/hast-util-whitespace": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/hast-util-whitespace/-/hast-util-whitespace-2.0.0.tgz", @@ -38990,6 +39302,15 @@ "url": "https://github.com/sponsors/sindresorhus" } }, + "node_modules/html-void-elements": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/html-void-elements/-/html-void-elements-3.0.0.tgz", + "integrity": "sha512-bEqo66MRXsUGxWHV5IP0PUiAWwoEjba4VCzg0LjFJBpchPaTfyfCKTG6bc5F8ucKec3q5y6qOdGyYTSBEvhCrg==", + "funding": { + "type": "github", + "url": "https://github.com/sponsors/wooorm" + } + }, "node_modules/html-webpack-plugin": { "version": "5.6.0", "resolved": "https://registry.npmjs.org/html-webpack-plugin/-/html-webpack-plugin-5.6.0.tgz", @@ -51380,6 +51701,221 @@ "url": "https://opencollective.com/unified" } }, + "node_modules/mdast-util-to-hast": { + "version": "13.2.0", + "resolved": "https://registry.npmjs.org/mdast-util-to-hast/-/mdast-util-to-hast-13.2.0.tgz", + "integrity": "sha512-QGYKEuUsYT9ykKBCMOEDLsU5JRObWQusAolFMeko/tYPufNkRffBAQjIE+99jbA87xv6FgmjLtwjh9wBWajwAA==", + "dependencies": { + "@types/hast": "^3.0.0", + "@types/mdast": "^4.0.0", + "@ungap/structured-clone": "^1.0.0", + "devlop": "^1.0.0", + "micromark-util-sanitize-uri": "^2.0.0", + "trim-lines": "^3.0.0", + "unist-util-position": "^5.0.0", + "unist-util-visit": "^5.0.0", + "vfile": "^6.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/mdast-util-to-hast/node_modules/@types/hast": { + "version": "3.0.4", + "resolved": "https://registry.npmjs.org/@types/hast/-/hast-3.0.4.tgz", + "integrity": "sha512-WPs+bbQw5aCj+x6laNGWLH3wviHtoCv/P3+otBhbOhJgG8qtpdAMlTCxLtsTWA7LH1Oh/bFCHsBn0TPS5m30EQ==", + "dependencies": { + "@types/unist": "*" + } + }, + "node_modules/mdast-util-to-hast/node_modules/@types/mdast": { + "version": "4.0.4", + "resolved": "https://registry.npmjs.org/@types/mdast/-/mdast-4.0.4.tgz", + 
"integrity": "sha512-kGaNbPh1k7AFzgpud/gMdvIm5xuECykRR+JnWKQno9TAXVa6WIVCGTPvYGekIDL4uwCZQSYbUxNBSb1aUo79oA==", + "dependencies": { + "@types/unist": "*" + } + }, + "node_modules/mdast-util-to-hast/node_modules/@types/unist": { + "version": "3.0.2", + "resolved": "https://registry.npmjs.org/@types/unist/-/unist-3.0.2.tgz", + "integrity": "sha512-dqId9J8K/vGi5Zr7oo212BGii5m3q5Hxlkwy3WpYuKPklmBEvsbMYYyLxAQpSffdLl/gdW0XUpKWFvYmyoWCoQ==" + }, + "node_modules/mdast-util-to-hast/node_modules/micromark-util-character": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/micromark-util-character/-/micromark-util-character-2.1.0.tgz", + "integrity": "sha512-KvOVV+X1yLBfs9dCBSopq/+G1PcgT3lAK07mC4BzXi5E7ahzMAF8oIupDDJ6mievI6F+lAATkbQQlQixJfT3aQ==", + "funding": [ + { + "type": "GitHub Sponsors", + "url": "https://github.com/sponsors/unifiedjs" + }, + { + "type": "OpenCollective", + "url": "https://opencollective.com/unified" + } + ], + "dependencies": { + "micromark-util-symbol": "^2.0.0", + "micromark-util-types": "^2.0.0" + } + }, + "node_modules/mdast-util-to-hast/node_modules/micromark-util-encode": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/micromark-util-encode/-/micromark-util-encode-2.0.0.tgz", + "integrity": "sha512-pS+ROfCXAGLWCOc8egcBvT0kf27GoWMqtdarNfDcjb6YLuV5cM3ioG45Ys2qOVqeqSbjaKg72vU+Wby3eddPsA==", + "funding": [ + { + "type": "GitHub Sponsors", + "url": "https://github.com/sponsors/unifiedjs" + }, + { + "type": "OpenCollective", + "url": "https://opencollective.com/unified" + } + ] + }, + "node_modules/mdast-util-to-hast/node_modules/micromark-util-sanitize-uri": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/micromark-util-sanitize-uri/-/micromark-util-sanitize-uri-2.0.0.tgz", + "integrity": "sha512-WhYv5UEcZrbAtlsnPuChHUAsu/iBPOVaEVsntLBIdpibO0ddy8OzavZz3iL2xVvBZOpolujSliP65Kq0/7KIYw==", + "funding": [ + { + "type": "GitHub Sponsors", + "url": "https://github.com/sponsors/unifiedjs" + }, + { + "type": "OpenCollective", + "url": "https://opencollective.com/unified" + } + ], + "dependencies": { + "micromark-util-character": "^2.0.0", + "micromark-util-encode": "^2.0.0", + "micromark-util-symbol": "^2.0.0" + } + }, + "node_modules/mdast-util-to-hast/node_modules/micromark-util-symbol": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/micromark-util-symbol/-/micromark-util-symbol-2.0.0.tgz", + "integrity": "sha512-8JZt9ElZ5kyTnO94muPxIGS8oyElRJaiJO8EzV6ZSyGQ1Is8xwl4Q45qU5UOg+bGH4AikWziz0iN4sFLWs8PGw==", + "funding": [ + { + "type": "GitHub Sponsors", + "url": "https://github.com/sponsors/unifiedjs" + }, + { + "type": "OpenCollective", + "url": "https://opencollective.com/unified" + } + ] + }, + "node_modules/mdast-util-to-hast/node_modules/micromark-util-types": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/micromark-util-types/-/micromark-util-types-2.0.0.tgz", + "integrity": "sha512-oNh6S2WMHWRZrmutsRmDDfkzKtxF+bc2VxLC9dvtrDIRFln627VsFP6fLMgTryGDljgLPjkrzQSDcPrjPyDJ5w==", + "funding": [ + { + "type": "GitHub Sponsors", + "url": "https://github.com/sponsors/unifiedjs" + }, + { + "type": "OpenCollective", + "url": "https://opencollective.com/unified" + } + ] + }, + "node_modules/mdast-util-to-hast/node_modules/unist-util-is": { + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/unist-util-is/-/unist-util-is-6.0.0.tgz", + "integrity": "sha512-2qCTHimwdxLfz+YzdGfkqNlH0tLi9xjTnHddPmJwtIG9MGsdbutfTc4P+haPD7l7Cjxf/WZj+we5qfVPvvxfYw==", + "dependencies": { + "@types/unist": "^3.0.0" + }, 
+ "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/mdast-util-to-hast/node_modules/unist-util-position": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/unist-util-position/-/unist-util-position-5.0.0.tgz", + "integrity": "sha512-fucsC7HjXvkB5R3kTCO7kUjRdrS0BJt3M/FPxmHMBOm8JQi2BsHAHFsy27E0EolP8rp0NzXsJ+jNPyDWvOJZPA==", + "dependencies": { + "@types/unist": "^3.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/mdast-util-to-hast/node_modules/unist-util-stringify-position": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/unist-util-stringify-position/-/unist-util-stringify-position-4.0.0.tgz", + "integrity": "sha512-0ASV06AAoKCDkS2+xw5RXJywruurpbC4JZSm7nr7MOt1ojAzvyyaO+UxZf18j8FCF6kmzCZKcAgN/yu2gm2XgQ==", + "dependencies": { + "@types/unist": "^3.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/mdast-util-to-hast/node_modules/unist-util-visit": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/unist-util-visit/-/unist-util-visit-5.0.0.tgz", + "integrity": "sha512-MR04uvD+07cwl/yhVuVWAtw+3GOR/knlL55Nd/wAdblk27GCVt3lqpTivy/tkJcZoNPzTwS1Y+KMojlLDhoTzg==", + "dependencies": { + "@types/unist": "^3.0.0", + "unist-util-is": "^6.0.0", + "unist-util-visit-parents": "^6.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/mdast-util-to-hast/node_modules/unist-util-visit-parents": { + "version": "6.0.1", + "resolved": "https://registry.npmjs.org/unist-util-visit-parents/-/unist-util-visit-parents-6.0.1.tgz", + "integrity": "sha512-L/PqWzfTP9lzzEa6CKs0k2nARxTdZduw3zyh8d2NVBnsyvHjSX4TWse388YrrQKbvI8w20fGjGlhgT96WwKykw==", + "dependencies": { + "@types/unist": "^3.0.0", + "unist-util-is": "^6.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/mdast-util-to-hast/node_modules/vfile": { + "version": "6.0.1", + "resolved": "https://registry.npmjs.org/vfile/-/vfile-6.0.1.tgz", + "integrity": "sha512-1bYqc7pt6NIADBJ98UiG0Bn/CHIVOoZ/IyEkqIruLg0mE1BKzkOXY2D6CSqQIcKqgadppE5lrxgWXJmXd7zZJw==", + "dependencies": { + "@types/unist": "^3.0.0", + "unist-util-stringify-position": "^4.0.0", + "vfile-message": "^4.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/mdast-util-to-hast/node_modules/vfile-message": { + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/vfile-message/-/vfile-message-4.0.2.tgz", + "integrity": "sha512-jRDZ1IMLttGj41KcZvlrYAaI3CfqpLpfpf+Mfig13viT6NKvRzWZ+lXz0Y5D60w6uJIBAOGq9mSHf0gktF0duw==", + "dependencies": { + "@types/unist": "^3.0.0", + "unist-util-stringify-position": "^4.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, "node_modules/mdast-util-to-string": { "version": "1.1.0", "resolved": "https://registry.npmjs.org/mdast-util-to-string/-/mdast-util-to-string-1.1.0.tgz", @@ -60210,173 +60746,71 @@ } }, "node_modules/rehype-raw": { - "version": "6.1.1", - "resolved": "https://registry.npmjs.org/rehype-raw/-/rehype-raw-6.1.1.tgz", - "integrity": "sha512-d6AKtisSRtDRX4aSPsJGTfnzrX2ZkHQLE5kiUuGOeEoLpbEulFF4hj0mLPbsa+7vmguDKOVVEQdHKDSwoaIDsQ==", - "dependencies": { - "@types/hast": "^2.0.0", - "hast-util-raw": "^7.2.0", - "unified": "^10.0.0" - }, - 
"funding": { - "type": "opencollective", - "url": "https://opencollective.com/unified" - } - }, - "node_modules/rehype-raw/node_modules/comma-separated-tokens": { - "version": "2.0.2", - "resolved": "https://registry.npmjs.org/comma-separated-tokens/-/comma-separated-tokens-2.0.2.tgz", - "integrity": "sha512-G5yTt3KQN4Yn7Yk4ed73hlZ1evrFKXeUW3086p3PRFNp7m2vIjI6Pg+Kgb+oyzhd9F2qdcoj67+y3SdxL5XWsg==", - "funding": { - "type": "github", - "url": "https://github.com/sponsors/wooorm" - } - }, - "node_modules/rehype-raw/node_modules/hast-to-hyperscript": { - "version": "10.0.1", - "resolved": "https://registry.npmjs.org/hast-to-hyperscript/-/hast-to-hyperscript-10.0.1.tgz", - "integrity": "sha512-dhIVGoKCQVewFi+vz3Vt567E4ejMppS1haBRL6TEmeLeJVB1i/FJIIg/e6s1Bwn0g5qtYojHEKvyGA+OZuyifw==", + "version": "7.0.0", + "resolved": "https://registry.npmjs.org/rehype-raw/-/rehype-raw-7.0.0.tgz", + "integrity": "sha512-/aE8hCfKlQeA8LmyeyQvQF3eBiLRGNlfBJEvWH7ivp9sBqs7TNqBL5X3v157rM4IFETqDnIOO+z5M/biZbo9Ww==", "dependencies": { - "@types/unist": "^2.0.0", - "comma-separated-tokens": "^2.0.0", - "property-information": "^6.0.0", - "space-separated-tokens": "^2.0.0", - "style-to-object": "^0.3.0", - "unist-util-is": "^5.0.0", - "web-namespaces": "^2.0.0" + "@types/hast": "^3.0.0", + "hast-util-raw": "^9.0.0", + "vfile": "^6.0.0" }, "funding": { "type": "opencollective", "url": "https://opencollective.com/unified" } }, - "node_modules/rehype-raw/node_modules/hast-util-from-parse5": { - "version": "7.1.0", - "resolved": "https://registry.npmjs.org/hast-util-from-parse5/-/hast-util-from-parse5-7.1.0.tgz", - "integrity": "sha512-m8yhANIAccpU4K6+121KpPP55sSl9/samzQSQGpb0mTExcNh2WlvjtMwSWFhg6uqD4Rr6Nfa8N6TMypQM51rzQ==", + "node_modules/rehype-raw/node_modules/@types/hast": { + "version": "3.0.4", + "resolved": "https://registry.npmjs.org/@types/hast/-/hast-3.0.4.tgz", + "integrity": "sha512-WPs+bbQw5aCj+x6laNGWLH3wviHtoCv/P3+otBhbOhJgG8qtpdAMlTCxLtsTWA7LH1Oh/bFCHsBn0TPS5m30EQ==", "dependencies": { - "@types/hast": "^2.0.0", - "@types/parse5": "^6.0.0", - "@types/unist": "^2.0.0", - "hastscript": "^7.0.0", - "property-information": "^6.0.0", - "vfile": "^5.0.0", - "vfile-location": "^4.0.0", - "web-namespaces": "^2.0.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/unified" + "@types/unist": "*" } }, - "node_modules/rehype-raw/node_modules/hast-util-parse-selector": { - "version": "3.1.0", - "resolved": "https://registry.npmjs.org/hast-util-parse-selector/-/hast-util-parse-selector-3.1.0.tgz", - "integrity": "sha512-AyjlI2pTAZEOeu7GeBPZhROx0RHBnydkQIXlhnFzDi0qfXTmGUWoCYZtomHbrdrheV4VFUlPcfJ6LMF5T6sQzg==", - "dependencies": { - "@types/hast": "^2.0.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/unified" - } + "node_modules/rehype-raw/node_modules/@types/unist": { + "version": "3.0.2", + "resolved": "https://registry.npmjs.org/@types/unist/-/unist-3.0.2.tgz", + "integrity": "sha512-dqId9J8K/vGi5Zr7oo212BGii5m3q5Hxlkwy3WpYuKPklmBEvsbMYYyLxAQpSffdLl/gdW0XUpKWFvYmyoWCoQ==" }, - "node_modules/rehype-raw/node_modules/hast-util-raw": { - "version": "7.2.2", - "resolved": "https://registry.npmjs.org/hast-util-raw/-/hast-util-raw-7.2.2.tgz", - "integrity": "sha512-0x3BhhdlBcqRIKyc095lBSDvmQNMY3Eulj2PLsT5XCyKYrxssI5yr3P4Kv/PBo1s/DMkZy2voGkMXECnFCZRLQ==", + "node_modules/rehype-raw/node_modules/unist-util-stringify-position": { + "version": "4.0.0", + "resolved": 
"https://registry.npmjs.org/unist-util-stringify-position/-/unist-util-stringify-position-4.0.0.tgz", + "integrity": "sha512-0ASV06AAoKCDkS2+xw5RXJywruurpbC4JZSm7nr7MOt1ojAzvyyaO+UxZf18j8FCF6kmzCZKcAgN/yu2gm2XgQ==", "dependencies": { - "@types/hast": "^2.0.0", - "@types/parse5": "^6.0.0", - "hast-util-from-parse5": "^7.0.0", - "hast-util-to-parse5": "^7.0.0", - "html-void-elements": "^2.0.0", - "parse5": "^6.0.0", - "unist-util-position": "^4.0.0", - "unist-util-visit": "^4.0.0", - "vfile": "^5.0.0", - "web-namespaces": "^2.0.0", - "zwitch": "^2.0.0" + "@types/unist": "^3.0.0" }, "funding": { "type": "opencollective", "url": "https://opencollective.com/unified" } }, - "node_modules/rehype-raw/node_modules/hast-util-to-parse5": { - "version": "7.0.0", - "resolved": "https://registry.npmjs.org/hast-util-to-parse5/-/hast-util-to-parse5-7.0.0.tgz", - "integrity": "sha512-YHiS6aTaZ3N0Q3nxaY/Tj98D6kM8QX5Q8xqgg8G45zR7PvWnPGPP0vcKCgb/moIydEJ/QWczVrX0JODCVeoV7A==", + "node_modules/rehype-raw/node_modules/vfile": { + "version": "6.0.1", + "resolved": "https://registry.npmjs.org/vfile/-/vfile-6.0.1.tgz", + "integrity": "sha512-1bYqc7pt6NIADBJ98UiG0Bn/CHIVOoZ/IyEkqIruLg0mE1BKzkOXY2D6CSqQIcKqgadppE5lrxgWXJmXd7zZJw==", "dependencies": { - "@types/hast": "^2.0.0", - "@types/parse5": "^6.0.0", - "hast-to-hyperscript": "^10.0.0", - "property-information": "^6.0.0", - "web-namespaces": "^2.0.0", - "zwitch": "^2.0.0" + "@types/unist": "^3.0.0", + "unist-util-stringify-position": "^4.0.0", + "vfile-message": "^4.0.0" }, "funding": { "type": "opencollective", "url": "https://opencollective.com/unified" } }, - "node_modules/rehype-raw/node_modules/hastscript": { - "version": "7.1.0", - "resolved": "https://registry.npmjs.org/hastscript/-/hastscript-7.1.0.tgz", - "integrity": "sha512-uBjaTTLN0MkCZxY/R2fWUOcu7FRtUVzKRO5P/RAfgsu3yFiMB1JWCO4AjeVkgHxAira1f2UecHK5WfS9QurlWA==", + "node_modules/rehype-raw/node_modules/vfile-message": { + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/vfile-message/-/vfile-message-4.0.2.tgz", + "integrity": "sha512-jRDZ1IMLttGj41KcZvlrYAaI3CfqpLpfpf+Mfig13viT6NKvRzWZ+lXz0Y5D60w6uJIBAOGq9mSHf0gktF0duw==", "dependencies": { - "@types/hast": "^2.0.0", - "comma-separated-tokens": "^2.0.0", - "hast-util-parse-selector": "^3.0.0", - "property-information": "^6.0.0", - "space-separated-tokens": "^2.0.0" + "@types/unist": "^3.0.0", + "unist-util-stringify-position": "^4.0.0" }, "funding": { "type": "opencollective", "url": "https://opencollective.com/unified" } }, - "node_modules/rehype-raw/node_modules/html-void-elements": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/html-void-elements/-/html-void-elements-2.0.1.tgz", - "integrity": "sha512-0quDb7s97CfemeJAnW9wC0hw78MtW7NU3hqtCD75g2vFlDLt36llsYD7uB7SUzojLMP24N5IatXf7ylGXiGG9A==", - "funding": { - "type": "github", - "url": "https://github.com/sponsors/wooorm" - } - }, - "node_modules/rehype-raw/node_modules/parse5": { - "version": "6.0.1", - "resolved": "https://registry.npmjs.org/parse5/-/parse5-6.0.1.tgz", - "integrity": "sha512-Ofn/CTFzRGTTxwpNEs9PP93gXShHcTq255nzRYSKe8AkVpZY7e1fpmTfOyoIvjP5HG7Z2ZM7VS9PPhQGW2pOpw==" - }, - "node_modules/rehype-raw/node_modules/property-information": { - "version": "6.1.1", - "resolved": "https://registry.npmjs.org/property-information/-/property-information-6.1.1.tgz", - "integrity": "sha512-hrzC564QIl0r0vy4l6MvRLhafmUowhO/O3KgVSoXIbbA2Sz4j8HGpJc6T2cubRVwMwpdiG/vKGfhT4IixmKN9w==", - "funding": { - "type": "github", - "url": "https://github.com/sponsors/wooorm" - } - }, - 
"node_modules/rehype-raw/node_modules/space-separated-tokens": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/space-separated-tokens/-/space-separated-tokens-2.0.1.tgz", - "integrity": "sha512-ekwEbFp5aqSPKaqeY1PGrlGQxPNaq+Cnx4+bE2D8sciBQrHpbwoBbawqTN2+6jPs9IdWxxiUcN0K2pkczD3zmw==", - "funding": { - "type": "github", - "url": "https://github.com/sponsors/wooorm" - } - }, - "node_modules/rehype-raw/node_modules/web-namespaces": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/web-namespaces/-/web-namespaces-2.0.1.tgz", - "integrity": "sha512-bKr1DkiNa2krS7qxNtdrtHAmzuYGFQLiQ13TsorsdT6ULTkPLKuu5+GsFpDlg6JFjUTwX2DyhMPG2be8uPrqsQ==", - "funding": { - "type": "github", - "url": "https://github.com/sponsors/wooorm" - } - }, "node_modules/rehype-sanitize": { "version": "6.0.0", "resolved": "https://registry.npmjs.org/rehype-sanitize/-/rehype-sanitize-6.0.0.tgz", @@ -65461,12 +65895,56 @@ } }, "node_modules/vfile-location": { - "version": "4.0.1", - "resolved": "https://registry.npmjs.org/vfile-location/-/vfile-location-4.0.1.tgz", - "integrity": "sha512-JDxPlTbZrZCQXogGheBHjbRWjESSPEak770XwWPfw5mTc1v1nWGLB/apzZxsx8a0SJVfF8HK8ql8RD308vXRUw==", + "version": "5.0.2", + "resolved": "https://registry.npmjs.org/vfile-location/-/vfile-location-5.0.2.tgz", + "integrity": "sha512-NXPYyxyBSH7zB5U6+3uDdd6Nybz6o6/od9rk8bp9H8GR3L+cm/fC0uUTbqBmUTnMCUDslAGBOIKNfvvb+gGlDg==", "dependencies": { - "@types/unist": "^2.0.0", - "vfile": "^5.0.0" + "@types/unist": "^3.0.0", + "vfile": "^6.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/vfile-location/node_modules/@types/unist": { + "version": "3.0.2", + "resolved": "https://registry.npmjs.org/@types/unist/-/unist-3.0.2.tgz", + "integrity": "sha512-dqId9J8K/vGi5Zr7oo212BGii5m3q5Hxlkwy3WpYuKPklmBEvsbMYYyLxAQpSffdLl/gdW0XUpKWFvYmyoWCoQ==" + }, + "node_modules/vfile-location/node_modules/unist-util-stringify-position": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/unist-util-stringify-position/-/unist-util-stringify-position-4.0.0.tgz", + "integrity": "sha512-0ASV06AAoKCDkS2+xw5RXJywruurpbC4JZSm7nr7MOt1ojAzvyyaO+UxZf18j8FCF6kmzCZKcAgN/yu2gm2XgQ==", + "dependencies": { + "@types/unist": "^3.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/vfile-location/node_modules/vfile": { + "version": "6.0.1", + "resolved": "https://registry.npmjs.org/vfile/-/vfile-6.0.1.tgz", + "integrity": "sha512-1bYqc7pt6NIADBJ98UiG0Bn/CHIVOoZ/IyEkqIruLg0mE1BKzkOXY2D6CSqQIcKqgadppE5lrxgWXJmXd7zZJw==", + "dependencies": { + "@types/unist": "^3.0.0", + "unist-util-stringify-position": "^4.0.0", + "vfile-message": "^4.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/vfile-location/node_modules/vfile-message": { + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/vfile-message/-/vfile-message-4.0.2.tgz", + "integrity": "sha512-jRDZ1IMLttGj41KcZvlrYAaI3CfqpLpfpf+Mfig13viT6NKvRzWZ+lXz0Y5D60w6uJIBAOGq9mSHf0gktF0duw==", + "dependencies": { + "@types/unist": "^3.0.0", + "unist-util-stringify-position": "^4.0.0" }, "funding": { "type": "opencollective", @@ -65667,6 +66145,15 @@ "defaults": "^1.0.3" } }, + "node_modules/web-namespaces": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/web-namespaces/-/web-namespaces-2.0.1.tgz", + "integrity": 
"sha512-bKr1DkiNa2krS7qxNtdrtHAmzuYGFQLiQ13TsorsdT6ULTkPLKuu5+GsFpDlg6JFjUTwX2DyhMPG2be8uPrqsQ==", + "funding": { + "type": "github", + "url": "https://github.com/sponsors/wooorm" + } + }, "node_modules/web-streams-polyfill": { "version": "3.3.2", "resolved": "https://registry.npmjs.org/web-streams-polyfill/-/web-streams-polyfill-3.3.2.tgz", @@ -68195,7 +68682,7 @@ "pretty-ms": "^7.0.0", "react-error-boundary": "^1.2.5", "react-markdown": "^8.0.3", - "rehype-raw": "^6.1.1", + "rehype-raw": "^7.0.0", "rehype-sanitize": "^6.0.0", "remark-gfm": "^3.0.1", "reselect": "^4.0.0", @@ -87229,7 +87716,7 @@ "pretty-ms": "^7.0.0", "react-error-boundary": "^1.2.5", "react-markdown": "^8.0.3", - "rehype-raw": "^6.1.1", + "rehype-raw": "^7.0.0", "rehype-sanitize": "^6.0.0", "remark-gfm": "^3.0.1", "reselect": "^4.0.0", @@ -91210,11 +91697,6 @@ "resolved": "https://registry.npmjs.org/@types/parse-json/-/parse-json-4.0.0.tgz", "integrity": "sha512-//oorEZjL6sbPcKUaCdIGlIUeH26mgzimjBB77G6XRgnDl/L5wOnpyBGRe/Mmf5CVW3PwEBE1NjiMZ/ssFh4wA==" }, - "@types/parse5": { - "version": "6.0.3", - "resolved": "https://registry.npmjs.org/@types/parse5/-/parse5-6.0.3.tgz", - "integrity": "sha512-SuT16Q1K51EAVPz1K29DJ/sXjhSQ0zjvsypYJ6tlwVsRV9jwW5Adq2ch8Dq8kDBCkYnELS7N7VNCSB5nC56t/g==" - }, "@types/prettier": { "version": "2.7.3", "resolved": "https://registry.npmjs.org/@types/prettier/-/prettier-2.7.3.tgz", @@ -98518,6 +99000,14 @@ "debug": "^2.6.0" } }, + "devlop": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/devlop/-/devlop-1.1.0.tgz", + "integrity": "sha512-RWmIqhcFf1lRYBvNmr7qTNuyCt/7/ns2jbpp1+PalgE/rDQcBT0fioSMUpJ93irlUhC5hrg4cYqe6U+0ImW0rA==", + "requires": { + "dequal": "^2.0.0" + } + }, "devtools-protocol": { "version": "0.0.1232444", "resolved": "https://registry.npmjs.org/devtools-protocol/-/devtools-protocol-0.0.1232444.tgz", @@ -103036,11 +103526,200 @@ "function-bind": "^1.1.2" } }, + "hast-util-from-parse5": { + "version": "8.0.1", + "resolved": "https://registry.npmjs.org/hast-util-from-parse5/-/hast-util-from-parse5-8.0.1.tgz", + "integrity": "sha512-Er/Iixbc7IEa7r/XLtuG52zoqn/b3Xng/w6aZQ0xGVxzhw5xUFxcRqdPzP6yFi/4HBYRaifaI5fQ1RH8n0ZeOQ==", + "requires": { + "@types/hast": "^3.0.0", + "@types/unist": "^3.0.0", + "devlop": "^1.0.0", + "hastscript": "^8.0.0", + "property-information": "^6.0.0", + "vfile": "^6.0.0", + "vfile-location": "^5.0.0", + "web-namespaces": "^2.0.0" + }, + "dependencies": { + "@types/hast": { + "version": "3.0.4", + "resolved": "https://registry.npmjs.org/@types/hast/-/hast-3.0.4.tgz", + "integrity": "sha512-WPs+bbQw5aCj+x6laNGWLH3wviHtoCv/P3+otBhbOhJgG8qtpdAMlTCxLtsTWA7LH1Oh/bFCHsBn0TPS5m30EQ==", + "requires": { + "@types/unist": "*" + } + }, + "@types/unist": { + "version": "3.0.2", + "resolved": "https://registry.npmjs.org/@types/unist/-/unist-3.0.2.tgz", + "integrity": "sha512-dqId9J8K/vGi5Zr7oo212BGii5m3q5Hxlkwy3WpYuKPklmBEvsbMYYyLxAQpSffdLl/gdW0XUpKWFvYmyoWCoQ==" + }, + "comma-separated-tokens": { + "version": "2.0.3", + "resolved": "https://registry.npmjs.org/comma-separated-tokens/-/comma-separated-tokens-2.0.3.tgz", + "integrity": "sha512-Fu4hJdvzeylCfQPp9SGWidpzrMs7tTrlu6Vb8XGaRGck8QSNZJJp538Wrb60Lax4fPwR64ViY468OIUTbRlGZg==" + }, + "hast-util-parse-selector": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/hast-util-parse-selector/-/hast-util-parse-selector-4.0.0.tgz", + "integrity": "sha512-wkQCkSYoOGCRKERFWcxMVMOcYE2K1AaNLU8DXS9arxnLOUEWbOXKXiJUNzEpqZ3JOKpnha3jkFrumEjVliDe7A==", + "requires": { + "@types/hast": "^3.0.0" + } + }, + 
"hastscript": { + "version": "8.0.0", + "resolved": "https://registry.npmjs.org/hastscript/-/hastscript-8.0.0.tgz", + "integrity": "sha512-dMOtzCEd3ABUeSIISmrETiKuyydk1w0pa+gE/uormcTpSYuaNJPbX1NU3JLyscSLjwAQM8bWMhhIlnCqnRvDTw==", + "requires": { + "@types/hast": "^3.0.0", + "comma-separated-tokens": "^2.0.0", + "hast-util-parse-selector": "^4.0.0", + "property-information": "^6.0.0", + "space-separated-tokens": "^2.0.0" + } + }, + "property-information": { + "version": "6.5.0", + "resolved": "https://registry.npmjs.org/property-information/-/property-information-6.5.0.tgz", + "integrity": "sha512-PgTgs/BlvHxOu8QuEN7wi5A0OmXaBcHpmCSTehcs6Uuu9IkDIEo13Hy7n898RHfrQ49vKCoGeWZSaAK01nwVig==" + }, + "space-separated-tokens": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/space-separated-tokens/-/space-separated-tokens-2.0.2.tgz", + "integrity": "sha512-PEGlAwrG8yXGXRjW32fGbg66JAlOAwbObuqVoJpv/mRgoWDQfgH1wDPvtzWyUSNAXBGSk8h755YDbbcEy3SH2Q==" + }, + "unist-util-stringify-position": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/unist-util-stringify-position/-/unist-util-stringify-position-4.0.0.tgz", + "integrity": "sha512-0ASV06AAoKCDkS2+xw5RXJywruurpbC4JZSm7nr7MOt1ojAzvyyaO+UxZf18j8FCF6kmzCZKcAgN/yu2gm2XgQ==", + "requires": { + "@types/unist": "^3.0.0" + } + }, + "vfile": { + "version": "6.0.1", + "resolved": "https://registry.npmjs.org/vfile/-/vfile-6.0.1.tgz", + "integrity": "sha512-1bYqc7pt6NIADBJ98UiG0Bn/CHIVOoZ/IyEkqIruLg0mE1BKzkOXY2D6CSqQIcKqgadppE5lrxgWXJmXd7zZJw==", + "requires": { + "@types/unist": "^3.0.0", + "unist-util-stringify-position": "^4.0.0", + "vfile-message": "^4.0.0" + } + }, + "vfile-message": { + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/vfile-message/-/vfile-message-4.0.2.tgz", + "integrity": "sha512-jRDZ1IMLttGj41KcZvlrYAaI3CfqpLpfpf+Mfig13viT6NKvRzWZ+lXz0Y5D60w6uJIBAOGq9mSHf0gktF0duw==", + "requires": { + "@types/unist": "^3.0.0", + "unist-util-stringify-position": "^4.0.0" + } + } + } + }, "hast-util-parse-selector": { "version": "2.2.1", "resolved": "https://registry.npmjs.org/hast-util-parse-selector/-/hast-util-parse-selector-2.2.1.tgz", "integrity": "sha512-Xyh0v+nHmQvrOqop2Jqd8gOdyQtE8sIP9IQf7mlVDqp924W4w/8Liuguk2L2qei9hARnQSG2m+wAOCxM7npJVw==" }, + "hast-util-raw": { + "version": "9.0.4", + "resolved": "https://registry.npmjs.org/hast-util-raw/-/hast-util-raw-9.0.4.tgz", + "integrity": "sha512-LHE65TD2YiNsHD3YuXcKPHXPLuYh/gjp12mOfU8jxSrm1f/yJpsb0F/KKljS6U9LJoP0Ux+tCe8iJ2AsPzTdgA==", + "requires": { + "@types/hast": "^3.0.0", + "@types/unist": "^3.0.0", + "@ungap/structured-clone": "^1.0.0", + "hast-util-from-parse5": "^8.0.0", + "hast-util-to-parse5": "^8.0.0", + "html-void-elements": "^3.0.0", + "mdast-util-to-hast": "^13.0.0", + "parse5": "^7.0.0", + "unist-util-position": "^5.0.0", + "unist-util-visit": "^5.0.0", + "vfile": "^6.0.0", + "web-namespaces": "^2.0.0", + "zwitch": "^2.0.0" + }, + "dependencies": { + "@types/hast": { + "version": "3.0.4", + "resolved": "https://registry.npmjs.org/@types/hast/-/hast-3.0.4.tgz", + "integrity": "sha512-WPs+bbQw5aCj+x6laNGWLH3wviHtoCv/P3+otBhbOhJgG8qtpdAMlTCxLtsTWA7LH1Oh/bFCHsBn0TPS5m30EQ==", + "requires": { + "@types/unist": "*" + } + }, + "@types/unist": { + "version": "3.0.2", + "resolved": "https://registry.npmjs.org/@types/unist/-/unist-3.0.2.tgz", + "integrity": "sha512-dqId9J8K/vGi5Zr7oo212BGii5m3q5Hxlkwy3WpYuKPklmBEvsbMYYyLxAQpSffdLl/gdW0XUpKWFvYmyoWCoQ==" + }, + "unist-util-is": { + "version": "6.0.0", + "resolved": 
"https://registry.npmjs.org/unist-util-is/-/unist-util-is-6.0.0.tgz", + "integrity": "sha512-2qCTHimwdxLfz+YzdGfkqNlH0tLi9xjTnHddPmJwtIG9MGsdbutfTc4P+haPD7l7Cjxf/WZj+we5qfVPvvxfYw==", + "requires": { + "@types/unist": "^3.0.0" + } + }, + "unist-util-position": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/unist-util-position/-/unist-util-position-5.0.0.tgz", + "integrity": "sha512-fucsC7HjXvkB5R3kTCO7kUjRdrS0BJt3M/FPxmHMBOm8JQi2BsHAHFsy27E0EolP8rp0NzXsJ+jNPyDWvOJZPA==", + "requires": { + "@types/unist": "^3.0.0" + } + }, + "unist-util-stringify-position": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/unist-util-stringify-position/-/unist-util-stringify-position-4.0.0.tgz", + "integrity": "sha512-0ASV06AAoKCDkS2+xw5RXJywruurpbC4JZSm7nr7MOt1ojAzvyyaO+UxZf18j8FCF6kmzCZKcAgN/yu2gm2XgQ==", + "requires": { + "@types/unist": "^3.0.0" + } + }, + "unist-util-visit": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/unist-util-visit/-/unist-util-visit-5.0.0.tgz", + "integrity": "sha512-MR04uvD+07cwl/yhVuVWAtw+3GOR/knlL55Nd/wAdblk27GCVt3lqpTivy/tkJcZoNPzTwS1Y+KMojlLDhoTzg==", + "requires": { + "@types/unist": "^3.0.0", + "unist-util-is": "^6.0.0", + "unist-util-visit-parents": "^6.0.0" + } + }, + "unist-util-visit-parents": { + "version": "6.0.1", + "resolved": "https://registry.npmjs.org/unist-util-visit-parents/-/unist-util-visit-parents-6.0.1.tgz", + "integrity": "sha512-L/PqWzfTP9lzzEa6CKs0k2nARxTdZduw3zyh8d2NVBnsyvHjSX4TWse388YrrQKbvI8w20fGjGlhgT96WwKykw==", + "requires": { + "@types/unist": "^3.0.0", + "unist-util-is": "^6.0.0" + } + }, + "vfile": { + "version": "6.0.1", + "resolved": "https://registry.npmjs.org/vfile/-/vfile-6.0.1.tgz", + "integrity": "sha512-1bYqc7pt6NIADBJ98UiG0Bn/CHIVOoZ/IyEkqIruLg0mE1BKzkOXY2D6CSqQIcKqgadppE5lrxgWXJmXd7zZJw==", + "requires": { + "@types/unist": "^3.0.0", + "unist-util-stringify-position": "^4.0.0", + "vfile-message": "^4.0.0" + } + }, + "vfile-message": { + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/vfile-message/-/vfile-message-4.0.2.tgz", + "integrity": "sha512-jRDZ1IMLttGj41KcZvlrYAaI3CfqpLpfpf+Mfig13viT6NKvRzWZ+lXz0Y5D60w6uJIBAOGq9mSHf0gktF0duw==", + "requires": { + "@types/unist": "^3.0.0", + "unist-util-stringify-position": "^4.0.0" + } + } + } + }, "hast-util-sanitize": { "version": "5.0.1", "resolved": "https://registry.npmjs.org/hast-util-sanitize/-/hast-util-sanitize-5.0.1.tgz", @@ -103135,6 +103814,45 @@ } } }, + "hast-util-to-parse5": { + "version": "8.0.0", + "resolved": "https://registry.npmjs.org/hast-util-to-parse5/-/hast-util-to-parse5-8.0.0.tgz", + "integrity": "sha512-3KKrV5ZVI8if87DVSi1vDeByYrkGzg4mEfeu4alwgmmIeARiBLKCZS2uw5Gb6nU9x9Yufyj3iudm6i7nl52PFw==", + "requires": { + "@types/hast": "^3.0.0", + "comma-separated-tokens": "^2.0.0", + "devlop": "^1.0.0", + "property-information": "^6.0.0", + "space-separated-tokens": "^2.0.0", + "web-namespaces": "^2.0.0", + "zwitch": "^2.0.0" + }, + "dependencies": { + "@types/hast": { + "version": "3.0.4", + "resolved": "https://registry.npmjs.org/@types/hast/-/hast-3.0.4.tgz", + "integrity": "sha512-WPs+bbQw5aCj+x6laNGWLH3wviHtoCv/P3+otBhbOhJgG8qtpdAMlTCxLtsTWA7LH1Oh/bFCHsBn0TPS5m30EQ==", + "requires": { + "@types/unist": "*" + } + }, + "comma-separated-tokens": { + "version": "2.0.3", + "resolved": "https://registry.npmjs.org/comma-separated-tokens/-/comma-separated-tokens-2.0.3.tgz", + "integrity": "sha512-Fu4hJdvzeylCfQPp9SGWidpzrMs7tTrlu6Vb8XGaRGck8QSNZJJp538Wrb60Lax4fPwR64ViY468OIUTbRlGZg==" + }, + 
"property-information": { + "version": "6.5.0", + "resolved": "https://registry.npmjs.org/property-information/-/property-information-6.5.0.tgz", + "integrity": "sha512-PgTgs/BlvHxOu8QuEN7wi5A0OmXaBcHpmCSTehcs6Uuu9IkDIEo13Hy7n898RHfrQ49vKCoGeWZSaAK01nwVig==" + }, + "space-separated-tokens": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/space-separated-tokens/-/space-separated-tokens-2.0.2.tgz", + "integrity": "sha512-PEGlAwrG8yXGXRjW32fGbg66JAlOAwbObuqVoJpv/mRgoWDQfgH1wDPvtzWyUSNAXBGSk8h755YDbbcEy3SH2Q==" + } + } + }, "hast-util-whitespace": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/hast-util-whitespace/-/hast-util-whitespace-2.0.0.tgz", @@ -103285,6 +104003,11 @@ "resolved": "https://registry.npmjs.org/html-tags/-/html-tags-3.3.1.tgz", "integrity": "sha512-ztqyC3kLto0e9WbNp0aeP+M3kTt+nbaIveGmUxAtZa+8iFgKLUOD4YKM5j+f3QD89bra7UeumolZHKuOXnTmeQ==" }, + "html-void-elements": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/html-void-elements/-/html-void-elements-3.0.0.tgz", + "integrity": "sha512-bEqo66MRXsUGxWHV5IP0PUiAWwoEjba4VCzg0LjFJBpchPaTfyfCKTG6bc5F8ucKec3q5y6qOdGyYTSBEvhCrg==" + }, "html-webpack-plugin": { "version": "5.6.0", "resolved": "https://registry.npmjs.org/html-webpack-plugin/-/html-webpack-plugin-5.6.0.tgz", @@ -112405,186 +113128,321 @@ } } }, - "mdast-util-mdx-expression": { - "version": "1.3.2", - "resolved": "https://registry.npmjs.org/mdast-util-mdx-expression/-/mdast-util-mdx-expression-1.3.2.tgz", - "integrity": "sha512-xIPmR5ReJDu/DHH1OoIT1HkuybIfRGYRywC+gJtI7qHjCJp/M9jrmBEJW22O8lskDWm562BX2W8TiAwRTb0rKA==", - "dev": true, + "mdast-util-mdx-expression": { + "version": "1.3.2", + "resolved": "https://registry.npmjs.org/mdast-util-mdx-expression/-/mdast-util-mdx-expression-1.3.2.tgz", + "integrity": "sha512-xIPmR5ReJDu/DHH1OoIT1HkuybIfRGYRywC+gJtI7qHjCJp/M9jrmBEJW22O8lskDWm562BX2W8TiAwRTb0rKA==", + "dev": true, + "requires": { + "@types/estree-jsx": "^1.0.0", + "@types/hast": "^2.0.0", + "@types/mdast": "^3.0.0", + "mdast-util-from-markdown": "^1.0.0", + "mdast-util-to-markdown": "^1.0.0" + }, + "dependencies": { + "@types/estree-jsx": { + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/@types/estree-jsx/-/estree-jsx-1.0.4.tgz", + "integrity": "sha512-5idy3hvI9lAMqsyilBM+N+boaCf1MgoefbDxN6KEO5aK17TOHwFAYT9sjxzeKAiIWRUBgLxmZ9mPcnzZXtTcRQ==", + "dev": true, + "requires": { + "@types/estree": "*" + } + }, + "mdast-util-phrasing": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/mdast-util-phrasing/-/mdast-util-phrasing-3.0.1.tgz", + "integrity": "sha512-WmI1gTXUBJo4/ZmSk79Wcb2HcjPJBzM1nlI/OUWA8yk2X9ik3ffNbBGsU+09BFmXaL1IBb9fiuvq6/KMiNycSg==", + "dev": true, + "requires": { + "@types/mdast": "^3.0.0", + "unist-util-is": "^5.0.0" + } + }, + "mdast-util-to-markdown": { + "version": "1.5.0", + "resolved": "https://registry.npmjs.org/mdast-util-to-markdown/-/mdast-util-to-markdown-1.5.0.tgz", + "integrity": "sha512-bbv7TPv/WC49thZPg3jXuqzuvI45IL2EVAr/KxF0BSdHsU0ceFHOmwQn6evxAh1GaoK/6GQ1wp4R4oW2+LFL/A==", + "dev": true, + "requires": { + "@types/mdast": "^3.0.0", + "@types/unist": "^2.0.0", + "longest-streak": "^3.0.0", + "mdast-util-phrasing": "^3.0.0", + "mdast-util-to-string": "^3.0.0", + "micromark-util-decode-string": "^1.0.0", + "unist-util-visit": "^4.0.0", + "zwitch": "^2.0.0" + } + }, + "mdast-util-to-string": { + "version": "3.2.0", + "resolved": "https://registry.npmjs.org/mdast-util-to-string/-/mdast-util-to-string-3.2.0.tgz", + "integrity": 
"sha512-V4Zn/ncyN1QNSqSBxTrMOLpjr+IKdHl2v3KVLoWmDPscP4r9GcCi71gjgvUV1SFSKh92AjAG4peFuBl2/YgCJg==", + "dev": true, + "requires": { + "@types/mdast": "^3.0.0" + } + } + } + }, + "mdast-util-mdx-jsx": { + "version": "2.1.4", + "resolved": "https://registry.npmjs.org/mdast-util-mdx-jsx/-/mdast-util-mdx-jsx-2.1.4.tgz", + "integrity": "sha512-DtMn9CmVhVzZx3f+optVDF8yFgQVt7FghCRNdlIaS3X5Bnym3hZwPbg/XW86vdpKjlc1PVj26SpnLGeJBXD3JA==", + "dev": true, + "requires": { + "@types/estree-jsx": "^1.0.0", + "@types/hast": "^2.0.0", + "@types/mdast": "^3.0.0", + "@types/unist": "^2.0.0", + "ccount": "^2.0.0", + "mdast-util-from-markdown": "^1.1.0", + "mdast-util-to-markdown": "^1.3.0", + "parse-entities": "^4.0.0", + "stringify-entities": "^4.0.0", + "unist-util-remove-position": "^4.0.0", + "unist-util-stringify-position": "^3.0.0", + "vfile-message": "^3.0.0" + }, + "dependencies": { + "@types/estree-jsx": { + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/@types/estree-jsx/-/estree-jsx-1.0.4.tgz", + "integrity": "sha512-5idy3hvI9lAMqsyilBM+N+boaCf1MgoefbDxN6KEO5aK17TOHwFAYT9sjxzeKAiIWRUBgLxmZ9mPcnzZXtTcRQ==", + "dev": true, + "requires": { + "@types/estree": "*" + } + }, + "mdast-util-phrasing": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/mdast-util-phrasing/-/mdast-util-phrasing-3.0.1.tgz", + "integrity": "sha512-WmI1gTXUBJo4/ZmSk79Wcb2HcjPJBzM1nlI/OUWA8yk2X9ik3ffNbBGsU+09BFmXaL1IBb9fiuvq6/KMiNycSg==", + "dev": true, + "requires": { + "@types/mdast": "^3.0.0", + "unist-util-is": "^5.0.0" + } + }, + "mdast-util-to-markdown": { + "version": "1.5.0", + "resolved": "https://registry.npmjs.org/mdast-util-to-markdown/-/mdast-util-to-markdown-1.5.0.tgz", + "integrity": "sha512-bbv7TPv/WC49thZPg3jXuqzuvI45IL2EVAr/KxF0BSdHsU0ceFHOmwQn6evxAh1GaoK/6GQ1wp4R4oW2+LFL/A==", + "dev": true, + "requires": { + "@types/mdast": "^3.0.0", + "@types/unist": "^2.0.0", + "longest-streak": "^3.0.0", + "mdast-util-phrasing": "^3.0.0", + "mdast-util-to-string": "^3.0.0", + "micromark-util-decode-string": "^1.0.0", + "unist-util-visit": "^4.0.0", + "zwitch": "^2.0.0" + } + }, + "mdast-util-to-string": { + "version": "3.2.0", + "resolved": "https://registry.npmjs.org/mdast-util-to-string/-/mdast-util-to-string-3.2.0.tgz", + "integrity": "sha512-V4Zn/ncyN1QNSqSBxTrMOLpjr+IKdHl2v3KVLoWmDPscP4r9GcCi71gjgvUV1SFSKh92AjAG4peFuBl2/YgCJg==", + "dev": true, + "requires": { + "@types/mdast": "^3.0.0" + } + } + } + }, + "mdast-util-mdxjs-esm": { + "version": "1.3.1", + "resolved": "https://registry.npmjs.org/mdast-util-mdxjs-esm/-/mdast-util-mdxjs-esm-1.3.1.tgz", + "integrity": "sha512-SXqglS0HrEvSdUEfoXFtcg7DRl7S2cwOXc7jkuusG472Mmjag34DUDeOJUZtl+BVnyeO1frIgVpHlNRWc2gk/w==", + "dev": true, + "requires": { + "@types/estree-jsx": "^1.0.0", + "@types/hast": "^2.0.0", + "@types/mdast": "^3.0.0", + "mdast-util-from-markdown": "^1.0.0", + "mdast-util-to-markdown": "^1.0.0" + }, + "dependencies": { + "@types/estree-jsx": { + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/@types/estree-jsx/-/estree-jsx-1.0.4.tgz", + "integrity": "sha512-5idy3hvI9lAMqsyilBM+N+boaCf1MgoefbDxN6KEO5aK17TOHwFAYT9sjxzeKAiIWRUBgLxmZ9mPcnzZXtTcRQ==", + "dev": true, + "requires": { + "@types/estree": "*" + } + }, + "mdast-util-phrasing": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/mdast-util-phrasing/-/mdast-util-phrasing-3.0.1.tgz", + "integrity": "sha512-WmI1gTXUBJo4/ZmSk79Wcb2HcjPJBzM1nlI/OUWA8yk2X9ik3ffNbBGsU+09BFmXaL1IBb9fiuvq6/KMiNycSg==", + "dev": true, + "requires": { + "@types/mdast": "^3.0.0", 
+ "unist-util-is": "^5.0.0" + } + }, + "mdast-util-to-markdown": { + "version": "1.5.0", + "resolved": "https://registry.npmjs.org/mdast-util-to-markdown/-/mdast-util-to-markdown-1.5.0.tgz", + "integrity": "sha512-bbv7TPv/WC49thZPg3jXuqzuvI45IL2EVAr/KxF0BSdHsU0ceFHOmwQn6evxAh1GaoK/6GQ1wp4R4oW2+LFL/A==", + "dev": true, + "requires": { + "@types/mdast": "^3.0.0", + "@types/unist": "^2.0.0", + "longest-streak": "^3.0.0", + "mdast-util-phrasing": "^3.0.0", + "mdast-util-to-string": "^3.0.0", + "micromark-util-decode-string": "^1.0.0", + "unist-util-visit": "^4.0.0", + "zwitch": "^2.0.0" + } + }, + "mdast-util-to-string": { + "version": "3.2.0", + "resolved": "https://registry.npmjs.org/mdast-util-to-string/-/mdast-util-to-string-3.2.0.tgz", + "integrity": "sha512-V4Zn/ncyN1QNSqSBxTrMOLpjr+IKdHl2v3KVLoWmDPscP4r9GcCi71gjgvUV1SFSKh92AjAG4peFuBl2/YgCJg==", + "dev": true, + "requires": { + "@types/mdast": "^3.0.0" + } + } + } + }, + "mdast-util-to-hast": { + "version": "13.2.0", + "resolved": "https://registry.npmjs.org/mdast-util-to-hast/-/mdast-util-to-hast-13.2.0.tgz", + "integrity": "sha512-QGYKEuUsYT9ykKBCMOEDLsU5JRObWQusAolFMeko/tYPufNkRffBAQjIE+99jbA87xv6FgmjLtwjh9wBWajwAA==", "requires": { - "@types/estree-jsx": "^1.0.0", - "@types/hast": "^2.0.0", - "@types/mdast": "^3.0.0", - "mdast-util-from-markdown": "^1.0.0", - "mdast-util-to-markdown": "^1.0.0" + "@types/hast": "^3.0.0", + "@types/mdast": "^4.0.0", + "@ungap/structured-clone": "^1.0.0", + "devlop": "^1.0.0", + "micromark-util-sanitize-uri": "^2.0.0", + "trim-lines": "^3.0.0", + "unist-util-position": "^5.0.0", + "unist-util-visit": "^5.0.0", + "vfile": "^6.0.0" }, "dependencies": { - "@types/estree-jsx": { - "version": "1.0.4", - "resolved": "https://registry.npmjs.org/@types/estree-jsx/-/estree-jsx-1.0.4.tgz", - "integrity": "sha512-5idy3hvI9lAMqsyilBM+N+boaCf1MgoefbDxN6KEO5aK17TOHwFAYT9sjxzeKAiIWRUBgLxmZ9mPcnzZXtTcRQ==", - "dev": true, + "@types/hast": { + "version": "3.0.4", + "resolved": "https://registry.npmjs.org/@types/hast/-/hast-3.0.4.tgz", + "integrity": "sha512-WPs+bbQw5aCj+x6laNGWLH3wviHtoCv/P3+otBhbOhJgG8qtpdAMlTCxLtsTWA7LH1Oh/bFCHsBn0TPS5m30EQ==", "requires": { - "@types/estree": "*" + "@types/unist": "*" } }, - "mdast-util-phrasing": { - "version": "3.0.1", - "resolved": "https://registry.npmjs.org/mdast-util-phrasing/-/mdast-util-phrasing-3.0.1.tgz", - "integrity": "sha512-WmI1gTXUBJo4/ZmSk79Wcb2HcjPJBzM1nlI/OUWA8yk2X9ik3ffNbBGsU+09BFmXaL1IBb9fiuvq6/KMiNycSg==", - "dev": true, + "@types/mdast": { + "version": "4.0.4", + "resolved": "https://registry.npmjs.org/@types/mdast/-/mdast-4.0.4.tgz", + "integrity": "sha512-kGaNbPh1k7AFzgpud/gMdvIm5xuECykRR+JnWKQno9TAXVa6WIVCGTPvYGekIDL4uwCZQSYbUxNBSb1aUo79oA==", "requires": { - "@types/mdast": "^3.0.0", - "unist-util-is": "^5.0.0" + "@types/unist": "*" } }, - "mdast-util-to-markdown": { - "version": "1.5.0", - "resolved": "https://registry.npmjs.org/mdast-util-to-markdown/-/mdast-util-to-markdown-1.5.0.tgz", - "integrity": "sha512-bbv7TPv/WC49thZPg3jXuqzuvI45IL2EVAr/KxF0BSdHsU0ceFHOmwQn6evxAh1GaoK/6GQ1wp4R4oW2+LFL/A==", - "dev": true, - "requires": { - "@types/mdast": "^3.0.0", - "@types/unist": "^2.0.0", - "longest-streak": "^3.0.0", - "mdast-util-phrasing": "^3.0.0", - "mdast-util-to-string": "^3.0.0", - "micromark-util-decode-string": "^1.0.0", - "unist-util-visit": "^4.0.0", - "zwitch": "^2.0.0" - } + "@types/unist": { + "version": "3.0.2", + "resolved": "https://registry.npmjs.org/@types/unist/-/unist-3.0.2.tgz", + "integrity": 
"sha512-dqId9J8K/vGi5Zr7oo212BGii5m3q5Hxlkwy3WpYuKPklmBEvsbMYYyLxAQpSffdLl/gdW0XUpKWFvYmyoWCoQ==" }, - "mdast-util-to-string": { - "version": "3.2.0", - "resolved": "https://registry.npmjs.org/mdast-util-to-string/-/mdast-util-to-string-3.2.0.tgz", - "integrity": "sha512-V4Zn/ncyN1QNSqSBxTrMOLpjr+IKdHl2v3KVLoWmDPscP4r9GcCi71gjgvUV1SFSKh92AjAG4peFuBl2/YgCJg==", - "dev": true, + "micromark-util-character": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/micromark-util-character/-/micromark-util-character-2.1.0.tgz", + "integrity": "sha512-KvOVV+X1yLBfs9dCBSopq/+G1PcgT3lAK07mC4BzXi5E7ahzMAF8oIupDDJ6mievI6F+lAATkbQQlQixJfT3aQ==", "requires": { - "@types/mdast": "^3.0.0" + "micromark-util-symbol": "^2.0.0", + "micromark-util-types": "^2.0.0" } - } - } - }, - "mdast-util-mdx-jsx": { - "version": "2.1.4", - "resolved": "https://registry.npmjs.org/mdast-util-mdx-jsx/-/mdast-util-mdx-jsx-2.1.4.tgz", - "integrity": "sha512-DtMn9CmVhVzZx3f+optVDF8yFgQVt7FghCRNdlIaS3X5Bnym3hZwPbg/XW86vdpKjlc1PVj26SpnLGeJBXD3JA==", - "dev": true, - "requires": { - "@types/estree-jsx": "^1.0.0", - "@types/hast": "^2.0.0", - "@types/mdast": "^3.0.0", - "@types/unist": "^2.0.0", - "ccount": "^2.0.0", - "mdast-util-from-markdown": "^1.1.0", - "mdast-util-to-markdown": "^1.3.0", - "parse-entities": "^4.0.0", - "stringify-entities": "^4.0.0", - "unist-util-remove-position": "^4.0.0", - "unist-util-stringify-position": "^3.0.0", - "vfile-message": "^3.0.0" - }, - "dependencies": { - "@types/estree-jsx": { - "version": "1.0.4", - "resolved": "https://registry.npmjs.org/@types/estree-jsx/-/estree-jsx-1.0.4.tgz", - "integrity": "sha512-5idy3hvI9lAMqsyilBM+N+boaCf1MgoefbDxN6KEO5aK17TOHwFAYT9sjxzeKAiIWRUBgLxmZ9mPcnzZXtTcRQ==", - "dev": true, + }, + "micromark-util-encode": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/micromark-util-encode/-/micromark-util-encode-2.0.0.tgz", + "integrity": "sha512-pS+ROfCXAGLWCOc8egcBvT0kf27GoWMqtdarNfDcjb6YLuV5cM3ioG45Ys2qOVqeqSbjaKg72vU+Wby3eddPsA==" + }, + "micromark-util-sanitize-uri": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/micromark-util-sanitize-uri/-/micromark-util-sanitize-uri-2.0.0.tgz", + "integrity": "sha512-WhYv5UEcZrbAtlsnPuChHUAsu/iBPOVaEVsntLBIdpibO0ddy8OzavZz3iL2xVvBZOpolujSliP65Kq0/7KIYw==", "requires": { - "@types/estree": "*" + "micromark-util-character": "^2.0.0", + "micromark-util-encode": "^2.0.0", + "micromark-util-symbol": "^2.0.0" } }, - "mdast-util-phrasing": { - "version": "3.0.1", - "resolved": "https://registry.npmjs.org/mdast-util-phrasing/-/mdast-util-phrasing-3.0.1.tgz", - "integrity": "sha512-WmI1gTXUBJo4/ZmSk79Wcb2HcjPJBzM1nlI/OUWA8yk2X9ik3ffNbBGsU+09BFmXaL1IBb9fiuvq6/KMiNycSg==", - "dev": true, + "micromark-util-symbol": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/micromark-util-symbol/-/micromark-util-symbol-2.0.0.tgz", + "integrity": "sha512-8JZt9ElZ5kyTnO94muPxIGS8oyElRJaiJO8EzV6ZSyGQ1Is8xwl4Q45qU5UOg+bGH4AikWziz0iN4sFLWs8PGw==" + }, + "micromark-util-types": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/micromark-util-types/-/micromark-util-types-2.0.0.tgz", + "integrity": "sha512-oNh6S2WMHWRZrmutsRmDDfkzKtxF+bc2VxLC9dvtrDIRFln627VsFP6fLMgTryGDljgLPjkrzQSDcPrjPyDJ5w==" + }, + "unist-util-is": { + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/unist-util-is/-/unist-util-is-6.0.0.tgz", + "integrity": "sha512-2qCTHimwdxLfz+YzdGfkqNlH0tLi9xjTnHddPmJwtIG9MGsdbutfTc4P+haPD7l7Cjxf/WZj+we5qfVPvvxfYw==", "requires": { - "@types/mdast": "^3.0.0", - 
"unist-util-is": "^5.0.0" + "@types/unist": "^3.0.0" } }, - "mdast-util-to-markdown": { - "version": "1.5.0", - "resolved": "https://registry.npmjs.org/mdast-util-to-markdown/-/mdast-util-to-markdown-1.5.0.tgz", - "integrity": "sha512-bbv7TPv/WC49thZPg3jXuqzuvI45IL2EVAr/KxF0BSdHsU0ceFHOmwQn6evxAh1GaoK/6GQ1wp4R4oW2+LFL/A==", - "dev": true, + "unist-util-position": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/unist-util-position/-/unist-util-position-5.0.0.tgz", + "integrity": "sha512-fucsC7HjXvkB5R3kTCO7kUjRdrS0BJt3M/FPxmHMBOm8JQi2BsHAHFsy27E0EolP8rp0NzXsJ+jNPyDWvOJZPA==", "requires": { - "@types/mdast": "^3.0.0", - "@types/unist": "^2.0.0", - "longest-streak": "^3.0.0", - "mdast-util-phrasing": "^3.0.0", - "mdast-util-to-string": "^3.0.0", - "micromark-util-decode-string": "^1.0.0", - "unist-util-visit": "^4.0.0", - "zwitch": "^2.0.0" + "@types/unist": "^3.0.0" } }, - "mdast-util-to-string": { - "version": "3.2.0", - "resolved": "https://registry.npmjs.org/mdast-util-to-string/-/mdast-util-to-string-3.2.0.tgz", - "integrity": "sha512-V4Zn/ncyN1QNSqSBxTrMOLpjr+IKdHl2v3KVLoWmDPscP4r9GcCi71gjgvUV1SFSKh92AjAG4peFuBl2/YgCJg==", - "dev": true, + "unist-util-stringify-position": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/unist-util-stringify-position/-/unist-util-stringify-position-4.0.0.tgz", + "integrity": "sha512-0ASV06AAoKCDkS2+xw5RXJywruurpbC4JZSm7nr7MOt1ojAzvyyaO+UxZf18j8FCF6kmzCZKcAgN/yu2gm2XgQ==", "requires": { - "@types/mdast": "^3.0.0" + "@types/unist": "^3.0.0" } - } - } - }, - "mdast-util-mdxjs-esm": { - "version": "1.3.1", - "resolved": "https://registry.npmjs.org/mdast-util-mdxjs-esm/-/mdast-util-mdxjs-esm-1.3.1.tgz", - "integrity": "sha512-SXqglS0HrEvSdUEfoXFtcg7DRl7S2cwOXc7jkuusG472Mmjag34DUDeOJUZtl+BVnyeO1frIgVpHlNRWc2gk/w==", - "dev": true, - "requires": { - "@types/estree-jsx": "^1.0.0", - "@types/hast": "^2.0.0", - "@types/mdast": "^3.0.0", - "mdast-util-from-markdown": "^1.0.0", - "mdast-util-to-markdown": "^1.0.0" - }, - "dependencies": { - "@types/estree-jsx": { - "version": "1.0.4", - "resolved": "https://registry.npmjs.org/@types/estree-jsx/-/estree-jsx-1.0.4.tgz", - "integrity": "sha512-5idy3hvI9lAMqsyilBM+N+boaCf1MgoefbDxN6KEO5aK17TOHwFAYT9sjxzeKAiIWRUBgLxmZ9mPcnzZXtTcRQ==", - "dev": true, + }, + "unist-util-visit": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/unist-util-visit/-/unist-util-visit-5.0.0.tgz", + "integrity": "sha512-MR04uvD+07cwl/yhVuVWAtw+3GOR/knlL55Nd/wAdblk27GCVt3lqpTivy/tkJcZoNPzTwS1Y+KMojlLDhoTzg==", "requires": { - "@types/estree": "*" + "@types/unist": "^3.0.0", + "unist-util-is": "^6.0.0", + "unist-util-visit-parents": "^6.0.0" } }, - "mdast-util-phrasing": { - "version": "3.0.1", - "resolved": "https://registry.npmjs.org/mdast-util-phrasing/-/mdast-util-phrasing-3.0.1.tgz", - "integrity": "sha512-WmI1gTXUBJo4/ZmSk79Wcb2HcjPJBzM1nlI/OUWA8yk2X9ik3ffNbBGsU+09BFmXaL1IBb9fiuvq6/KMiNycSg==", - "dev": true, + "unist-util-visit-parents": { + "version": "6.0.1", + "resolved": "https://registry.npmjs.org/unist-util-visit-parents/-/unist-util-visit-parents-6.0.1.tgz", + "integrity": "sha512-L/PqWzfTP9lzzEa6CKs0k2nARxTdZduw3zyh8d2NVBnsyvHjSX4TWse388YrrQKbvI8w20fGjGlhgT96WwKykw==", "requires": { - "@types/mdast": "^3.0.0", - "unist-util-is": "^5.0.0" + "@types/unist": "^3.0.0", + "unist-util-is": "^6.0.0" } }, - "mdast-util-to-markdown": { - "version": "1.5.0", - "resolved": "https://registry.npmjs.org/mdast-util-to-markdown/-/mdast-util-to-markdown-1.5.0.tgz", - "integrity": 
"sha512-bbv7TPv/WC49thZPg3jXuqzuvI45IL2EVAr/KxF0BSdHsU0ceFHOmwQn6evxAh1GaoK/6GQ1wp4R4oW2+LFL/A==", - "dev": true, + "vfile": { + "version": "6.0.1", + "resolved": "https://registry.npmjs.org/vfile/-/vfile-6.0.1.tgz", + "integrity": "sha512-1bYqc7pt6NIADBJ98UiG0Bn/CHIVOoZ/IyEkqIruLg0mE1BKzkOXY2D6CSqQIcKqgadppE5lrxgWXJmXd7zZJw==", "requires": { - "@types/mdast": "^3.0.0", - "@types/unist": "^2.0.0", - "longest-streak": "^3.0.0", - "mdast-util-phrasing": "^3.0.0", - "mdast-util-to-string": "^3.0.0", - "micromark-util-decode-string": "^1.0.0", - "unist-util-visit": "^4.0.0", - "zwitch": "^2.0.0" + "@types/unist": "^3.0.0", + "unist-util-stringify-position": "^4.0.0", + "vfile-message": "^4.0.0" } }, - "mdast-util-to-string": { - "version": "3.2.0", - "resolved": "https://registry.npmjs.org/mdast-util-to-string/-/mdast-util-to-string-3.2.0.tgz", - "integrity": "sha512-V4Zn/ncyN1QNSqSBxTrMOLpjr+IKdHl2v3KVLoWmDPscP4r9GcCi71gjgvUV1SFSKh92AjAG4peFuBl2/YgCJg==", - "dev": true, + "vfile-message": { + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/vfile-message/-/vfile-message-4.0.2.tgz", + "integrity": "sha512-jRDZ1IMLttGj41KcZvlrYAaI3CfqpLpfpf+Mfig13viT6NKvRzWZ+lXz0Y5D60w6uJIBAOGq9mSHf0gktF0duw==", "requires": { - "@types/mdast": "^3.0.0" + "@types/unist": "^3.0.0", + "unist-util-stringify-position": "^4.0.0" } } } @@ -119112,124 +119970,54 @@ } }, "rehype-raw": { - "version": "6.1.1", - "resolved": "https://registry.npmjs.org/rehype-raw/-/rehype-raw-6.1.1.tgz", - "integrity": "sha512-d6AKtisSRtDRX4aSPsJGTfnzrX2ZkHQLE5kiUuGOeEoLpbEulFF4hj0mLPbsa+7vmguDKOVVEQdHKDSwoaIDsQ==", + "version": "7.0.0", + "resolved": "https://registry.npmjs.org/rehype-raw/-/rehype-raw-7.0.0.tgz", + "integrity": "sha512-/aE8hCfKlQeA8LmyeyQvQF3eBiLRGNlfBJEvWH7ivp9sBqs7TNqBL5X3v157rM4IFETqDnIOO+z5M/biZbo9Ww==", "requires": { - "@types/hast": "^2.0.0", - "hast-util-raw": "^7.2.0", - "unified": "^10.0.0" + "@types/hast": "^3.0.0", + "hast-util-raw": "^9.0.0", + "vfile": "^6.0.0" }, "dependencies": { - "comma-separated-tokens": { - "version": "2.0.2", - "resolved": "https://registry.npmjs.org/comma-separated-tokens/-/comma-separated-tokens-2.0.2.tgz", - "integrity": "sha512-G5yTt3KQN4Yn7Yk4ed73hlZ1evrFKXeUW3086p3PRFNp7m2vIjI6Pg+Kgb+oyzhd9F2qdcoj67+y3SdxL5XWsg==" - }, - "hast-to-hyperscript": { - "version": "10.0.1", - "resolved": "https://registry.npmjs.org/hast-to-hyperscript/-/hast-to-hyperscript-10.0.1.tgz", - "integrity": "sha512-dhIVGoKCQVewFi+vz3Vt567E4ejMppS1haBRL6TEmeLeJVB1i/FJIIg/e6s1Bwn0g5qtYojHEKvyGA+OZuyifw==", - "requires": { - "@types/unist": "^2.0.0", - "comma-separated-tokens": "^2.0.0", - "property-information": "^6.0.0", - "space-separated-tokens": "^2.0.0", - "style-to-object": "^0.3.0", - "unist-util-is": "^5.0.0", - "web-namespaces": "^2.0.0" - } - }, - "hast-util-from-parse5": { - "version": "7.1.0", - "resolved": "https://registry.npmjs.org/hast-util-from-parse5/-/hast-util-from-parse5-7.1.0.tgz", - "integrity": "sha512-m8yhANIAccpU4K6+121KpPP55sSl9/samzQSQGpb0mTExcNh2WlvjtMwSWFhg6uqD4Rr6Nfa8N6TMypQM51rzQ==", + "@types/hast": { + "version": "3.0.4", + "resolved": "https://registry.npmjs.org/@types/hast/-/hast-3.0.4.tgz", + "integrity": "sha512-WPs+bbQw5aCj+x6laNGWLH3wviHtoCv/P3+otBhbOhJgG8qtpdAMlTCxLtsTWA7LH1Oh/bFCHsBn0TPS5m30EQ==", "requires": { - "@types/hast": "^2.0.0", - "@types/parse5": "^6.0.0", - "@types/unist": "^2.0.0", - "hastscript": "^7.0.0", - "property-information": "^6.0.0", - "vfile": "^5.0.0", - "vfile-location": "^4.0.0", - "web-namespaces": "^2.0.0" + "@types/unist": "*" } 
}, - "hast-util-parse-selector": { - "version": "3.1.0", - "resolved": "https://registry.npmjs.org/hast-util-parse-selector/-/hast-util-parse-selector-3.1.0.tgz", - "integrity": "sha512-AyjlI2pTAZEOeu7GeBPZhROx0RHBnydkQIXlhnFzDi0qfXTmGUWoCYZtomHbrdrheV4VFUlPcfJ6LMF5T6sQzg==", - "requires": { - "@types/hast": "^2.0.0" - } + "@types/unist": { + "version": "3.0.2", + "resolved": "https://registry.npmjs.org/@types/unist/-/unist-3.0.2.tgz", + "integrity": "sha512-dqId9J8K/vGi5Zr7oo212BGii5m3q5Hxlkwy3WpYuKPklmBEvsbMYYyLxAQpSffdLl/gdW0XUpKWFvYmyoWCoQ==" }, - "hast-util-raw": { - "version": "7.2.2", - "resolved": "https://registry.npmjs.org/hast-util-raw/-/hast-util-raw-7.2.2.tgz", - "integrity": "sha512-0x3BhhdlBcqRIKyc095lBSDvmQNMY3Eulj2PLsT5XCyKYrxssI5yr3P4Kv/PBo1s/DMkZy2voGkMXECnFCZRLQ==", + "unist-util-stringify-position": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/unist-util-stringify-position/-/unist-util-stringify-position-4.0.0.tgz", + "integrity": "sha512-0ASV06AAoKCDkS2+xw5RXJywruurpbC4JZSm7nr7MOt1ojAzvyyaO+UxZf18j8FCF6kmzCZKcAgN/yu2gm2XgQ==", "requires": { - "@types/hast": "^2.0.0", - "@types/parse5": "^6.0.0", - "hast-util-from-parse5": "^7.0.0", - "hast-util-to-parse5": "^7.0.0", - "html-void-elements": "^2.0.0", - "parse5": "^6.0.0", - "unist-util-position": "^4.0.0", - "unist-util-visit": "^4.0.0", - "vfile": "^5.0.0", - "web-namespaces": "^2.0.0", - "zwitch": "^2.0.0" + "@types/unist": "^3.0.0" } }, - "hast-util-to-parse5": { - "version": "7.0.0", - "resolved": "https://registry.npmjs.org/hast-util-to-parse5/-/hast-util-to-parse5-7.0.0.tgz", - "integrity": "sha512-YHiS6aTaZ3N0Q3nxaY/Tj98D6kM8QX5Q8xqgg8G45zR7PvWnPGPP0vcKCgb/moIydEJ/QWczVrX0JODCVeoV7A==", + "vfile": { + "version": "6.0.1", + "resolved": "https://registry.npmjs.org/vfile/-/vfile-6.0.1.tgz", + "integrity": "sha512-1bYqc7pt6NIADBJ98UiG0Bn/CHIVOoZ/IyEkqIruLg0mE1BKzkOXY2D6CSqQIcKqgadppE5lrxgWXJmXd7zZJw==", "requires": { - "@types/hast": "^2.0.0", - "@types/parse5": "^6.0.0", - "hast-to-hyperscript": "^10.0.0", - "property-information": "^6.0.0", - "web-namespaces": "^2.0.0", - "zwitch": "^2.0.0" + "@types/unist": "^3.0.0", + "unist-util-stringify-position": "^4.0.0", + "vfile-message": "^4.0.0" } }, - "hastscript": { - "version": "7.1.0", - "resolved": "https://registry.npmjs.org/hastscript/-/hastscript-7.1.0.tgz", - "integrity": "sha512-uBjaTTLN0MkCZxY/R2fWUOcu7FRtUVzKRO5P/RAfgsu3yFiMB1JWCO4AjeVkgHxAira1f2UecHK5WfS9QurlWA==", + "vfile-message": { + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/vfile-message/-/vfile-message-4.0.2.tgz", + "integrity": "sha512-jRDZ1IMLttGj41KcZvlrYAaI3CfqpLpfpf+Mfig13viT6NKvRzWZ+lXz0Y5D60w6uJIBAOGq9mSHf0gktF0duw==", "requires": { - "@types/hast": "^2.0.0", - "comma-separated-tokens": "^2.0.0", - "hast-util-parse-selector": "^3.0.0", - "property-information": "^6.0.0", - "space-separated-tokens": "^2.0.0" + "@types/unist": "^3.0.0", + "unist-util-stringify-position": "^4.0.0" } - }, - "html-void-elements": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/html-void-elements/-/html-void-elements-2.0.1.tgz", - "integrity": "sha512-0quDb7s97CfemeJAnW9wC0hw78MtW7NU3hqtCD75g2vFlDLt36llsYD7uB7SUzojLMP24N5IatXf7ylGXiGG9A==" - }, - "parse5": { - "version": "6.0.1", - "resolved": "https://registry.npmjs.org/parse5/-/parse5-6.0.1.tgz", - "integrity": "sha512-Ofn/CTFzRGTTxwpNEs9PP93gXShHcTq255nzRYSKe8AkVpZY7e1fpmTfOyoIvjP5HG7Z2ZM7VS9PPhQGW2pOpw==" - }, - "property-information": { - "version": "6.1.1", - "resolved": 
"https://registry.npmjs.org/property-information/-/property-information-6.1.1.tgz", - "integrity": "sha512-hrzC564QIl0r0vy4l6MvRLhafmUowhO/O3KgVSoXIbbA2Sz4j8HGpJc6T2cubRVwMwpdiG/vKGfhT4IixmKN9w==" - }, - "space-separated-tokens": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/space-separated-tokens/-/space-separated-tokens-2.0.1.tgz", - "integrity": "sha512-ekwEbFp5aqSPKaqeY1PGrlGQxPNaq+Cnx4+bE2D8sciBQrHpbwoBbawqTN2+6jPs9IdWxxiUcN0K2pkczD3zmw==" - }, - "web-namespaces": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/web-namespaces/-/web-namespaces-2.0.1.tgz", - "integrity": "sha512-bKr1DkiNa2krS7qxNtdrtHAmzuYGFQLiQ13TsorsdT6ULTkPLKuu5+GsFpDlg6JFjUTwX2DyhMPG2be8uPrqsQ==" } } }, @@ -123128,12 +123916,46 @@ } }, "vfile-location": { - "version": "4.0.1", - "resolved": "https://registry.npmjs.org/vfile-location/-/vfile-location-4.0.1.tgz", - "integrity": "sha512-JDxPlTbZrZCQXogGheBHjbRWjESSPEak770XwWPfw5mTc1v1nWGLB/apzZxsx8a0SJVfF8HK8ql8RD308vXRUw==", + "version": "5.0.2", + "resolved": "https://registry.npmjs.org/vfile-location/-/vfile-location-5.0.2.tgz", + "integrity": "sha512-NXPYyxyBSH7zB5U6+3uDdd6Nybz6o6/od9rk8bp9H8GR3L+cm/fC0uUTbqBmUTnMCUDslAGBOIKNfvvb+gGlDg==", "requires": { - "@types/unist": "^2.0.0", - "vfile": "^5.0.0" + "@types/unist": "^3.0.0", + "vfile": "^6.0.0" + }, + "dependencies": { + "@types/unist": { + "version": "3.0.2", + "resolved": "https://registry.npmjs.org/@types/unist/-/unist-3.0.2.tgz", + "integrity": "sha512-dqId9J8K/vGi5Zr7oo212BGii5m3q5Hxlkwy3WpYuKPklmBEvsbMYYyLxAQpSffdLl/gdW0XUpKWFvYmyoWCoQ==" + }, + "unist-util-stringify-position": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/unist-util-stringify-position/-/unist-util-stringify-position-4.0.0.tgz", + "integrity": "sha512-0ASV06AAoKCDkS2+xw5RXJywruurpbC4JZSm7nr7MOt1ojAzvyyaO+UxZf18j8FCF6kmzCZKcAgN/yu2gm2XgQ==", + "requires": { + "@types/unist": "^3.0.0" + } + }, + "vfile": { + "version": "6.0.1", + "resolved": "https://registry.npmjs.org/vfile/-/vfile-6.0.1.tgz", + "integrity": "sha512-1bYqc7pt6NIADBJ98UiG0Bn/CHIVOoZ/IyEkqIruLg0mE1BKzkOXY2D6CSqQIcKqgadppE5lrxgWXJmXd7zZJw==", + "requires": { + "@types/unist": "^3.0.0", + "unist-util-stringify-position": "^4.0.0", + "vfile-message": "^4.0.0" + } + }, + "vfile-message": { + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/vfile-message/-/vfile-message-4.0.2.tgz", + "integrity": "sha512-jRDZ1IMLttGj41KcZvlrYAaI3CfqpLpfpf+Mfig13viT6NKvRzWZ+lXz0Y5D60w6uJIBAOGq9mSHf0gktF0duw==", + "requires": { + "@types/unist": "^3.0.0", + "unist-util-stringify-position": "^4.0.0" + } + } } }, "vfile-message": { @@ -123290,6 +124112,11 @@ "defaults": "^1.0.3" } }, + "web-namespaces": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/web-namespaces/-/web-namespaces-2.0.1.tgz", + "integrity": "sha512-bKr1DkiNa2krS7qxNtdrtHAmzuYGFQLiQ13TsorsdT6ULTkPLKuu5+GsFpDlg6JFjUTwX2DyhMPG2be8uPrqsQ==" + }, "web-streams-polyfill": { "version": "3.3.2", "resolved": "https://registry.npmjs.org/web-streams-polyfill/-/web-streams-polyfill-3.3.2.tgz", diff --git a/superset-frontend/packages/superset-ui-core/package.json b/superset-frontend/packages/superset-ui-core/package.json index 08bbf7806fa8..e69522494747 100644 --- a/superset-frontend/packages/superset-ui-core/package.json +++ b/superset-frontend/packages/superset-ui-core/package.json @@ -54,7 +54,7 @@ "pretty-ms": "^7.0.0", "react-error-boundary": "^1.2.5", "react-markdown": "^8.0.3", - "rehype-raw": "^6.1.1", + "rehype-raw": "^7.0.0", "rehype-sanitize": 
"^6.0.0", "remark-gfm": "^3.0.1", "reselect": "^4.0.0", From 7727b9dc79d1e2ef0927145759d6362af22ec44c Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 1 Jul 2024 10:06:14 -0600 Subject: [PATCH 22/31] chore(deps-dev): bump eslint-import-resolver-typescript from 2.5.0 to 3.6.1 in /superset-frontend (#29435) Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- superset-frontend/package-lock.json | 93 +++++++++++++++++++++-------- superset-frontend/package.json | 2 +- 2 files changed, 70 insertions(+), 25 deletions(-) diff --git a/superset-frontend/package-lock.json b/superset-frontend/package-lock.json index caed62bb309e..3346ee793c2b 100644 --- a/superset-frontend/package-lock.json +++ b/superset-frontend/package-lock.json @@ -229,7 +229,7 @@ "eslint": "^8.56.0", "eslint-config-airbnb": "^19.0.4", "eslint-config-prettier": "^7.2.0", - "eslint-import-resolver-typescript": "^2.5.0", + "eslint-import-resolver-typescript": "^3.6.1", "eslint-plugin-cypress": "^2.11.2", "eslint-plugin-file-progress": "^1.2.0", "eslint-plugin-import": "^2.24.2", @@ -34108,19 +34108,24 @@ "dev": true }, "node_modules/eslint-import-resolver-typescript": { - "version": "2.5.0", - "resolved": "https://registry.npmjs.org/eslint-import-resolver-typescript/-/eslint-import-resolver-typescript-2.5.0.tgz", - "integrity": "sha512-qZ6e5CFr+I7K4VVhQu3M/9xGv9/YmwsEXrsm3nimw8vWaVHRDrQRp26BgCypTxBp3vUp4o5aVEJRiy0F2DFddQ==", + "version": "3.6.1", + "resolved": "https://registry.npmjs.org/eslint-import-resolver-typescript/-/eslint-import-resolver-typescript-3.6.1.tgz", + "integrity": "sha512-xgdptdoi5W3niYeuQxKmzVDTATvLYqhpwmykwsh7f6HIOStGWEIL9iqZgQDF9u9OEzrRwR8no5q2VT+bjAujTg==", "dev": true, "dependencies": { - "debug": "^4.3.1", - "glob": "^7.1.7", - "is-glob": "^4.0.1", - "resolve": "^1.20.0", - "tsconfig-paths": "^3.9.0" + "debug": "^4.3.4", + "enhanced-resolve": "^5.12.0", + "eslint-module-utils": "^2.7.4", + "fast-glob": "^3.3.1", + "get-tsconfig": "^4.5.0", + "is-core-module": "^2.11.0", + "is-glob": "^4.0.3" }, "engines": { - "node": ">=4" + "node": "^14.18.0 || >=16.0.0" + }, + "funding": { + "url": "https://opencollective.com/unts/projects/eslint-import-resolver-ts" }, "peerDependencies": { "eslint": "*", @@ -34128,9 +34133,9 @@ } }, "node_modules/eslint-import-resolver-typescript/node_modules/debug": { - "version": "4.3.2", - "resolved": "https://registry.npmjs.org/debug/-/debug-4.3.2.tgz", - "integrity": "sha512-mOp8wKcvj7XxC78zLgw/ZA+6TSgkoE2C/ienthhRD298T7UNwAg9diBpLRxC0mOezLl4B0xV7M0cCO6P/O0Xhw==", + "version": "4.3.5", + "resolved": "https://registry.npmjs.org/debug/-/debug-4.3.5.tgz", + "integrity": "sha512-pt0bNEmneDIvdL1Xsd9oDQ/wrQRkXDT4AUWlNZNPKvW5x/jyO9VFXkJUP07vQ2upmw5PlaITaPKc31jK13V+jg==", "dev": true, "dependencies": { "ms": "2.1.2" @@ -34144,12 +34149,34 @@ } } }, + "node_modules/eslint-import-resolver-typescript/node_modules/enhanced-resolve": { + "version": "5.17.0", + "resolved": "https://registry.npmjs.org/enhanced-resolve/-/enhanced-resolve-5.17.0.tgz", + "integrity": "sha512-dwDPwZL0dmye8Txp2gzFmA6sxALaSvdRDjPH0viLcKrtlOL3tw62nWWweVD1SdILDTJrbrL6tdWVN58Wo6U3eA==", + "dev": true, + "dependencies": { + "graceful-fs": "^4.2.4", + "tapable": "^2.2.0" + }, + "engines": { + "node": ">=10.13.0" + } + }, "node_modules/eslint-import-resolver-typescript/node_modules/ms": { "version": "2.1.2", "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.2.tgz", "integrity": 
"sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w==", "dev": true }, + "node_modules/eslint-import-resolver-typescript/node_modules/tapable": { + "version": "2.2.1", + "resolved": "https://registry.npmjs.org/tapable/-/tapable-2.2.1.tgz", + "integrity": "sha512-GNzQvQTOIP6RyTfE2Qxb8ZVlNmw0n88vp1szwWRimP02mnTsx3Wtn5qRdqY9w2XduFNUgvOwhNnQsjwCp+kqaQ==", + "dev": true, + "engines": { + "node": ">=6" + } + }, "node_modules/eslint-module-utils": { "version": "2.8.0", "resolved": "https://registry.npmjs.org/eslint-module-utils/-/eslint-module-utils-2.8.0.tgz", @@ -100317,32 +100344,50 @@ } }, "eslint-import-resolver-typescript": { - "version": "2.5.0", - "resolved": "https://registry.npmjs.org/eslint-import-resolver-typescript/-/eslint-import-resolver-typescript-2.5.0.tgz", - "integrity": "sha512-qZ6e5CFr+I7K4VVhQu3M/9xGv9/YmwsEXrsm3nimw8vWaVHRDrQRp26BgCypTxBp3vUp4o5aVEJRiy0F2DFddQ==", + "version": "3.6.1", + "resolved": "https://registry.npmjs.org/eslint-import-resolver-typescript/-/eslint-import-resolver-typescript-3.6.1.tgz", + "integrity": "sha512-xgdptdoi5W3niYeuQxKmzVDTATvLYqhpwmykwsh7f6HIOStGWEIL9iqZgQDF9u9OEzrRwR8no5q2VT+bjAujTg==", "dev": true, "requires": { - "debug": "^4.3.1", - "glob": "^7.1.7", - "is-glob": "^4.0.1", - "resolve": "^1.20.0", - "tsconfig-paths": "^3.9.0" + "debug": "^4.3.4", + "enhanced-resolve": "^5.12.0", + "eslint-module-utils": "^2.7.4", + "fast-glob": "^3.3.1", + "get-tsconfig": "^4.5.0", + "is-core-module": "^2.11.0", + "is-glob": "^4.0.3" }, "dependencies": { "debug": { - "version": "4.3.2", - "resolved": "https://registry.npmjs.org/debug/-/debug-4.3.2.tgz", - "integrity": "sha512-mOp8wKcvj7XxC78zLgw/ZA+6TSgkoE2C/ienthhRD298T7UNwAg9diBpLRxC0mOezLl4B0xV7M0cCO6P/O0Xhw==", + "version": "4.3.5", + "resolved": "https://registry.npmjs.org/debug/-/debug-4.3.5.tgz", + "integrity": "sha512-pt0bNEmneDIvdL1Xsd9oDQ/wrQRkXDT4AUWlNZNPKvW5x/jyO9VFXkJUP07vQ2upmw5PlaITaPKc31jK13V+jg==", "dev": true, "requires": { "ms": "2.1.2" } }, + "enhanced-resolve": { + "version": "5.17.0", + "resolved": "https://registry.npmjs.org/enhanced-resolve/-/enhanced-resolve-5.17.0.tgz", + "integrity": "sha512-dwDPwZL0dmye8Txp2gzFmA6sxALaSvdRDjPH0viLcKrtlOL3tw62nWWweVD1SdILDTJrbrL6tdWVN58Wo6U3eA==", + "dev": true, + "requires": { + "graceful-fs": "^4.2.4", + "tapable": "^2.2.0" + } + }, "ms": { "version": "2.1.2", "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.2.tgz", "integrity": "sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w==", "dev": true + }, + "tapable": { + "version": "2.2.1", + "resolved": "https://registry.npmjs.org/tapable/-/tapable-2.2.1.tgz", + "integrity": "sha512-GNzQvQTOIP6RyTfE2Qxb8ZVlNmw0n88vp1szwWRimP02mnTsx3Wtn5qRdqY9w2XduFNUgvOwhNnQsjwCp+kqaQ==", + "dev": true } } }, diff --git a/superset-frontend/package.json b/superset-frontend/package.json index 34c8ebdd2140..9bdce10c7343 100644 --- a/superset-frontend/package.json +++ b/superset-frontend/package.json @@ -294,7 +294,7 @@ "eslint": "^8.56.0", "eslint-config-airbnb": "^19.0.4", "eslint-config-prettier": "^7.2.0", - "eslint-import-resolver-typescript": "^2.5.0", + "eslint-import-resolver-typescript": "^3.6.1", "eslint-plugin-cypress": "^2.11.2", "eslint-plugin-file-progress": "^1.2.0", "eslint-plugin-import": "^2.24.2", From 0cf676b57402cc8bbe9ce46b0d5568370c0851fa Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 1 Jul 2024 11:01:32 -0600 Subject: [PATCH 23/31] 
chore(deps-dev): bump ts-jest from 29.1.2 to 29.1.5 in /superset-websocket (#29423) Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- superset-websocket/package-lock.json | 20 ++++++++++++-------- superset-websocket/package.json | 2 +- 2 files changed, 13 insertions(+), 9 deletions(-) diff --git a/superset-websocket/package-lock.json b/superset-websocket/package-lock.json index d6c0e54292a1..a0653df947dc 100644 --- a/superset-websocket/package-lock.json +++ b/superset-websocket/package-lock.json @@ -35,7 +35,7 @@ "eslint-plugin-lodash": "^7.4.0", "jest": "^29.7.0", "prettier": "^3.2.5", - "ts-jest": "^29.1.2", + "ts-jest": "^29.1.5", "ts-node": "^10.9.2", "typescript": "^4.9.5" }, @@ -6317,9 +6317,9 @@ } }, "node_modules/ts-jest": { - "version": "29.1.2", - "resolved": "https://registry.npmjs.org/ts-jest/-/ts-jest-29.1.2.tgz", - "integrity": "sha512-br6GJoH/WUX4pu7FbZXuWGKGNDuU7b8Uj77g/Sp7puZV6EXzuByl6JrECvm0MzVzSTkSHWTihsXt+5XYER5b+g==", + "version": "29.1.5", + "resolved": "https://registry.npmjs.org/ts-jest/-/ts-jest-29.1.5.tgz", + "integrity": "sha512-UuClSYxM7byvvYfyWdFI+/2UxMmwNyJb0NPkZPQE2hew3RurV7l7zURgOHAd/1I1ZdPpe3GUsXNXAcN8TFKSIg==", "dev": true, "dependencies": { "bs-logger": "0.x", @@ -6335,10 +6335,11 @@ "ts-jest": "cli.js" }, "engines": { - "node": "^16.10.0 || ^18.0.0 || >=20.0.0" + "node": "^14.15.0 || ^16.10.0 || ^18.0.0 || >=20.0.0" }, "peerDependencies": { "@babel/core": ">=7.0.0-beta.0 <8", + "@jest/transform": "^29.0.0", "@jest/types": "^29.0.0", "babel-jest": "^29.0.0", "jest": "^29.0.0", @@ -6348,6 +6349,9 @@ "@babel/core": { "optional": true }, + "@jest/transform": { + "optional": true + }, "@jest/types": { "optional": true }, @@ -11545,9 +11549,9 @@ "integrity": "sha512-aZbgViZrg1QNcG+LULa7nhZpJTZSLm/mXnHXnbAbjmN5aSa0y7V+wvv6+4WaBtpISJzThKy+PIPxc1Nq1EJ9mg==" }, "ts-jest": { - "version": "29.1.2", - "resolved": "https://registry.npmjs.org/ts-jest/-/ts-jest-29.1.2.tgz", - "integrity": "sha512-br6GJoH/WUX4pu7FbZXuWGKGNDuU7b8Uj77g/Sp7puZV6EXzuByl6JrECvm0MzVzSTkSHWTihsXt+5XYER5b+g==", + "version": "29.1.5", + "resolved": "https://registry.npmjs.org/ts-jest/-/ts-jest-29.1.5.tgz", + "integrity": "sha512-UuClSYxM7byvvYfyWdFI+/2UxMmwNyJb0NPkZPQE2hew3RurV7l7zURgOHAd/1I1ZdPpe3GUsXNXAcN8TFKSIg==", "dev": true, "requires": { "bs-logger": "0.x", diff --git a/superset-websocket/package.json b/superset-websocket/package.json index bf481e00eae0..b3fd45441f9e 100644 --- a/superset-websocket/package.json +++ b/superset-websocket/package.json @@ -43,7 +43,7 @@ "eslint-plugin-lodash": "^7.4.0", "jest": "^29.7.0", "prettier": "^3.2.5", - "ts-jest": "^29.1.2", + "ts-jest": "^29.1.5", "ts-node": "^10.9.2", "typescript": "^4.9.5" }, From 7d6e933348576de375a742b879d01e11c5e92512 Mon Sep 17 00:00:00 2001 From: Ville Brofeldt <33317356+villebro@users.noreply.github.com> Date: Mon, 1 Jul 2024 20:22:11 +0300 Subject: [PATCH 24/31] chore(key-value): convert command to dao (#29344) --- .../commands/dashboard/permalink/create.py | 12 +- superset/commands/dashboard/permalink/get.py | 9 +- .../commands/distributed_lock/__init__.py | 0 .../commands/distributed_lock/base.py | 25 ++ superset/commands/distributed_lock/create.py | 64 +++ .../delete.py} | 53 +-- superset/commands/distributed_lock/get.py | 45 ++ superset/commands/explore/permalink/create.py | 16 +- superset/commands/explore/permalink/get.py | 8 +- superset/commands/key_value/create.py | 102 ----- superset/commands/key_value/delete.py | 63 --- 
superset/commands/key_value/get.py | 71 ---- superset/commands/key_value/update.py | 87 ---- superset/commands/key_value/upsert.py | 104 ----- superset/daos/key_value.py | 145 +++++++ .../lock.py => distributed_lock/__init__.py} | 71 +--- .../__init__.py => distributed_lock/types.py} | 5 + .../distributed_lock/utils.py | 29 ++ superset/exceptions.py | 6 + superset/extensions/metastore_cache.py | 49 +-- superset/key_value/shared_entries.py | 18 +- superset/key_value/types.py | 9 +- superset/key_value/utils.py | 4 +- superset/utils/oauth2.py | 2 +- .../explore/permalink/commands_tests.py | 9 +- .../extensions/metastore_cache_test.py | 1 + .../key_value/commands/create_test.py | 96 ----- .../key_value/commands/delete_test.py | 84 ---- .../key_value/commands/fixtures.py | 69 --- .../key_value/commands/get_test.py | 103 ----- .../key_value/commands/update_test.py | 97 ----- .../key_value/commands/upsert_test.py | 101 ----- tests/unit_tests/dao/key_value_test.py | 395 ++++++++++++++++++ tests/unit_tests/distributed_lock/__init__.py | 0 .../distributed_lock_tests.py} | 53 ++- tests/unit_tests/fixtures/common.py | 26 +- 36 files changed, 868 insertions(+), 1163 deletions(-) create mode 100644 superset/commands/distributed_lock/__init__.py rename tests/integration_tests/key_value/__init__.py => superset/commands/distributed_lock/base.py (54%) create mode 100644 superset/commands/distributed_lock/create.py rename superset/commands/{key_value/delete_expired.py => distributed_lock/delete.py} (51%) create mode 100644 superset/commands/distributed_lock/get.py delete mode 100644 superset/commands/key_value/create.py delete mode 100644 superset/commands/key_value/delete.py delete mode 100644 superset/commands/key_value/get.py delete mode 100644 superset/commands/key_value/update.py delete mode 100644 superset/commands/key_value/upsert.py create mode 100644 superset/daos/key_value.py rename superset/{utils/lock.py => distributed_lock/__init__.py} (55%) rename superset/{commands/key_value/__init__.py => distributed_lock/types.py} (91%) rename tests/integration_tests/key_value/commands/__init__.py => superset/distributed_lock/utils.py (52%) delete mode 100644 tests/integration_tests/key_value/commands/create_test.py delete mode 100644 tests/integration_tests/key_value/commands/delete_test.py delete mode 100644 tests/integration_tests/key_value/commands/fixtures.py delete mode 100644 tests/integration_tests/key_value/commands/get_test.py delete mode 100644 tests/integration_tests/key_value/commands/update_test.py delete mode 100644 tests/integration_tests/key_value/commands/upsert_test.py create mode 100644 tests/unit_tests/dao/key_value_test.py create mode 100644 tests/unit_tests/distributed_lock/__init__.py rename tests/unit_tests/{utils/lock_tests.py => distributed_lock/distributed_lock_tests.py} (51%) diff --git a/superset/commands/dashboard/permalink/create.py b/superset/commands/dashboard/permalink/create.py index 7d08f78e9a9b..20bc5118f576 100644 --- a/superset/commands/dashboard/permalink/create.py +++ b/superset/commands/dashboard/permalink/create.py @@ -19,9 +19,10 @@ from sqlalchemy.exc import SQLAlchemyError +from superset import db from superset.commands.dashboard.permalink.base import BaseDashboardPermalinkCommand -from superset.commands.key_value.upsert import UpsertKeyValueCommand from superset.daos.dashboard import DashboardDAO +from superset.daos.key_value import KeyValueDAO from superset.dashboards.permalink.exceptions import DashboardPermalinkCreateFailedError from 
superset.dashboards.permalink.types import DashboardPermalinkState from superset.key_value.exceptions import ( @@ -70,14 +71,15 @@ def run(self) -> str: "state": self.state, } user_id = get_user_id() - key = UpsertKeyValueCommand( + entry = KeyValueDAO.upsert_entry( resource=self.resource, key=get_deterministic_uuid(self.salt, (user_id, value)), value=value, codec=self.codec, - ).run() - assert key.id # for type checks - return encode_permalink_key(key=key.id, salt=self.salt) + ) + db.session.flush() + assert entry.id # for type checks + return encode_permalink_key(key=entry.id, salt=self.salt) def validate(self) -> None: pass diff --git a/superset/commands/dashboard/permalink/get.py b/superset/commands/dashboard/permalink/get.py index 32efa688813c..e87711a5bfeb 100644 --- a/superset/commands/dashboard/permalink/get.py +++ b/superset/commands/dashboard/permalink/get.py @@ -21,8 +21,8 @@ from superset.commands.dashboard.exceptions import DashboardNotFoundError from superset.commands.dashboard.permalink.base import BaseDashboardPermalinkCommand -from superset.commands.key_value.get import GetKeyValueCommand from superset.daos.dashboard import DashboardDAO +from superset.daos.key_value import KeyValueDAO from superset.dashboards.permalink.exceptions import DashboardPermalinkGetFailedError from superset.dashboards.permalink.types import DashboardPermalinkValue from superset.key_value.exceptions import ( @@ -43,12 +43,7 @@ def run(self) -> Optional[DashboardPermalinkValue]: self.validate() try: key = decode_permalink_id(self.key, salt=self.salt) - command = GetKeyValueCommand( - resource=self.resource, - key=key, - codec=self.codec, - ) - value: Optional[DashboardPermalinkValue] = command.run() + value = KeyValueDAO.get_value(self.resource, key, self.codec) if value: DashboardDAO.get_by_id_or_slug(value["dashboardId"]) return value diff --git a/superset/commands/distributed_lock/__init__.py b/superset/commands/distributed_lock/__init__.py new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/tests/integration_tests/key_value/__init__.py b/superset/commands/distributed_lock/base.py similarity index 54% rename from tests/integration_tests/key_value/__init__.py rename to superset/commands/distributed_lock/base.py index 13a83393a912..322063f54e89 100644 --- a/tests/integration_tests/key_value/__init__.py +++ b/superset/commands/distributed_lock/base.py @@ -14,3 +14,28 @@ # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. 
+ +import logging +import uuid +from typing import Any + +from flask import current_app + +from superset.commands.base import BaseCommand +from superset.distributed_lock.utils import get_key +from superset.key_value.types import JsonKeyValueCodec, KeyValueResource + +logger = logging.getLogger(__name__) +stats_logger = current_app.config["STATS_LOGGER"] + + +class BaseDistributedLockCommand(BaseCommand): + key: uuid.UUID + codec = JsonKeyValueCodec() + resource = KeyValueResource.LOCK + + def __init__(self, namespace: str, params: dict[str, Any] | None = None): + self.key = get_key(namespace, **(params or {})) + + def validate(self) -> None: + pass diff --git a/superset/commands/distributed_lock/create.py b/superset/commands/distributed_lock/create.py new file mode 100644 index 000000000000..c654089336af --- /dev/null +++ b/superset/commands/distributed_lock/create.py @@ -0,0 +1,64 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. + +import logging +from datetime import datetime, timedelta +from functools import partial + +from flask import current_app +from sqlalchemy.exc import SQLAlchemyError + +from superset.commands.distributed_lock.base import BaseDistributedLockCommand +from superset.daos.key_value import KeyValueDAO +from superset.exceptions import CreateKeyValueDistributedLockFailedException +from superset.key_value.exceptions import ( + KeyValueCodecEncodeException, + KeyValueUpsertFailedError, +) +from superset.key_value.types import KeyValueResource +from superset.utils.decorators import on_error, transaction + +logger = logging.getLogger(__name__) +stats_logger = current_app.config["STATS_LOGGER"] + + +class CreateDistributedLock(BaseDistributedLockCommand): + lock_expiration = timedelta(seconds=30) + + def validate(self) -> None: + pass + + @transaction( + on_error=partial( + on_error, + catches=( + KeyValueCodecEncodeException, + KeyValueUpsertFailedError, + SQLAlchemyError, + ), + reraise=CreateKeyValueDistributedLockFailedException, + ), + ) + def run(self) -> None: + KeyValueDAO.delete_expired_entries(self.resource) + KeyValueDAO.create_entry( + resource=KeyValueResource.LOCK, + value={"value": True}, + codec=self.codec, + key=self.key, + expires_on=datetime.now() + self.lock_expiration, + ) diff --git a/superset/commands/key_value/delete_expired.py b/superset/commands/distributed_lock/delete.py similarity index 51% rename from superset/commands/key_value/delete_expired.py rename to superset/commands/distributed_lock/delete.py index 54991c7531d2..cd279dbe2409 100644 --- a/superset/commands/key_value/delete_expired.py +++ b/superset/commands/distributed_lock/delete.py @@ -14,49 +14,36 @@ # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. 
+ import logging -from datetime import datetime from functools import partial -from sqlalchemy import and_ +from flask import current_app +from sqlalchemy.exc import SQLAlchemyError -from superset import db -from superset.commands.base import BaseCommand +from superset.commands.distributed_lock.base import BaseDistributedLockCommand +from superset.daos.key_value import KeyValueDAO +from superset.exceptions import DeleteKeyValueDistributedLockFailedException from superset.key_value.exceptions import KeyValueDeleteFailedError -from superset.key_value.models import KeyValueEntry -from superset.key_value.types import KeyValueResource from superset.utils.decorators import on_error, transaction logger = logging.getLogger(__name__) +stats_logger = current_app.config["STATS_LOGGER"] -class DeleteExpiredKeyValueCommand(BaseCommand): - resource: KeyValueResource - - def __init__(self, resource: KeyValueResource): - """ - Delete all expired key-value pairs - - :param resource: the resource (dashboard, chart etc) - :return: was the entry deleted or not - """ - self.resource = resource - - @transaction(on_error=partial(on_error, reraise=KeyValueDeleteFailedError)) - def run(self) -> None: - self.delete_expired() - +class DeleteDistributedLock(BaseDistributedLockCommand): def validate(self) -> None: pass - def delete_expired(self) -> None: - ( - db.session.query(KeyValueEntry) - .filter( - and_( - KeyValueEntry.resource == self.resource.value, - KeyValueEntry.expires_on <= datetime.now(), - ) - ) - .delete() - ) + @transaction( + on_error=partial( + on_error, + catches=( + KeyValueDeleteFailedError, + SQLAlchemyError, + ), + reraise=DeleteKeyValueDistributedLockFailedException, + ), + ) + def run(self) -> None: + KeyValueDAO.delete_entry(self.resource, self.key) diff --git a/superset/commands/distributed_lock/get.py b/superset/commands/distributed_lock/get.py new file mode 100644 index 000000000000..562456410935 --- /dev/null +++ b/superset/commands/distributed_lock/get.py @@ -0,0 +1,45 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
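The create and delete lock commands above share one error-handling pattern: a @transaction decorator whose on_error handler converts the caught low-level exceptions into a single domain exception. A schematic sketch of that pattern with a hypothetical command, using the same helpers imported in the diff:

# Sketch only: the decorator pattern used by the lock commands above.
from functools import partial

from sqlalchemy.exc import SQLAlchemyError

from superset.commands.base import BaseCommand
from superset.exceptions import DeleteKeyValueDistributedLockFailedException
from superset.utils.decorators import on_error, transaction

class ExampleCommand(BaseCommand):  # hypothetical command for illustration
    def validate(self) -> None:
        pass

    # Exceptions listed in `catches` are handled by on_error and surfaced as
    # the domain exception passed to `reraise`, as in the commands above.
    @transaction(
        on_error=partial(
            on_error,
            catches=(SQLAlchemyError,),
            reraise=DeleteKeyValueDistributedLockFailedException,
        ),
    )
    def run(self) -> None:
        ...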
+ +from __future__ import annotations + +import logging +from typing import cast + +from flask import current_app + +from superset.commands.distributed_lock.base import BaseDistributedLockCommand +from superset.daos.key_value import KeyValueDAO +from superset.distributed_lock.types import LockValue + +logger = logging.getLogger(__name__) +stats_logger = current_app.config["STATS_LOGGER"] + + +class GetDistributedLock(BaseDistributedLockCommand): + def validate(self) -> None: + pass + + def run(self) -> LockValue | None: + entry = KeyValueDAO.get_entry( + resource=self.resource, + key=self.key, + ) + if not entry or entry.is_expired(): + return None + + return cast(LockValue, self.codec.decode(entry.value)) diff --git a/superset/commands/explore/permalink/create.py b/superset/commands/explore/permalink/create.py index 2128fa4b8c40..926b9ba919f4 100644 --- a/superset/commands/explore/permalink/create.py +++ b/superset/commands/explore/permalink/create.py @@ -20,8 +20,9 @@ from sqlalchemy.exc import SQLAlchemyError +from superset import db from superset.commands.explore.permalink.base import BaseExplorePermalinkCommand -from superset.commands.key_value.create import CreateKeyValueCommand +from superset.daos.key_value import KeyValueDAO from superset.explore.permalink.exceptions import ExplorePermalinkCreateFailedError from superset.explore.utils import check_access as check_chart_access from superset.key_value.exceptions import ( @@ -65,15 +66,12 @@ def run(self) -> str: "datasource": self.datasource, "state": self.state, } - command = CreateKeyValueCommand( - resource=self.resource, - value=value, - codec=self.codec, - ) - key = command.run() - if key.id is None: + entry = KeyValueDAO.create_entry(self.resource, value, self.codec) + db.session.flush() + key = entry.id + if key is None: raise ExplorePermalinkCreateFailedError("Unexpected missing key id") - return encode_permalink_key(key=key.id, salt=self.salt) + return encode_permalink_key(key=key, salt=self.salt) def validate(self) -> None: pass diff --git a/superset/commands/explore/permalink/get.py b/superset/commands/explore/permalink/get.py index 4c01db1ccab4..7dc1db40df24 100644 --- a/superset/commands/explore/permalink/get.py +++ b/superset/commands/explore/permalink/get.py @@ -21,7 +21,7 @@ from superset.commands.dataset.exceptions import DatasetNotFoundError from superset.commands.explore.permalink.base import BaseExplorePermalinkCommand -from superset.commands.key_value.get import GetKeyValueCommand +from superset.daos.key_value import KeyValueDAO from superset.explore.permalink.exceptions import ExplorePermalinkGetFailedError from superset.explore.permalink.types import ExplorePermalinkValue from superset.explore.utils import check_access as check_chart_access @@ -44,11 +44,7 @@ def run(self) -> Optional[ExplorePermalinkValue]: self.validate() try: key = decode_permalink_id(self.key, salt=self.salt) - value: Optional[ExplorePermalinkValue] = GetKeyValueCommand( - resource=self.resource, - key=key, - codec=self.codec, - ).run() + value = KeyValueDAO.get_value(self.resource, key, self.codec) if value: chart_id: Optional[int] = value.get("chartId") # keep this backward compatible for old permalinks diff --git a/superset/commands/key_value/create.py b/superset/commands/key_value/create.py deleted file mode 100644 index 81b7c4c3d4a9..000000000000 --- a/superset/commands/key_value/create.py +++ /dev/null @@ -1,102 +0,0 @@ -# Licensed to the Apache Software Foundation (ASF) under one -# or more contributor license agreements. 
See the NOTICE file -# distributed with this work for additional information -# regarding copyright ownership. The ASF licenses this file -# to you under the Apache License, Version 2.0 (the -# "License"); you may not use this file except in compliance -# with the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# KIND, either express or implied. See the License for the -# specific language governing permissions and limitations -# under the License. -import logging -from datetime import datetime -from functools import partial -from typing import Any, Optional, Union -from uuid import UUID - -from superset import db -from superset.commands.base import BaseCommand -from superset.key_value.exceptions import KeyValueCreateFailedError -from superset.key_value.models import KeyValueEntry -from superset.key_value.types import Key, KeyValueCodec, KeyValueResource -from superset.utils.core import get_user_id -from superset.utils.decorators import on_error, transaction - -logger = logging.getLogger(__name__) - - -class CreateKeyValueCommand(BaseCommand): - resource: KeyValueResource - value: Any - codec: KeyValueCodec - key: Optional[Union[int, UUID]] - expires_on: Optional[datetime] - - def __init__( # pylint: disable=too-many-arguments - self, - resource: KeyValueResource, - value: Any, - codec: KeyValueCodec, - key: Optional[Union[int, UUID]] = None, - expires_on: Optional[datetime] = None, - ): - """ - Create a new key-value pair - - :param resource: the resource (dashboard, chart etc) - :param value: the value to persist in the key-value store - :param codec: codec used to encode the value - :param key: id of entry (autogenerated if undefined) - :param expires_on: entry expiration time - : - """ - self.resource = resource - self.value = value - self.codec = codec - self.key = key - self.expires_on = expires_on - - @transaction(on_error=partial(on_error, reraise=KeyValueCreateFailedError)) - def run(self) -> Key: - """ - Persist the value - - :return: the key associated with the persisted value - - """ - - return self.create() - - def validate(self) -> None: - pass - - def create(self) -> Key: - try: - value = self.codec.encode(self.value) - except Exception as ex: - raise KeyValueCreateFailedError("Unable to encode value") from ex - entry = KeyValueEntry( - resource=self.resource.value, - value=value, - created_on=datetime.now(), - created_by_fk=get_user_id(), - expires_on=self.expires_on, - ) - if self.key is not None: - try: - if isinstance(self.key, UUID): - entry.uuid = self.key - else: - entry.id = self.key - except ValueError as ex: - raise KeyValueCreateFailedError() from ex - - db.session.add(entry) - db.session.flush() - return Key(id=entry.id, uuid=entry.uuid) diff --git a/superset/commands/key_value/delete.py b/superset/commands/key_value/delete.py deleted file mode 100644 index a3fdf079c73c..000000000000 --- a/superset/commands/key_value/delete.py +++ /dev/null @@ -1,63 +0,0 @@ -# Licensed to the Apache Software Foundation (ASF) under one -# or more contributor license agreements. See the NOTICE file -# distributed with this work for additional information -# regarding copyright ownership. The ASF licenses this file -# to you under the Apache License, Version 2.0 (the -# "License"); you may not use this file except in compliance -# with the License. 
You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# KIND, either express or implied. See the License for the -# specific language governing permissions and limitations -# under the License. -import logging -from functools import partial -from typing import Union -from uuid import UUID - -from superset import db -from superset.commands.base import BaseCommand -from superset.key_value.exceptions import KeyValueDeleteFailedError -from superset.key_value.models import KeyValueEntry -from superset.key_value.types import KeyValueResource -from superset.key_value.utils import get_filter -from superset.utils.decorators import on_error, transaction - -logger = logging.getLogger(__name__) - - -class DeleteKeyValueCommand(BaseCommand): - key: Union[int, UUID] - resource: KeyValueResource - - def __init__(self, resource: KeyValueResource, key: Union[int, UUID]): - """ - Delete a key-value pair - - :param resource: the resource (dashboard, chart etc) - :param key: the key to delete - :return: was the entry deleted or not - """ - self.resource = resource - self.key = key - - @transaction(on_error=partial(on_error, reraise=KeyValueDeleteFailedError)) - def run(self) -> bool: - return self.delete() - - def validate(self) -> None: - pass - - def delete(self) -> bool: - if ( - entry := db.session.query(KeyValueEntry) - .filter_by(**get_filter(self.resource, self.key)) - .first() - ): - db.session.delete(entry) - return True - return False diff --git a/superset/commands/key_value/get.py b/superset/commands/key_value/get.py deleted file mode 100644 index 93550ee840c3..000000000000 --- a/superset/commands/key_value/get.py +++ /dev/null @@ -1,71 +0,0 @@ -# Licensed to the Apache Software Foundation (ASF) under one -# or more contributor license agreements. See the NOTICE file -# distributed with this work for additional information -# regarding copyright ownership. The ASF licenses this file -# to you under the Apache License, Version 2.0 (the -# "License"); you may not use this file except in compliance -# with the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# KIND, either express or implied. See the License for the -# specific language governing permissions and limitations -# under the License. 
- -import logging -from typing import Any, Optional, Union -from uuid import UUID - -from sqlalchemy.exc import SQLAlchemyError - -from superset import db -from superset.commands.base import BaseCommand -from superset.key_value.exceptions import KeyValueGetFailedError -from superset.key_value.models import KeyValueEntry -from superset.key_value.types import KeyValueCodec, KeyValueResource -from superset.key_value.utils import get_filter - -logger = logging.getLogger(__name__) - - -class GetKeyValueCommand(BaseCommand): - resource: KeyValueResource - key: Union[int, UUID] - codec: KeyValueCodec - - def __init__( - self, - resource: KeyValueResource, - key: Union[int, UUID], - codec: KeyValueCodec, - ): - """ - Retrieve a key value entry - - :param resource: the resource (dashboard, chart etc) - :param key: the key to retrieve - :param codec: codec used to decode the value - :return: the value associated with the key if present - """ - self.resource = resource - self.key = key - self.codec = codec - - def run(self) -> Any: - try: - return self.get() - except SQLAlchemyError as ex: - raise KeyValueGetFailedError() from ex - - def validate(self) -> None: - pass - - def get(self) -> Optional[Any]: - filter_ = get_filter(self.resource, self.key) - entry = db.session.query(KeyValueEntry).filter_by(**filter_).first() - if entry and not entry.is_expired(): - return self.codec.decode(entry.value) - return None diff --git a/superset/commands/key_value/update.py b/superset/commands/key_value/update.py deleted file mode 100644 index b6ffc22174f6..000000000000 --- a/superset/commands/key_value/update.py +++ /dev/null @@ -1,87 +0,0 @@ -# Licensed to the Apache Software Foundation (ASF) under one -# or more contributor license agreements. See the NOTICE file -# distributed with this work for additional information -# regarding copyright ownership. The ASF licenses this file -# to you under the Apache License, Version 2.0 (the -# "License"); you may not use this file except in compliance -# with the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# KIND, either express or implied. See the License for the -# specific language governing permissions and limitations -# under the License. 
- -import logging -from datetime import datetime -from functools import partial -from typing import Any, Optional, Union -from uuid import UUID - -from superset import db -from superset.commands.base import BaseCommand -from superset.key_value.exceptions import KeyValueUpdateFailedError -from superset.key_value.models import KeyValueEntry -from superset.key_value.types import Key, KeyValueCodec, KeyValueResource -from superset.key_value.utils import get_filter -from superset.utils.core import get_user_id -from superset.utils.decorators import on_error, transaction - -logger = logging.getLogger(__name__) - - -class UpdateKeyValueCommand(BaseCommand): - resource: KeyValueResource - value: Any - codec: KeyValueCodec - key: Union[int, UUID] - expires_on: Optional[datetime] - - def __init__( # pylint: disable=too-many-arguments - self, - resource: KeyValueResource, - key: Union[int, UUID], - value: Any, - codec: KeyValueCodec, - expires_on: Optional[datetime] = None, - ): - """ - Update a key value entry - - :param resource: the resource (dashboard, chart etc) - :param key: the key to update - :param value: the value to persist in the key-value store - :param codec: codec used to encode the value - :param expires_on: entry expiration time - :return: the key associated with the updated value - """ - self.resource = resource - self.key = key - self.value = value - self.codec = codec - self.expires_on = expires_on - - @transaction(on_error=partial(on_error, reraise=KeyValueUpdateFailedError)) - def run(self) -> Optional[Key]: - return self.update() - - def validate(self) -> None: - pass - - def update(self) -> Optional[Key]: - filter_ = get_filter(self.resource, self.key) - entry: KeyValueEntry = ( - db.session.query(KeyValueEntry).filter_by(**filter_).first() - ) - if entry: - entry.value = self.codec.encode(self.value) - entry.expires_on = self.expires_on - entry.changed_on = datetime.now() - entry.changed_by_fk = get_user_id() - db.session.flush() - return Key(id=entry.id, uuid=entry.uuid) - - return None diff --git a/superset/commands/key_value/upsert.py b/superset/commands/key_value/upsert.py deleted file mode 100644 index 32918d9b1439..000000000000 --- a/superset/commands/key_value/upsert.py +++ /dev/null @@ -1,104 +0,0 @@ -# Licensed to the Apache Software Foundation (ASF) under one -# or more contributor license agreements. See the NOTICE file -# distributed with this work for additional information -# regarding copyright ownership. The ASF licenses this file -# to you under the Apache License, Version 2.0 (the -# "License"); you may not use this file except in compliance -# with the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# KIND, either express or implied. See the License for the -# specific language governing permissions and limitations -# under the License. 
- -import logging -from datetime import datetime -from functools import partial -from typing import Any, Optional, Union -from uuid import UUID - -from sqlalchemy.exc import SQLAlchemyError - -from superset import db -from superset.commands.base import BaseCommand -from superset.commands.key_value.create import CreateKeyValueCommand -from superset.key_value.exceptions import ( - KeyValueCreateFailedError, - KeyValueUpsertFailedError, -) -from superset.key_value.models import KeyValueEntry -from superset.key_value.types import Key, KeyValueCodec, KeyValueResource -from superset.key_value.utils import get_filter -from superset.utils.core import get_user_id -from superset.utils.decorators import on_error, transaction - -logger = logging.getLogger(__name__) - - -class UpsertKeyValueCommand(BaseCommand): - resource: KeyValueResource - value: Any - key: Union[int, UUID] - codec: KeyValueCodec - expires_on: Optional[datetime] - - def __init__( # pylint: disable=too-many-arguments - self, - resource: KeyValueResource, - key: Union[int, UUID], - value: Any, - codec: KeyValueCodec, - expires_on: Optional[datetime] = None, - ): - """ - Upsert a key value entry - - :param resource: the resource (dashboard, chart etc) - :param key: the key to update - :param value: the value to persist in the key-value store - :param codec: codec used to encode the value - :param expires_on: entry expiration time - :return: the key associated with the updated value - """ - self.resource = resource - self.key = key - self.value = value - self.codec = codec - self.expires_on = expires_on - - @transaction( - on_error=partial( - on_error, - catches=(KeyValueCreateFailedError, SQLAlchemyError), - reraise=KeyValueUpsertFailedError, - ), - ) - def run(self) -> Key: - return self.upsert() - - def validate(self) -> None: - pass - - def upsert(self) -> Key: - if ( - entry := db.session.query(KeyValueEntry) - .filter_by(**get_filter(self.resource, self.key)) - .first() - ): - entry.value = self.codec.encode(self.value) - entry.expires_on = self.expires_on - entry.changed_on = datetime.now() - entry.changed_by_fk = get_user_id() - return Key(entry.id, entry.uuid) - - return CreateKeyValueCommand( - resource=self.resource, - value=self.value, - codec=self.codec, - key=self.key, - expires_on=self.expires_on, - ).run() diff --git a/superset/daos/key_value.py b/superset/daos/key_value.py new file mode 100644 index 000000000000..f15293abcab8 --- /dev/null +++ b/superset/daos/key_value.py @@ -0,0 +1,145 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
+from __future__ import annotations + +import logging +from datetime import datetime +from typing import Any +from uuid import UUID + +from sqlalchemy import and_ + +from superset import db +from superset.daos.base import BaseDAO +from superset.key_value.exceptions import ( + KeyValueCreateFailedError, + KeyValueUpdateFailedError, +) +from superset.key_value.models import KeyValueEntry +from superset.key_value.types import Key, KeyValueCodec, KeyValueResource +from superset.key_value.utils import get_filter +from superset.utils.core import get_user_id + +logger = logging.getLogger(__name__) + + +class KeyValueDAO(BaseDAO[KeyValueEntry]): + @staticmethod + def get_entry( + resource: KeyValueResource, + key: Key, + ) -> KeyValueEntry | None: + filter_ = get_filter(resource, key) + return db.session.query(KeyValueEntry).filter_by(**filter_).first() + + @classmethod + def get_value( + cls, + resource: KeyValueResource, + key: Key, + codec: KeyValueCodec, + ) -> Any: + entry = cls.get_entry(resource, key) + if not entry or entry.is_expired(): + return None + + return codec.decode(entry.value) + + @staticmethod + def delete_entry(resource: KeyValueResource, key: Key) -> bool: + if entry := KeyValueDAO.get_entry(resource, key): + db.session.delete(entry) + return True + + return False + + @staticmethod + def delete_expired_entries(resource: KeyValueResource) -> None: + ( + db.session.query(KeyValueEntry) + .filter( + and_( + KeyValueEntry.resource == resource.value, + KeyValueEntry.expires_on <= datetime.now(), + ) + ) + .delete() + ) + + @staticmethod + def create_entry( + resource: KeyValueResource, + value: Any, + codec: KeyValueCodec, + key: Key | None = None, + expires_on: datetime | None = None, + ) -> KeyValueEntry: + try: + encoded_value = codec.encode(value) + except Exception as ex: + raise KeyValueCreateFailedError("Unable to encode value") from ex + entry = KeyValueEntry( + resource=resource.value, + value=encoded_value, + created_on=datetime.now(), + created_by_fk=get_user_id(), + expires_on=expires_on, + ) + if key is not None: + try: + if isinstance(key, UUID): + entry.uuid = key + else: + entry.id = key + except ValueError as ex: + raise KeyValueCreateFailedError() from ex + db.session.add(entry) + return entry + + @staticmethod + def upsert_entry( + resource: KeyValueResource, + value: Any, + codec: KeyValueCodec, + key: Key, + expires_on: datetime | None = None, + ) -> KeyValueEntry: + if entry := KeyValueDAO.get_entry(resource, key): + entry.value = codec.encode(value) + entry.expires_on = expires_on + entry.changed_on = datetime.now() + entry.changed_by_fk = get_user_id() + return entry + + return KeyValueDAO.create_entry(resource, value, codec, key, expires_on) + + @staticmethod + def update_entry( + resource: KeyValueResource, + value: Any, + codec: KeyValueCodec, + key: Key, + expires_on: datetime | None = None, + ) -> KeyValueEntry: + if entry := KeyValueDAO.get_entry(resource, key): + entry.value = codec.encode(value) + entry.expires_on = expires_on + entry.changed_on = datetime.now() + entry.changed_by_fk = get_user_id() + return entry + + raise KeyValueUpdateFailedError() diff --git a/superset/utils/lock.py b/superset/distributed_lock/__init__.py similarity index 55% rename from superset/utils/lock.py rename to superset/distributed_lock/__init__.py index 4723b57fa1b0..c4af73ac0f09 100644 --- a/superset/utils/lock.py +++ b/superset/distributed_lock/__init__.py @@ -21,40 +21,18 @@ import uuid from collections.abc import Iterator from contextlib import contextmanager -from 
datetime import datetime, timedelta -from typing import Any, cast, TypeVar, Union +from datetime import timedelta +from typing import Any +from superset.distributed_lock.utils import get_key from superset.exceptions import CreateKeyValueDistributedLockFailedException -from superset.key_value.exceptions import KeyValueCreateFailedError from superset.key_value.types import JsonKeyValueCodec, KeyValueResource -from superset.utils import json -LOCK_EXPIRATION = timedelta(seconds=30) logger = logging.getLogger(__name__) - -def serialize(params: dict[str, Any]) -> str: - """ - Serialize parameters into a string. - """ - - T = TypeVar( - "T", - bound=Union[dict[str, Any], list[Any], int, float, str, bool, None], - ) - - def sort(obj: T) -> T: - if isinstance(obj, dict): - return cast(T, {k: sort(v) for k, v in sorted(obj.items())}) - if isinstance(obj, list): - return cast(T, [sort(x) for x in obj]) - return obj - - return json.dumps(params) - - -def get_key(namespace: str, **kwargs: Any) -> uuid.UUID: - return uuid.uuid5(uuid.uuid5(uuid.NAMESPACE_DNS, namespace), serialize(kwargs)) +CODEC = JsonKeyValueCodec() +LOCK_EXPIRATION = timedelta(seconds=30) +RESOURCE = KeyValueResource.LOCK @contextmanager @@ -75,28 +53,25 @@ def KeyValueDistributedLock( # pylint: disable=invalid-name :yields: A unique identifier (UUID) for the acquired lock (the KV key). :raises CreateKeyValueDistributedLockFailedException: If the lock is taken. """ + # pylint: disable=import-outside-toplevel - from superset.commands.key_value.create import CreateKeyValueCommand - from superset.commands.key_value.delete import DeleteKeyValueCommand - from superset.commands.key_value.delete_expired import DeleteExpiredKeyValueCommand + from superset.commands.distributed_lock.create import CreateDistributedLock + from superset.commands.distributed_lock.delete import DeleteDistributedLock + from superset.commands.distributed_lock.get import GetDistributedLock key = get_key(namespace, **kwargs) + value = GetDistributedLock(namespace=namespace, params=kwargs).run() + if value: + logger.debug("Lock on namespace %s for key %s already taken", namespace, key) + raise CreateKeyValueDistributedLockFailedException("Lock already taken") + logger.debug("Acquiring lock on namespace %s for key %s", namespace, key) try: - DeleteExpiredKeyValueCommand(resource=KeyValueResource.LOCK).run() - CreateKeyValueCommand( - resource=KeyValueResource.LOCK, - codec=JsonKeyValueCodec(), - key=key, - value=True, - expires_on=datetime.now() + LOCK_EXPIRATION, - ).run() - - yield key - - DeleteKeyValueCommand(resource=KeyValueResource.LOCK, key=key).run() - logger.debug("Removed lock on namespace %s for key %s", namespace, key) - except KeyValueCreateFailedError as ex: - raise CreateKeyValueDistributedLockFailedException( - "Error acquiring lock" - ) from ex + CreateDistributedLock(namespace=namespace, params=kwargs).run() + except CreateKeyValueDistributedLockFailedException as ex: + logger.debug("Lock on namespace %s for key %s already taken", namespace, key) + raise CreateKeyValueDistributedLockFailedException("Lock already taken") from ex + + yield key + DeleteDistributedLock(namespace=namespace, params=kwargs).run() + logger.debug("Removed lock on namespace %s for key %s", namespace, key) diff --git a/superset/commands/key_value/__init__.py b/superset/distributed_lock/types.py similarity index 91% rename from superset/commands/key_value/__init__.py rename to superset/distributed_lock/types.py index 13a83393a912..b714913e8e8b 100644 --- 
a/superset/commands/key_value/__init__.py +++ b/superset/distributed_lock/types.py @@ -14,3 +14,8 @@ # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. +from typing import TypedDict + + +class LockValue(TypedDict): + value: bool diff --git a/tests/integration_tests/key_value/commands/__init__.py b/superset/distributed_lock/utils.py similarity index 52% rename from tests/integration_tests/key_value/commands/__init__.py rename to superset/distributed_lock/utils.py index 13a83393a912..09ed12d704d9 100644 --- a/tests/integration_tests/key_value/commands/__init__.py +++ b/superset/distributed_lock/utils.py @@ -14,3 +14,32 @@ # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. + +import uuid +from typing import Any, cast, TypeVar, Union + +from superset.utils import json + + +def serialize(params: dict[str, Any]) -> str: + """ + Serialize parameters into a string. + """ + + T = TypeVar( + "T", + bound=Union[dict[str, Any], list[Any], int, float, str, bool, None], + ) + + def sort(obj: T) -> T: + if isinstance(obj, dict): + return cast(T, {k: sort(v) for k, v in sorted(obj.items())}) + if isinstance(obj, list): + return cast(T, [sort(x) for x in obj]) + return obj + + return json.dumps(params) + + +def get_key(namespace: str, **kwargs: Any) -> uuid.UUID: + return uuid.uuid5(uuid.uuid5(uuid.NAMESPACE_DNS, namespace), serialize(kwargs)) diff --git a/superset/exceptions.py b/superset/exceptions.py index 47cd511f8f20..dd669f5b72ae 100644 --- a/superset/exceptions.py +++ b/superset/exceptions.py @@ -379,6 +379,12 @@ class CreateKeyValueDistributedLockFailedException(Exception): """ +class DeleteKeyValueDistributedLockFailedException(Exception): + """ + Exception to signalize failure to delete lock. 
+ """ + + class DatabaseNotFoundException(SupersetErrorException): status = 404 diff --git a/superset/extensions/metastore_cache.py b/superset/extensions/metastore_cache.py index 1c89e8459774..1195bd8edf65 100644 --- a/superset/extensions/metastore_cache.py +++ b/superset/extensions/metastore_cache.py @@ -21,7 +21,10 @@ from flask import current_app, Flask, has_app_context from flask_caching import BaseCache +from sqlalchemy.exc import SQLAlchemyError +from superset import db +from superset.daos.key_value import KeyValueDAO from superset.key_value.exceptions import KeyValueCreateFailedError from superset.key_value.types import ( KeyValueCodec, @@ -29,6 +32,7 @@ PickleKeyValueCodec, ) from superset.key_value.utils import get_uuid_namespace +from superset.utils.decorators import transaction RESOURCE = KeyValueResource.METASTORE_CACHE @@ -68,15 +72,6 @@ def factory( def get_key(self, key: str) -> UUID: return uuid3(self.namespace, key) - @staticmethod - def _prune() -> None: - # pylint: disable=import-outside-toplevel - from superset.commands.key_value.delete_expired import ( - DeleteExpiredKeyValueCommand, - ) - - DeleteExpiredKeyValueCommand(resource=RESOURCE).run() - def _get_expiry(self, timeout: Optional[int]) -> Optional[datetime]: timeout = self._normalize_timeout(timeout) if timeout is not None and timeout > 0: @@ -84,44 +79,34 @@ def _get_expiry(self, timeout: Optional[int]) -> Optional[datetime]: return None def set(self, key: str, value: Any, timeout: Optional[int] = None) -> bool: - # pylint: disable=import-outside-toplevel - from superset.commands.key_value.upsert import UpsertKeyValueCommand - - UpsertKeyValueCommand( + KeyValueDAO.upsert_entry( resource=RESOURCE, key=self.get_key(key), value=value, codec=self.codec, expires_on=self._get_expiry(timeout), - ).run() + ) + db.session.commit() # pylint: disable=consider-using-transaction return True def add(self, key: str, value: Any, timeout: Optional[int] = None) -> bool: - # pylint: disable=import-outside-toplevel - from superset.commands.key_value.create import CreateKeyValueCommand - try: - self._prune() - CreateKeyValueCommand( + KeyValueDAO.delete_expired_entries(RESOURCE) + KeyValueDAO.create_entry( resource=RESOURCE, value=value, codec=self.codec, key=self.get_key(key), expires_on=self._get_expiry(timeout), - ).run() + ) + db.session.commit() # pylint: disable=consider-using-transaction return True - except KeyValueCreateFailedError: + except (SQLAlchemyError, KeyValueCreateFailedError): + db.session.rollback() # pylint: disable=consider-using-transaction return False def get(self, key: str) -> Any: - # pylint: disable=import-outside-toplevel - from superset.commands.key_value.get import GetKeyValueCommand - - return GetKeyValueCommand( - resource=RESOURCE, - key=self.get_key(key), - codec=self.codec, - ).run() + return KeyValueDAO.get_value(RESOURCE, self.get_key(key), self.codec) def has(self, key: str) -> bool: entry = self.get(key) @@ -129,8 +114,6 @@ def has(self, key: str) -> bool: return True return False + @transaction() def delete(self, key: str) -> Any: - # pylint: disable=import-outside-toplevel - from superset.commands.key_value.delete import DeleteKeyValueCommand - - return DeleteKeyValueCommand(resource=RESOURCE, key=self.get_key(key)).run() + return KeyValueDAO.delete_entry(RESOURCE, self.get_key(key)) diff --git a/superset/key_value/shared_entries.py b/superset/key_value/shared_entries.py index 130313157a53..c2acafa80752 100644 --- a/superset/key_value/shared_entries.py +++ 
b/superset/key_value/shared_entries.py @@ -18,8 +18,10 @@ from typing import Any, Optional from uuid import uuid3 +from superset.daos.key_value import KeyValueDAO from superset.key_value.types import JsonKeyValueCodec, KeyValueResource, SharedKey from superset.key_value.utils import get_uuid_namespace, random_key +from superset.utils.decorators import transaction RESOURCE = KeyValueResource.APP NAMESPACE = get_uuid_namespace("") @@ -27,24 +29,14 @@ def get_shared_value(key: SharedKey) -> Optional[Any]: - # pylint: disable=import-outside-toplevel - from superset.commands.key_value.get import GetKeyValueCommand - uuid_key = uuid3(NAMESPACE, key) - return GetKeyValueCommand(RESOURCE, key=uuid_key, codec=CODEC).run() + return KeyValueDAO.get_value(RESOURCE, uuid_key, CODEC) +@transaction() def set_shared_value(key: SharedKey, value: Any) -> None: - # pylint: disable=import-outside-toplevel - from superset.commands.key_value.create import CreateKeyValueCommand - uuid_key = uuid3(NAMESPACE, key) - CreateKeyValueCommand( - resource=RESOURCE, - value=value, - key=uuid_key, - codec=CODEC, - ).run() + KeyValueDAO.create_entry(RESOURCE, value, CODEC, uuid_key) def get_permalink_salt(key: SharedKey) -> str: diff --git a/superset/key_value/types.py b/superset/key_value/types.py index 7b0130c0e6ce..f6459c330283 100644 --- a/superset/key_value/types.py +++ b/superset/key_value/types.py @@ -19,8 +19,7 @@ import json import pickle from abc import ABC, abstractmethod -from dataclasses import dataclass -from typing import Any, TypedDict +from typing import Any, TypedDict, Union from uuid import UUID from marshmallow import Schema, ValidationError @@ -31,11 +30,7 @@ ) from superset.utils.backports import StrEnum - -@dataclass -class Key: - id: int | None - uuid: UUID | None +Key = Union[int, UUID] class KeyValueFilter(TypedDict, total=False): diff --git a/superset/key_value/utils.py b/superset/key_value/utils.py index 1a22cfaa747b..0a4e63778aa6 100644 --- a/superset/key_value/utils.py +++ b/superset/key_value/utils.py @@ -25,7 +25,7 @@ from flask_babel import gettext as _ from superset.key_value.exceptions import KeyValueParseKeyError -from superset.key_value.types import KeyValueFilter, KeyValueResource +from superset.key_value.types import Key, KeyValueFilter, KeyValueResource from superset.utils.json import json_dumps_w_dates HASHIDS_MIN_LENGTH = 11 @@ -35,7 +35,7 @@ def random_key() -> str: return token_urlsafe(48) -def get_filter(resource: KeyValueResource, key: int | UUID) -> KeyValueFilter: +def get_filter(resource: KeyValueResource, key: Key) -> KeyValueFilter: try: filter_: KeyValueFilter = {"resource": resource.value} if isinstance(key, UUID): diff --git a/superset/utils/oauth2.py b/superset/utils/oauth2.py index bc4805fd8192..b889ef83c5e7 100644 --- a/superset/utils/oauth2.py +++ b/superset/utils/oauth2.py @@ -26,9 +26,9 @@ from marshmallow import EXCLUDE, fields, post_load, Schema from superset import db +from superset.distributed_lock import KeyValueDistributedLock from superset.exceptions import CreateKeyValueDistributedLockFailedException from superset.superset_typing import OAuth2ClientConfig, OAuth2State -from superset.utils.lock import KeyValueDistributedLock if TYPE_CHECKING: from superset.db_engine_specs.base import BaseEngineSpec diff --git a/tests/integration_tests/explore/permalink/commands_tests.py b/tests/integration_tests/explore/permalink/commands_tests.py index 4993e33f1895..c17d8bafdb00 100644 --- a/tests/integration_tests/explore/permalink/commands_tests.py +++ 
b/tests/integration_tests/explore/permalink/commands_tests.py @@ -133,11 +133,11 @@ def test_get_permalink_command(self, mock_g): assert cache_data.get("datasource") == datasource @patch("superset.security.manager.g") - @patch("superset.commands.key_value.get.GetKeyValueCommand.run") + @patch("superset.daos.key_value.KeyValueDAO.get_value") @patch("superset.commands.explore.permalink.get.decode_permalink_id") @pytest.mark.usefixtures("create_dataset", "create_slice") def test_get_permalink_command_with_old_dataset_key( - self, decode_id_mock, get_kv_command_mock, mock_g + self, decode_id_mock, kv_get_value_mock, mock_g ): mock_g.user = security_manager.find_user("admin") app.config["EXPLORE_FORM_DATA_CACHE_CONFIG"] = { @@ -149,13 +149,14 @@ def test_get_permalink_command_with_old_dataset_key( ) slice = db.session.query(Slice).filter_by(slice_name="slice_name").first() - datasource_string = f"{dataset.id}__{DatasourceType.TABLE}" + datasource_string = f"{dataset.id}__{DatasourceType.TABLE.value}" decode_id_mock.return_value = "123456" - get_kv_command_mock.return_value = { + kv_get_value_mock.return_value = { "chartId": slice.id, "datasetId": dataset.id, "datasource": datasource_string, + "datasourceType": DatasourceType.TABLE.value, "state": { "formData": {"datasource": datasource_string, "slice_id": slice.id} }, diff --git a/tests/integration_tests/extensions/metastore_cache_test.py b/tests/integration_tests/extensions/metastore_cache_test.py index c69340a7a2a3..238e8fd46a50 100644 --- a/tests/integration_tests/extensions/metastore_cache_test.py +++ b/tests/integration_tests/extensions/metastore_cache_test.py @@ -60,6 +60,7 @@ def test_caching_flow(app_context: AppContext, cache: SupersetMetastoreCache) -> assert cache.has(FIRST_KEY) is False assert cache.add(FIRST_KEY, FIRST_KEY_INITIAL_VALUE) is True assert cache.has(FIRST_KEY) is True + assert cache.get(FIRST_KEY) == FIRST_KEY_INITIAL_VALUE cache.set(SECOND_KEY, SECOND_VALUE) assert cache.get(FIRST_KEY) == FIRST_KEY_INITIAL_VALUE assert cache.get(SECOND_KEY) == SECOND_VALUE diff --git a/tests/integration_tests/key_value/commands/create_test.py b/tests/integration_tests/key_value/commands/create_test.py deleted file mode 100644 index b18b9886d6ff..000000000000 --- a/tests/integration_tests/key_value/commands/create_test.py +++ /dev/null @@ -1,96 +0,0 @@ -# Licensed to the Apache Software Foundation (ASF) under one -# or more contributor license agreements. See the NOTICE file -# distributed with this work for additional information -# regarding copyright ownership. The ASF licenses this file -# to you under the Apache License, Version 2.0 (the -# "License"); you may not use this file except in compliance -# with the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# KIND, either express or implied. See the License for the -# specific language governing permissions and limitations -# under the License. 
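For reference, the caching flow exercised by the metastore-cache test above, assuming a configured SupersetMetastoreCache instance (here named cache, as in the test fixture) inside an app context; after the refactor the write paths go through KeyValueDAO and commit or roll back the session themselves:

# Sketch only: the add/set/get/has/delete flow of SupersetMetastoreCache.
cache.add("first", {"foo": "bar"})   # insert-only; False if the entry cannot be created
cache.set("first", {"foo": "baz"})   # upsert via KeyValueDAO.upsert_entry, then commit
value = cache.get("first")           # decoded value, or None if missing/expired
cache.has("first")                   # True while get() returns a truthy value
cache.delete("first")                # removes the KeyValueEntry in its own transaction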
-from __future__ import annotations - -import pickle - -import pytest -from flask.ctx import AppContext -from flask_appbuilder.security.sqla.models import User - -from superset.extensions import db -from superset.key_value.exceptions import KeyValueCreateFailedError -from superset.utils import json -from superset.utils.core import override_user -from tests.integration_tests.key_value.commands.fixtures import ( - admin, # noqa: F401 - JSON_CODEC, - JSON_VALUE, - PICKLE_CODEC, - PICKLE_VALUE, - RESOURCE, -) - - -def test_create_id_entry(app_context: AppContext, admin: User) -> None: # noqa: F811 - from superset.commands.key_value.create import CreateKeyValueCommand - from superset.key_value.models import KeyValueEntry - - with override_user(admin): - key = CreateKeyValueCommand( - resource=RESOURCE, - value=JSON_VALUE, - codec=JSON_CODEC, - ).run() - entry = db.session.query(KeyValueEntry).filter_by(id=key.id).one() - assert json.loads(entry.value) == JSON_VALUE - assert entry.created_by_fk == admin.id - db.session.delete(entry) - db.session.commit() - - -def test_create_uuid_entry(app_context: AppContext, admin: User) -> None: # noqa: F811 - from superset.commands.key_value.create import CreateKeyValueCommand - from superset.key_value.models import KeyValueEntry - - with override_user(admin): - key = CreateKeyValueCommand( - resource=RESOURCE, value=JSON_VALUE, codec=JSON_CODEC - ).run() - entry = db.session.query(KeyValueEntry).filter_by(uuid=key.uuid).one() - assert json.loads(entry.value) == JSON_VALUE - assert entry.created_by_fk == admin.id - db.session.delete(entry) - db.session.commit() - - -def test_create_fail_json_entry(app_context: AppContext, admin: User) -> None: # noqa: F811 - from superset.commands.key_value.create import CreateKeyValueCommand - - with pytest.raises(KeyValueCreateFailedError): - CreateKeyValueCommand( - resource=RESOURCE, - value=PICKLE_VALUE, - codec=JSON_CODEC, - ).run() - - -def test_create_pickle_entry(app_context: AppContext, admin: User) -> None: # noqa: F811 - from superset.commands.key_value.create import CreateKeyValueCommand - from superset.key_value.models import KeyValueEntry - - with override_user(admin): - key = CreateKeyValueCommand( - resource=RESOURCE, - value=PICKLE_VALUE, - codec=PICKLE_CODEC, - ).run() - entry = db.session.query(KeyValueEntry).filter_by(id=key.id).one() - assert type(pickle.loads(entry.value)) == type(PICKLE_VALUE) - assert entry.created_by_fk == admin.id - db.session.delete(entry) - db.session.commit() diff --git a/tests/integration_tests/key_value/commands/delete_test.py b/tests/integration_tests/key_value/commands/delete_test.py deleted file mode 100644 index b45a5d075d21..000000000000 --- a/tests/integration_tests/key_value/commands/delete_test.py +++ /dev/null @@ -1,84 +0,0 @@ -# Licensed to the Apache Software Foundation (ASF) under one -# or more contributor license agreements. See the NOTICE file -# distributed with this work for additional information -# regarding copyright ownership. The ASF licenses this file -# to you under the Apache License, Version 2.0 (the -# "License"); you may not use this file except in compliance -# with the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# KIND, either express or implied. 
See the License for the -# specific language governing permissions and limitations -# under the License. -from __future__ import annotations - -from typing import TYPE_CHECKING -from uuid import UUID - -import pytest -from flask.ctx import AppContext -from flask_appbuilder.security.sqla.models import User - -from superset.extensions import db -from superset.utils import json -from tests.integration_tests.key_value.commands.fixtures import ( - admin, # noqa: F401 - JSON_VALUE, - RESOURCE, -) - -if TYPE_CHECKING: - from superset.key_value.models import KeyValueEntry - -ID_KEY = 234 -UUID_KEY = UUID("5aae143c-44f1-478e-9153-ae6154df333a") - - -@pytest.fixture -def key_value_entry() -> KeyValueEntry: - from superset.key_value.models import KeyValueEntry - - entry = KeyValueEntry( - id=ID_KEY, - uuid=UUID_KEY, - resource=RESOURCE, - value=bytes(json.dumps(JSON_VALUE), encoding="utf-8"), - ) - db.session.add(entry) - db.session.flush() - return entry - - -def test_delete_id_entry( - app_context: AppContext, - admin: User, # noqa: F811 - key_value_entry: KeyValueEntry, -) -> None: - from superset.commands.key_value.delete import DeleteKeyValueCommand - - assert DeleteKeyValueCommand(resource=RESOURCE, key=ID_KEY).run() is True - db.session.commit() - - -def test_delete_uuid_entry( - app_context: AppContext, - admin: User, # noqa: F811 - key_value_entry: KeyValueEntry, -) -> None: - from superset.commands.key_value.delete import DeleteKeyValueCommand - - assert DeleteKeyValueCommand(resource=RESOURCE, key=UUID_KEY).run() is True - db.session.commit() - - -def test_delete_entry_missing( - app_context: AppContext, - admin: User, # noqa: F811 -) -> None: - from superset.commands.key_value.delete import DeleteKeyValueCommand - - assert DeleteKeyValueCommand(resource=RESOURCE, key=456).run() is False diff --git a/tests/integration_tests/key_value/commands/fixtures.py b/tests/integration_tests/key_value/commands/fixtures.py deleted file mode 100644 index 74bf809301c1..000000000000 --- a/tests/integration_tests/key_value/commands/fixtures.py +++ /dev/null @@ -1,69 +0,0 @@ -# Licensed to the Apache Software Foundation (ASF) under one -# or more contributor license agreements. See the NOTICE file -# distributed with this work for additional information -# regarding copyright ownership. The ASF licenses this file -# to you under the Apache License, Version 2.0 (the -# "License"); you may not use this file except in compliance -# with the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# KIND, either express or implied. See the License for the -# specific language governing permissions and limitations -# under the License. 
- -from __future__ import annotations - -from collections.abc import Generator -from typing import TYPE_CHECKING -from uuid import UUID - -import pytest -from flask_appbuilder.security.sqla.models import User - -from superset.extensions import db -from superset.key_value.types import ( - JsonKeyValueCodec, - KeyValueResource, - PickleKeyValueCodec, -) -from superset.utils import json -from tests.integration_tests.test_app import app - -if TYPE_CHECKING: - from superset.key_value.models import KeyValueEntry - -ID_KEY = 123 -UUID_KEY = UUID("3e7a2ab8-bcaf-49b0-a5df-dfb432f291cc") -RESOURCE = KeyValueResource.APP -JSON_VALUE = {"foo": "bar"} -PICKLE_VALUE = object() -JSON_CODEC = JsonKeyValueCodec() -PICKLE_CODEC = PickleKeyValueCodec() - - -@pytest.fixture -def key_value_entry() -> Generator[KeyValueEntry, None, None]: - from superset.key_value.models import KeyValueEntry - - entry = KeyValueEntry( - id=ID_KEY, - uuid=UUID_KEY, - resource=RESOURCE, - value=bytes(json.dumps(JSON_VALUE), encoding="utf-8"), - ) - db.session.add(entry) - db.session.flush() - yield entry - db.session.delete(entry) - db.session.commit() - - -@pytest.fixture -def admin() -> User: - with app.app_context(): # noqa: F841 - admin = db.session.query(User).filter_by(username="admin").one() - return admin diff --git a/tests/integration_tests/key_value/commands/get_test.py b/tests/integration_tests/key_value/commands/get_test.py deleted file mode 100644 index 131b615b7c2e..000000000000 --- a/tests/integration_tests/key_value/commands/get_test.py +++ /dev/null @@ -1,103 +0,0 @@ -# Licensed to the Apache Software Foundation (ASF) under one -# or more contributor license agreements. See the NOTICE file -# distributed with this work for additional information -# regarding copyright ownership. The ASF licenses this file -# to you under the Apache License, Version 2.0 (the -# "License"); you may not use this file except in compliance -# with the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# KIND, either express or implied. See the License for the -# specific language governing permissions and limitations -# under the License. 
-from __future__ import annotations - -import uuid -from datetime import datetime, timedelta -from typing import TYPE_CHECKING - -from flask.ctx import AppContext - -from superset.extensions import db -from superset.utils import json -from tests.integration_tests.key_value.commands.fixtures import ( - ID_KEY, - JSON_CODEC, - JSON_VALUE, - key_value_entry, # noqa: F401 - RESOURCE, - UUID_KEY, -) - -if TYPE_CHECKING: - from superset.key_value.models import KeyValueEntry - - -def test_get_id_entry(app_context: AppContext, key_value_entry: KeyValueEntry) -> None: # noqa: F811 - from superset.commands.key_value.get import GetKeyValueCommand - - value = GetKeyValueCommand(resource=RESOURCE, key=ID_KEY, codec=JSON_CODEC).run() - assert value == JSON_VALUE - - -def test_get_uuid_entry( - app_context: AppContext, - key_value_entry: KeyValueEntry, # noqa: F811 -) -> None: - from superset.commands.key_value.get import GetKeyValueCommand - - value = GetKeyValueCommand(resource=RESOURCE, key=UUID_KEY, codec=JSON_CODEC).run() - assert value == JSON_VALUE - - -def test_get_id_entry_missing( - app_context: AppContext, - key_value_entry: KeyValueEntry, # noqa: F811 -) -> None: - from superset.commands.key_value.get import GetKeyValueCommand - - value = GetKeyValueCommand(resource=RESOURCE, key=456, codec=JSON_CODEC).run() - assert value is None - - -def test_get_expired_entry(app_context: AppContext) -> None: - from superset.commands.key_value.get import GetKeyValueCommand - from superset.key_value.models import KeyValueEntry - - entry = KeyValueEntry( - id=678, - uuid=uuid.uuid4(), - resource=RESOURCE, - value=bytes(json.dumps(JSON_VALUE), encoding="utf-8"), - expires_on=datetime.now() - timedelta(days=1), - ) - db.session.add(entry) - db.session.flush() - value = GetKeyValueCommand(resource=RESOURCE, key=ID_KEY, codec=JSON_CODEC).run() - assert value is None - db.session.delete(entry) - db.session.commit() - - -def test_get_future_expiring_entry(app_context: AppContext) -> None: - from superset.commands.key_value.get import GetKeyValueCommand - from superset.key_value.models import KeyValueEntry - - id_ = 789 - entry = KeyValueEntry( - id=id_, - uuid=uuid.uuid4(), - resource=RESOURCE, - value=bytes(json.dumps(JSON_VALUE), encoding="utf-8"), - expires_on=datetime.now() + timedelta(days=1), - ) - db.session.add(entry) - db.session.flush() - value = GetKeyValueCommand(resource=RESOURCE, key=id_, codec=JSON_CODEC).run() - assert value == JSON_VALUE - db.session.delete(entry) - db.session.commit() diff --git a/tests/integration_tests/key_value/commands/update_test.py b/tests/integration_tests/key_value/commands/update_test.py deleted file mode 100644 index bb434ec3b98b..000000000000 --- a/tests/integration_tests/key_value/commands/update_test.py +++ /dev/null @@ -1,97 +0,0 @@ -# Licensed to the Apache Software Foundation (ASF) under one -# or more contributor license agreements. See the NOTICE file -# distributed with this work for additional information -# regarding copyright ownership. The ASF licenses this file -# to you under the Apache License, Version 2.0 (the -# "License"); you may not use this file except in compliance -# with the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# KIND, either express or implied. 
See the License for the -# specific language governing permissions and limitations -# under the License. -from __future__ import annotations - -from typing import TYPE_CHECKING - -from flask.ctx import AppContext -from flask_appbuilder.security.sqla.models import User - -from superset.extensions import db -from superset.utils import json -from superset.utils.core import override_user -from tests.integration_tests.key_value.commands.fixtures import ( - admin, # noqa: F401 - ID_KEY, - JSON_CODEC, - key_value_entry, # noqa: F401 - RESOURCE, - UUID_KEY, -) - -if TYPE_CHECKING: - from superset.key_value.models import KeyValueEntry - - -NEW_VALUE = "new value" - - -def test_update_id_entry( - app_context: AppContext, - admin: User, # noqa: F811 - key_value_entry: KeyValueEntry, # noqa: F811 -) -> None: - from superset.commands.key_value.update import UpdateKeyValueCommand - from superset.key_value.models import KeyValueEntry - - with override_user(admin): - key = UpdateKeyValueCommand( - resource=RESOURCE, - key=ID_KEY, - value=NEW_VALUE, - codec=JSON_CODEC, - ).run() - assert key is not None - assert key.id == ID_KEY - entry = db.session.query(KeyValueEntry).filter_by(id=ID_KEY).one() - assert json.loads(entry.value) == NEW_VALUE - assert entry.changed_by_fk == admin.id - - -def test_update_uuid_entry( - app_context: AppContext, - admin: User, # noqa: F811 - key_value_entry: KeyValueEntry, # noqa: F811 -) -> None: - from superset.commands.key_value.update import UpdateKeyValueCommand - from superset.key_value.models import KeyValueEntry - - with override_user(admin): - key = UpdateKeyValueCommand( - resource=RESOURCE, - key=UUID_KEY, - value=NEW_VALUE, - codec=JSON_CODEC, - ).run() - assert key is not None - assert key.uuid == UUID_KEY - entry = db.session.query(KeyValueEntry).filter_by(uuid=UUID_KEY).one() - assert json.loads(entry.value) == NEW_VALUE - assert entry.changed_by_fk == admin.id - - -def test_update_missing_entry(app_context: AppContext, admin: User) -> None: # noqa: F811 - from superset.commands.key_value.update import UpdateKeyValueCommand - - with override_user(admin): - key = UpdateKeyValueCommand( - resource=RESOURCE, - key=456, - value=NEW_VALUE, - codec=JSON_CODEC, - ).run() - assert key is None diff --git a/tests/integration_tests/key_value/commands/upsert_test.py b/tests/integration_tests/key_value/commands/upsert_test.py deleted file mode 100644 index 6ff61423f1a7..000000000000 --- a/tests/integration_tests/key_value/commands/upsert_test.py +++ /dev/null @@ -1,101 +0,0 @@ -# Licensed to the Apache Software Foundation (ASF) under one -# or more contributor license agreements. See the NOTICE file -# distributed with this work for additional information -# regarding copyright ownership. The ASF licenses this file -# to you under the Apache License, Version 2.0 (the -# "License"); you may not use this file except in compliance -# with the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# KIND, either express or implied. See the License for the -# specific language governing permissions and limitations -# under the License. 
-from __future__ import annotations - -from typing import TYPE_CHECKING - -from flask.ctx import AppContext -from flask_appbuilder.security.sqla.models import User - -from superset.extensions import db -from superset.utils import json -from superset.utils.core import override_user -from tests.integration_tests.key_value.commands.fixtures import ( - admin, # noqa: F401 - ID_KEY, - JSON_CODEC, - key_value_entry, # noqa: F401 - RESOURCE, - UUID_KEY, -) - -if TYPE_CHECKING: - from superset.key_value.models import KeyValueEntry - - -NEW_VALUE = "new value" - - -def test_upsert_id_entry( - app_context: AppContext, - admin: User, # noqa: F811 - key_value_entry: KeyValueEntry, # noqa: F811 -) -> None: - from superset.commands.key_value.upsert import UpsertKeyValueCommand - from superset.key_value.models import KeyValueEntry - - with override_user(admin): - key = UpsertKeyValueCommand( - resource=RESOURCE, - key=ID_KEY, - value=NEW_VALUE, - codec=JSON_CODEC, - ).run() - assert key is not None - assert key.id == ID_KEY - entry = db.session.query(KeyValueEntry).filter_by(id=int(ID_KEY)).one() - assert json.loads(entry.value) == NEW_VALUE - assert entry.changed_by_fk == admin.id - - -def test_upsert_uuid_entry( - app_context: AppContext, - admin: User, # noqa: F811 - key_value_entry: KeyValueEntry, # noqa: F811 -) -> None: - from superset.commands.key_value.upsert import UpsertKeyValueCommand - from superset.key_value.models import KeyValueEntry - - with override_user(admin): - key = UpsertKeyValueCommand( - resource=RESOURCE, - key=UUID_KEY, - value=NEW_VALUE, - codec=JSON_CODEC, - ).run() - assert key is not None - assert key.uuid == UUID_KEY - entry = db.session.query(KeyValueEntry).filter_by(uuid=UUID_KEY).one() - assert json.loads(entry.value) == NEW_VALUE - assert entry.changed_by_fk == admin.id - - -def test_upsert_missing_entry(app_context: AppContext, admin: User) -> None: # noqa: F811 - from superset.commands.key_value.upsert import UpsertKeyValueCommand - from superset.key_value.models import KeyValueEntry - - with override_user(admin): - key = UpsertKeyValueCommand( - resource=RESOURCE, - key=456, - value=NEW_VALUE, - codec=JSON_CODEC, - ).run() - assert key is not None - assert key.id == 456 - db.session.query(KeyValueEntry).filter_by(id=456).delete() - db.session.commit() diff --git a/tests/unit_tests/dao/key_value_test.py b/tests/unit_tests/dao/key_value_test.py new file mode 100644 index 000000000000..18c0dfb25f94 --- /dev/null +++ b/tests/unit_tests/dao/key_value_test.py @@ -0,0 +1,395 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
+# pylint: disable=unused-argument, import-outside-toplevel, unused-import +from __future__ import annotations + +import pickle +from datetime import datetime, timedelta +from typing import Generator, TYPE_CHECKING +from uuid import UUID + +import pytest +from flask.ctx import AppContext +from flask_appbuilder.security.sqla.models import User + +from superset.extensions import db +from superset.key_value.exceptions import ( + KeyValueCreateFailedError, + KeyValueUpdateFailedError, +) +from superset.key_value.types import ( + JsonKeyValueCodec, + KeyValueResource, + PickleKeyValueCodec, +) +from superset.utils import json +from superset.utils.core import override_user +from tests.unit_tests.fixtures.common import admin_user, after_each # noqa: F401 + +if TYPE_CHECKING: + from superset.key_value.models import KeyValueEntry + +ID_KEY = 123 +UUID_KEY = UUID("3e7a2ab8-bcaf-49b0-a5df-dfb432f291cc") +RESOURCE = KeyValueResource.APP +JSON_VALUE = {"foo": "bar"} +PICKLE_VALUE = object() +JSON_CODEC = JsonKeyValueCodec() +PICKLE_CODEC = PickleKeyValueCodec() +NEW_VALUE = {"foo": "baz"} + + +@pytest.fixture +def key_value_entry() -> Generator[KeyValueEntry, None, None]: + from superset.key_value.models import KeyValueEntry + + entry = KeyValueEntry( + id=ID_KEY, + uuid=UUID_KEY, + resource=RESOURCE, + value=JSON_CODEC.encode(JSON_VALUE), + ) + db.session.add(entry) + db.session.flush() + yield entry + + +def test_create_id_entry( + app_context: AppContext, + admin_user: User, # noqa: F811 + after_each: None, # noqa: F811 +) -> None: + from superset.daos.key_value import KeyValueDAO + from superset.key_value.models import KeyValueEntry + + with override_user(admin_user): + created_entry = KeyValueDAO.create_entry( + resource=RESOURCE, + value=JSON_VALUE, + codec=JSON_CODEC, + ) + db.session.flush() + found_entry = ( + db.session.query(KeyValueEntry).filter_by(id=created_entry.id).one() + ) + assert json.loads(found_entry.value) == JSON_VALUE + assert found_entry.created_by_fk == admin_user.id + + +def test_create_uuid_entry( + app_context: AppContext, + admin_user: User, # noqa: F811 + after_each: None, # noqa: F811 +) -> None: + from superset.daos.key_value import KeyValueDAO + from superset.key_value.models import KeyValueEntry + + with override_user(admin_user): + created_entry = KeyValueDAO.create_entry( + resource=RESOURCE, value=JSON_VALUE, codec=JSON_CODEC + ) + db.session.flush() + + found_entry = ( + db.session.query(KeyValueEntry).filter_by(uuid=created_entry.uuid).one() + ) + assert json.loads(found_entry.value) == JSON_VALUE + assert found_entry.created_by_fk == admin_user.id + + +def test_create_fail_json_entry( + app_context: AppContext, + after_each: None, # noqa: F811 +) -> None: + from superset.daos.key_value import KeyValueDAO + + with pytest.raises(KeyValueCreateFailedError): + KeyValueDAO.create_entry( + resource=RESOURCE, + value=PICKLE_VALUE, + codec=JSON_CODEC, + ) + + +def test_create_pickle_entry( + app_context: AppContext, + admin_user: User, # noqa: F811 + after_each: None, # noqa: F811 +) -> None: + from superset.daos.key_value import KeyValueDAO + from superset.key_value.models import KeyValueEntry + + with override_user(admin_user): + created_entry = KeyValueDAO.create_entry( + resource=RESOURCE, + value=PICKLE_VALUE, + codec=PICKLE_CODEC, + ) + db.session.flush() + found_entry = ( + db.session.query(KeyValueEntry).filter_by(id=created_entry.id).one() + ) + assert type(pickle.loads(found_entry.value)) == type(PICKLE_VALUE) + assert found_entry.created_by_fk == 
admin_user.id + + +def test_get_value( + app_context: AppContext, + key_value_entry: KeyValueEntry, + after_each: None, # noqa: F811 +) -> None: + from superset.daos.key_value import KeyValueDAO + + value = KeyValueDAO.get_value( + resource=RESOURCE, + key=key_value_entry.id, + codec=JSON_CODEC, + ) + assert value == JSON_VALUE + + +def test_get_id_entry( + app_context: AppContext, + key_value_entry: KeyValueEntry, + after_each: None, # noqa: F811 +) -> None: + from superset.daos.key_value import KeyValueDAO + + found_entry = KeyValueDAO.get_entry(resource=RESOURCE, key=key_value_entry.id) + assert found_entry is not None + assert found_entry.id == key_value_entry.id + + +def test_get_uuid_entry( + app_context: AppContext, + key_value_entry: KeyValueEntry, # noqa: F811 + after_each: None, # noqa: F811 +) -> None: + from superset.daos.key_value import KeyValueDAO + + found_entry = KeyValueDAO.get_entry(resource=RESOURCE, key=key_value_entry.uuid) + assert found_entry is not None + assert JSON_CODEC.decode(found_entry.value) == JSON_VALUE + + +def test_get_id_entry_missing( + app_context: AppContext, + after_each: None, # noqa: F811 +) -> None: + from superset.daos.key_value import KeyValueDAO + + entry = KeyValueDAO.get_entry(resource=RESOURCE, key=456) + assert entry is None + + +def test_get_expired_entry( + app_context: AppContext, + after_each: None, # noqa: F811 +) -> None: + from superset.daos.key_value import KeyValueDAO + + created_entry = KeyValueDAO.create_entry( + resource=RESOURCE, + value=JSON_VALUE, + codec=JSON_CODEC, + key=ID_KEY, + expires_on=datetime.now() - timedelta(days=1), + ) + found_entry = KeyValueDAO.get_entry(resource=RESOURCE, key=created_entry.id) + assert found_entry is not None + assert found_entry.is_expired() is True + + +def test_get_future_expiring_entry( + app_context: AppContext, + after_each: None, # noqa: F811 +) -> None: + from superset.daos.key_value import KeyValueDAO + + created_entry = KeyValueDAO.create_entry( + resource=RESOURCE, + value=JSON_VALUE, + codec=JSON_CODEC, + key=ID_KEY, + expires_on=datetime.now() + timedelta(days=1), + ) + found_entry = KeyValueDAO.get_entry(resource=RESOURCE, key=created_entry.id) + assert found_entry is not None + assert found_entry.is_expired() is False + + +def test_update_id_entry( + app_context: AppContext, + key_value_entry: KeyValueEntry, # noqa: F811 + admin_user: User, # noqa: F811 + after_each: None, # noqa: F811 +) -> None: + from superset.daos.key_value import KeyValueDAO + + with override_user(admin_user): + updated_entry = KeyValueDAO.update_entry( + resource=RESOURCE, + key=ID_KEY, + value=NEW_VALUE, + codec=JSON_CODEC, + ) + db.session.flush() + assert updated_entry is not None + assert JSON_CODEC.decode(updated_entry.value) == NEW_VALUE + assert updated_entry.id == ID_KEY + assert updated_entry.uuid == UUID_KEY + found_entry = KeyValueDAO.get_entry(resource=RESOURCE, key=ID_KEY) + assert found_entry is not None + assert JSON_CODEC.decode(found_entry.value) == NEW_VALUE + assert found_entry.changed_by_fk == admin_user.id + + +def test_update_uuid_entry( + app_context: AppContext, + key_value_entry: KeyValueEntry, # noqa: F811 + admin_user: User, # noqa: F811 + after_each: None, # noqa: F811 +) -> None: + from superset.daos.key_value import KeyValueDAO + + with override_user(admin_user): + updated_entry = KeyValueDAO.update_entry( + resource=RESOURCE, + key=UUID_KEY, + value=NEW_VALUE, + codec=JSON_CODEC, + ) + db.session.flush() + assert updated_entry is not None + assert 
JSON_CODEC.decode(updated_entry.value) == NEW_VALUE + assert updated_entry.id == ID_KEY + assert updated_entry.uuid == UUID_KEY + found_entry = KeyValueDAO.get_entry(resource=RESOURCE, key=UUID_KEY) + assert found_entry is not None + assert JSON_CODEC.decode(found_entry.value) == NEW_VALUE + assert found_entry.changed_by_fk == admin_user.id + + +def test_update_missing_entry( + app_context: AppContext, + admin_user: User, # noqa: F811 + after_each: None, # noqa: F811 +) -> None: + from superset.daos.key_value import KeyValueDAO + + with override_user(admin_user): + with pytest.raises(KeyValueUpdateFailedError): + KeyValueDAO.update_entry( + resource=RESOURCE, + key=456, + value=NEW_VALUE, + codec=JSON_CODEC, + ) + + +def test_upsert_id_entry( + app_context: AppContext, + key_value_entry: KeyValueEntry, # noqa: F811 + admin_user: User, # noqa: F811 + after_each: None, # noqa: F811 +) -> None: + from superset.daos.key_value import KeyValueDAO + + with override_user(admin_user): + entry = KeyValueDAO.upsert_entry( + resource=RESOURCE, + key=ID_KEY, + value=NEW_VALUE, + codec=JSON_CODEC, + ) + found_entry = KeyValueDAO.get_entry(resource=RESOURCE, key=ID_KEY) + assert found_entry is not None + assert JSON_CODEC.decode(found_entry.value) == NEW_VALUE + assert entry.changed_by_fk == admin_user.id + + +def test_upsert_uuid_entry( + app_context: AppContext, + key_value_entry: KeyValueEntry, # noqa: F811 + admin_user: User, # noqa: F811 + after_each: None, # noqa: F811 +) -> None: + from superset.daos.key_value import KeyValueDAO + + with override_user(admin_user): + entry = KeyValueDAO.upsert_entry( + resource=RESOURCE, + key=UUID_KEY, + value=NEW_VALUE, + codec=JSON_CODEC, + ) + db.session.flush() + assert entry is not None + assert entry.id == ID_KEY + assert entry.uuid == UUID_KEY + found_entry = KeyValueDAO.get_entry(resource=RESOURCE, key=UUID_KEY) + assert found_entry is not None + assert JSON_CODEC.decode(found_entry.value) == NEW_VALUE + assert entry.changed_by_fk == admin_user.id + + +def test_upsert_missing_entry( + app_context: AppContext, + after_each: None, # noqa: F811 +) -> None: + from superset.daos.key_value import KeyValueDAO + + entry = KeyValueDAO.get_entry(resource=RESOURCE, key=ID_KEY) + assert entry is None + KeyValueDAO.upsert_entry( + resource=RESOURCE, + key=ID_KEY, + value=NEW_VALUE, + codec=JSON_CODEC, + ) + entry = KeyValueDAO.get_entry(resource=RESOURCE, key=ID_KEY) + assert entry is not None + assert JSON_CODEC.decode(entry.value) == NEW_VALUE + + +def test_delete_id_entry( + app_context: AppContext, + key_value_entry: KeyValueEntry, + after_each: None, # noqa: F811 +) -> None: + from superset.daos.key_value import KeyValueDAO + + assert KeyValueDAO.delete_entry(resource=RESOURCE, key=ID_KEY) is True + + +def test_delete_uuid_entry( + app_context: AppContext, + key_value_entry: KeyValueEntry, + after_each: None, # noqa: F811 +) -> None: + from superset.daos.key_value import KeyValueDAO + + assert KeyValueDAO.delete_entry(resource=RESOURCE, key=UUID_KEY) is True + + +def test_delete_entry_missing( + app_context: AppContext, + after_each: None, # noqa: F811 +) -> None: + from superset.daos.key_value import KeyValueDAO + + assert KeyValueDAO.delete_entry(resource=RESOURCE, key=12345678) is False diff --git a/tests/unit_tests/distributed_lock/__init__.py b/tests/unit_tests/distributed_lock/__init__.py new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/tests/unit_tests/utils/lock_tests.py b/tests/unit_tests/distributed_lock/distributed_lock_tests.py 
similarity index 51% rename from tests/unit_tests/utils/lock_tests.py rename to tests/unit_tests/distributed_lock/distributed_lock_tests.py index 4c9121fe3874..6fe363f0978d 100644 --- a/tests/unit_tests/utils/lock_tests.py +++ b/tests/unit_tests/distributed_lock/distributed_lock_tests.py @@ -22,17 +22,21 @@ import pytest from freezegun import freeze_time +from sqlalchemy.orm import Session, sessionmaker from superset import db +from superset.distributed_lock import KeyValueDistributedLock +from superset.distributed_lock.types import LockValue +from superset.distributed_lock.utils import get_key from superset.exceptions import CreateKeyValueDistributedLockFailedException from superset.key_value.types import JsonKeyValueCodec -from superset.utils.lock import get_key, KeyValueDistributedLock +LOCK_VALUE: LockValue = {"value": True} MAIN_KEY = get_key("ns", a=1, b=2) OTHER_KEY = get_key("ns2", a=1, b=2) -def _get_lock(key: UUID) -> Any: +def _get_lock(key: UUID, session: Session) -> Any: from superset.key_value.models import KeyValueEntry entry = db.session.query(KeyValueEntry).filter_by(uuid=key).first() @@ -42,41 +46,56 @@ def _get_lock(key: UUID) -> Any: return JsonKeyValueCodec().decode(entry.value) +def _get_other_session() -> Session: + # This session is used to simulate what another worker will find in the metastore + # during the locking process. + from superset import db + + bind = db.session.get_bind() + SessionMaker = sessionmaker(bind=bind) + return SessionMaker() + + def test_key_value_distributed_lock_happy_path() -> None: """ Test successfully acquiring and returning the distributed lock. - Note we use a nested transaction to ensure that the cleanup from the outer context - manager is correctly invoked, otherwise a partial rollback would occur leaving the - database in a fractured state. + Note, we're using another session for asserting the lock state in the Metastore + to simulate what another worker will observe. Otherwise, there's the risk that + the assertions would only be using the non-committed state from the main session. """ + session = _get_other_session() with freeze_time("2021-01-01"): - assert _get_lock(MAIN_KEY) is None + assert _get_lock(MAIN_KEY, session) is None with KeyValueDistributedLock("ns", a=1, b=2) as key: assert key == MAIN_KEY - assert _get_lock(key) is True - assert _get_lock(OTHER_KEY) is None + assert _get_lock(key, session) == LOCK_VALUE + assert _get_lock(OTHER_KEY, session) is None - with db.session.begin_nested(): - with pytest.raises(CreateKeyValueDistributedLockFailedException): - with KeyValueDistributedLock("ns", a=1, b=2): - pass + with pytest.raises(CreateKeyValueDistributedLockFailedException): + with KeyValueDistributedLock("ns", a=1, b=2): + pass - assert _get_lock(MAIN_KEY) is None + assert _get_lock(MAIN_KEY, session) is None def test_key_value_distributed_lock_expired() -> None: """ Test expiration of the distributed lock + + Note, we're using another session for asserting the lock state in the Metastore + to simulate what another worker will observe. Otherwise, there's the risk that + the assertions would only be using the non-committed state from the main session. 
""" + session = _get_other_session() with freeze_time("2021-01-01"): - assert _get_lock(MAIN_KEY) is None + assert _get_lock(MAIN_KEY, session) is None with KeyValueDistributedLock("ns", a=1, b=2): - assert _get_lock(MAIN_KEY) is True + assert _get_lock(MAIN_KEY, session) == LOCK_VALUE with freeze_time("2022-01-01"): - assert _get_lock(MAIN_KEY) is None + assert _get_lock(MAIN_KEY, session) is None - assert _get_lock(MAIN_KEY) is None + assert _get_lock(MAIN_KEY, session) is None diff --git a/tests/unit_tests/fixtures/common.py b/tests/unit_tests/fixtures/common.py index 5aea8472c04a..4ee1d9d0ee34 100644 --- a/tests/unit_tests/fixtures/common.py +++ b/tests/unit_tests/fixtures/common.py @@ -20,12 +20,15 @@ import csv from datetime import datetime from io import BytesIO, StringIO -from typing import Any +from typing import Any, Generator import pandas as pd import pytest +from flask_appbuilder.security.sqla.models import Role, User from werkzeug.datastructures import FileStorage +from superset import db + @pytest.fixture def dttm() -> datetime: @@ -73,3 +76,24 @@ def create_columnar_file( df.to_parquet(buffer, index=False) buffer.seek(0) return FileStorage(stream=buffer, filename=filename) + + +@pytest.fixture +def admin_user() -> Generator[User, None, None]: + role = db.session.query(Role).filter_by(name="Admin").one() + user = User( + first_name="Alice", + last_name="Admin", + email="alice_admin@example.org", + username="alice_admin", + roles=[role], + ) + db.session.add(user) + db.session.flush() + yield user + + +@pytest.fixture +def after_each() -> Generator[None, None, None]: + yield + db.session.rollback() From 028665030f7851f3db4e90ee5d483ddb09bf384c Mon Sep 17 00:00:00 2001 From: saghatelian <43491361+saghatelian@users.noreply.github.com> Date: Mon, 1 Jul 2024 21:47:08 +0400 Subject: [PATCH 25/31] chore: Added 10Web to the list of organizations that use Apache Superset (#29442) --- RESOURCES/INTHEWILD.md | 1 + 1 file changed, 1 insertion(+) diff --git a/RESOURCES/INTHEWILD.md b/RESOURCES/INTHEWILD.md index da918ef52a36..8063dc388f94 100644 --- a/RESOURCES/INTHEWILD.md +++ b/RESOURCES/INTHEWILD.md @@ -178,6 +178,7 @@ Join our growing community! 
- [Skyscanner](https://www.skyscanner.net/) [@cleslie, @stanhoucke] ### Others +- [10Web](https://10web.io/) - [AI inside](https://inside.ai/en/) - [Automattic](https://automattic.com/) [@Khrol, @Usiel] - [Dropbox](https://www.dropbox.com/) [@bkyryliuk] From 446a3b22dcbe3528db977c5dfdb83be123615e42 Mon Sep 17 00:00:00 2001 From: Maxime Beauchemin Date: Mon, 1 Jul 2024 13:19:25 -0700 Subject: [PATCH 26/31] chore: move all GHAs to ubuntu-22.04 (#29447) --- .github/workflows/bump-python-package.yml | 2 +- .github/workflows/cancel_duplicates.yml | 2 +- .github/workflows/check_db_migration_confict.yml | 2 +- .github/workflows/dependency-review.yml | 2 +- .github/workflows/docker.yml | 4 ++-- .github/workflows/embedded-sdk-release.yml | 4 ++-- .github/workflows/embedded-sdk-test.yml | 2 +- .github/workflows/ephemeral-env-pr-close.yml | 4 ++-- .github/workflows/ephemeral-env.yml | 8 ++++---- .github/workflows/generate-FOSSA-report.yml | 4 ++-- .github/workflows/github-action-validator.yml | 2 +- .github/workflows/issue_creation.yml | 2 +- .github/workflows/labeler.yml | 2 +- .github/workflows/latest-release-tag.yml | 2 +- .github/workflows/license-check.yml | 2 +- .github/workflows/no-hold-label.yml | 2 +- .github/workflows/pr-lint.yml | 2 +- .github/workflows/pre-commit.yml | 2 +- .github/workflows/prefer-typescript.yml | 2 +- .github/workflows/release.yml | 4 ++-- .github/workflows/superset-applitool-cypress.yml | 4 ++-- .github/workflows/superset-applitools-storybook.yml | 4 ++-- .github/workflows/superset-cli.yml | 2 +- .github/workflows/superset-docs-deploy.yml | 4 ++-- .github/workflows/superset-docs-verify.yml | 2 +- .github/workflows/superset-e2e.yml | 2 +- .github/workflows/superset-frontend.yml | 2 +- .github/workflows/superset-helm-lint.yml | 2 +- .github/workflows/superset-helm-release.yml | 2 +- .github/workflows/superset-python-integrationtest.yml | 6 +++--- .github/workflows/superset-python-misc.yml | 4 ++-- .github/workflows/superset-python-presto-hive.yml | 4 ++-- .github/workflows/superset-python-unittest.yml | 2 +- .github/workflows/superset-translations.yml | 4 ++-- .github/workflows/superset-websocket.yml | 2 +- .github/workflows/supersetbot.yml | 2 +- .github/workflows/tag-release.yml | 4 ++-- .github/workflows/tech-debt.yml | 4 ++-- .github/workflows/welcome-new-users.yml | 2 +- 39 files changed, 57 insertions(+), 57 deletions(-) diff --git a/.github/workflows/bump-python-package.yml b/.github/workflows/bump-python-package.yml index 336e5235e418..846291828abb 100644 --- a/.github/workflows/bump-python-package.yml +++ b/.github/workflows/bump-python-package.yml @@ -17,7 +17,7 @@ on: jobs: bump-python-package: - runs-on: ubuntu-latest + runs-on: ubuntu-22.04 permissions: actions: write contents: write diff --git a/.github/workflows/cancel_duplicates.yml b/.github/workflows/cancel_duplicates.yml index 751a498ab542..a749a2add605 100644 --- a/.github/workflows/cancel_duplicates.yml +++ b/.github/workflows/cancel_duplicates.yml @@ -9,7 +9,7 @@ on: jobs: cancel-duplicate-runs: name: Cancel duplicate workflow runs - runs-on: ubuntu-20.04 + runs-on: ubuntu-22.04 permissions: actions: write contents: read diff --git a/.github/workflows/check_db_migration_confict.yml b/.github/workflows/check_db_migration_confict.yml index e717f41193d0..af291becde6f 100644 --- a/.github/workflows/check_db_migration_confict.yml +++ b/.github/workflows/check_db_migration_confict.yml @@ -19,7 +19,7 @@ concurrency: jobs: check_db_migration_conflict: name: Check DB migration conflict - runs-on: 
ubuntu-20.04 + runs-on: ubuntu-22.04 permissions: contents: read pull-requests: write diff --git a/.github/workflows/dependency-review.yml b/.github/workflows/dependency-review.yml index 6d0c12239170..773e7358345f 100644 --- a/.github/workflows/dependency-review.yml +++ b/.github/workflows/dependency-review.yml @@ -12,7 +12,7 @@ permissions: jobs: dependency-review: - runs-on: ubuntu-latest + runs-on: ubuntu-22.04 steps: - name: "Checkout Repository" uses: actions/checkout@v4 diff --git a/.github/workflows/docker.yml b/.github/workflows/docker.yml index f38cd4fee4d1..c8c4756ea543 100644 --- a/.github/workflows/docker.yml +++ b/.github/workflows/docker.yml @@ -15,7 +15,7 @@ concurrency: jobs: setup_matrix: - runs-on: ubuntu-latest + runs-on: ubuntu-22.04 outputs: matrix_config: ${{ steps.set_matrix.outputs.matrix_config }} steps: @@ -28,7 +28,7 @@ jobs: docker-build: name: docker-build needs: setup_matrix - runs-on: ubuntu-latest + runs-on: ubuntu-22.04 strategy: matrix: build_preset: ${{fromJson(needs.setup_matrix.outputs.matrix_config)}} diff --git a/.github/workflows/embedded-sdk-release.yml b/.github/workflows/embedded-sdk-release.yml index 323b1a9e99f1..b0c75343824d 100644 --- a/.github/workflows/embedded-sdk-release.yml +++ b/.github/workflows/embedded-sdk-release.yml @@ -8,7 +8,7 @@ on: jobs: config: - runs-on: "ubuntu-latest" + runs-on: "ubuntu-22.04" outputs: has-secrets: ${{ steps.check.outputs.has-secrets }} steps: @@ -23,7 +23,7 @@ jobs: build: needs: config if: needs.config.outputs.has-secrets - runs-on: ubuntu-20.04 + runs-on: ubuntu-22.04 defaults: run: working-directory: superset-embedded-sdk diff --git a/.github/workflows/embedded-sdk-test.yml b/.github/workflows/embedded-sdk-test.yml index e3f3c1bdcac8..50058d0af75f 100644 --- a/.github/workflows/embedded-sdk-test.yml +++ b/.github/workflows/embedded-sdk-test.yml @@ -13,7 +13,7 @@ concurrency: jobs: embedded-sdk-test: - runs-on: ubuntu-20.04 + runs-on: ubuntu-22.04 defaults: run: working-directory: superset-embedded-sdk diff --git a/.github/workflows/ephemeral-env-pr-close.yml b/.github/workflows/ephemeral-env-pr-close.yml index e61559b05295..5fc634f6cdf7 100644 --- a/.github/workflows/ephemeral-env-pr-close.yml +++ b/.github/workflows/ephemeral-env-pr-close.yml @@ -6,7 +6,7 @@ on: jobs: config: - runs-on: "ubuntu-latest" + runs-on: "ubuntu-22.04" outputs: has-secrets: ${{ steps.check.outputs.has-secrets }} steps: @@ -22,7 +22,7 @@ jobs: needs: config if: needs.config.outputs.has-secrets name: Cleanup ephemeral envs - runs-on: ubuntu-latest + runs-on: ubuntu-22.04 permissions: pull-requests: write steps: diff --git a/.github/workflows/ephemeral-env.yml b/.github/workflows/ephemeral-env.yml index a48f8eb6aa89..1cd80282ec24 100644 --- a/.github/workflows/ephemeral-env.yml +++ b/.github/workflows/ephemeral-env.yml @@ -6,7 +6,7 @@ on: jobs: config: - runs-on: "ubuntu-latest" + runs-on: "ubuntu-22.04" if: github.event.issue.pull_request outputs: has-secrets: ${{ steps.check.outputs.has-secrets }} @@ -26,7 +26,7 @@ jobs: needs: config if: needs.config.outputs.has-secrets name: Evaluate ephemeral env comment trigger (/testenv) - runs-on: ubuntu-latest + runs-on: ubuntu-22.04 permissions: pull-requests: write outputs: @@ -88,7 +88,7 @@ jobs: cancel-in-progress: true needs: ephemeral-env-comment name: ephemeral-docker-build - runs-on: ubuntu-latest + runs-on: ubuntu-22.04 steps: - name: Get Info from comment uses: actions/github-script@v7 @@ -153,7 +153,7 @@ jobs: needs: [ephemeral-env-comment, ephemeral-docker-build] if: 
needs.ephemeral-env-comment.outputs.slash-command == 'up' name: Spin up an ephemeral environment - runs-on: ubuntu-latest + runs-on: ubuntu-22.04 permissions: contents: read pull-requests: write diff --git a/.github/workflows/generate-FOSSA-report.yml b/.github/workflows/generate-FOSSA-report.yml index 352ba845d9de..807c8ea40fc2 100644 --- a/.github/workflows/generate-FOSSA-report.yml +++ b/.github/workflows/generate-FOSSA-report.yml @@ -8,7 +8,7 @@ on: jobs: config: - runs-on: "ubuntu-latest" + runs-on: "ubuntu-22.04" outputs: has-secrets: ${{ steps.check.outputs.has-secrets }} steps: @@ -24,7 +24,7 @@ jobs: needs: config if: needs.config.outputs.has-secrets name: Generate Report - runs-on: ubuntu-20.04 + runs-on: ubuntu-22.04 steps: - name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )" uses: actions/checkout@v4 diff --git a/.github/workflows/github-action-validator.yml b/.github/workflows/github-action-validator.yml index 0dd50155310c..5acc5e0880e1 100644 --- a/.github/workflows/github-action-validator.yml +++ b/.github/workflows/github-action-validator.yml @@ -11,7 +11,7 @@ on: jobs: validate-all-ghas: - runs-on: ubuntu-latest + runs-on: ubuntu-22.04 steps: - name: Checkout Repository uses: actions/checkout@v4 diff --git a/.github/workflows/issue_creation.yml b/.github/workflows/issue_creation.yml index 2ea1de2b0a43..fc3b3e45bdf7 100644 --- a/.github/workflows/issue_creation.yml +++ b/.github/workflows/issue_creation.yml @@ -9,7 +9,7 @@ on: jobs: superbot-orglabel: - runs-on: ubuntu-latest + runs-on: ubuntu-22.04 permissions: contents: read pull-requests: write diff --git a/.github/workflows/labeler.yml b/.github/workflows/labeler.yml index 15a41995d1d5..5af67547f982 100644 --- a/.github/workflows/labeler.yml +++ b/.github/workflows/labeler.yml @@ -7,7 +7,7 @@ jobs: permissions: contents: read pull-requests: write - runs-on: ubuntu-latest + runs-on: ubuntu-22.04 steps: - uses: actions/labeler@v5 with: diff --git a/.github/workflows/latest-release-tag.yml b/.github/workflows/latest-release-tag.yml index bd73462e895d..659214af9e09 100644 --- a/.github/workflows/latest-release-tag.yml +++ b/.github/workflows/latest-release-tag.yml @@ -6,7 +6,7 @@ on: jobs: latest-release: name: Add/update tag to new release - runs-on: ubuntu-latest + runs-on: ubuntu-22.04 permissions: contents: write diff --git a/.github/workflows/license-check.yml b/.github/workflows/license-check.yml index 5f5468071d74..8974c5ae43ff 100644 --- a/.github/workflows/license-check.yml +++ b/.github/workflows/license-check.yml @@ -12,7 +12,7 @@ concurrency: jobs: license_check: name: License Check - runs-on: ubuntu-20.04 + runs-on: ubuntu-22.04 steps: - name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )" uses: actions/checkout@v4 diff --git a/.github/workflows/no-hold-label.yml b/.github/workflows/no-hold-label.yml index 73a3664e084f..866650ece4c1 100644 --- a/.github/workflows/no-hold-label.yml +++ b/.github/workflows/no-hold-label.yml @@ -11,7 +11,7 @@ concurrency: jobs: check-hold-label: - runs-on: ubuntu-latest + runs-on: ubuntu-22.04 steps: - name: Check for 'hold' label uses: actions/github-script@v7 diff --git a/.github/workflows/pr-lint.yml b/.github/workflows/pr-lint.yml index e6ea96a6aea9..5ba91fee6ebf 100644 --- a/.github/workflows/pr-lint.yml +++ b/.github/workflows/pr-lint.yml @@ -10,7 +10,7 @@ on: jobs: lint-check: - runs-on: ubuntu-latest + runs-on: ubuntu-22.04 permissions: contents: read pull-requests: write diff --git a/.github/workflows/pre-commit.yml b/.github/workflows/pre-commit.yml index 
6ccb66df771f..af6765019250 100644 --- a/.github/workflows/pre-commit.yml +++ b/.github/workflows/pre-commit.yml @@ -15,7 +15,7 @@ concurrency: jobs: pre-commit: - runs-on: ubuntu-20.04 + runs-on: ubuntu-22.04 steps: - name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )" uses: actions/checkout@v4 diff --git a/.github/workflows/prefer-typescript.yml b/.github/workflows/prefer-typescript.yml index 0b34f25bae4d..4739ae8b6bf8 100644 --- a/.github/workflows/prefer-typescript.yml +++ b/.github/workflows/prefer-typescript.yml @@ -21,7 +21,7 @@ jobs: prefer_typescript: if: github.ref == 'ref/heads/master' && github.event_name == 'pull_request' name: Prefer TypeScript - runs-on: ubuntu-latest + runs-on: ubuntu-22.04 permissions: contents: read pull-requests: write diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index 43862bd50b5a..4435054a5c7a 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -8,7 +8,7 @@ on: jobs: config: - runs-on: "ubuntu-latest" + runs-on: "ubuntu-22.04" outputs: has-secrets: ${{ steps.check.outputs.has-secrets }} steps: @@ -25,7 +25,7 @@ jobs: if: needs.config.outputs.has-secrets name: Bump version and publish package(s) - runs-on: ubuntu-20.04 + runs-on: ubuntu-22.04 strategy: matrix: diff --git a/.github/workflows/superset-applitool-cypress.yml b/.github/workflows/superset-applitool-cypress.yml index 8e5aa91cdba6..72fd1a734321 100644 --- a/.github/workflows/superset-applitool-cypress.yml +++ b/.github/workflows/superset-applitool-cypress.yml @@ -6,7 +6,7 @@ on: jobs: config: - runs-on: "ubuntu-latest" + runs-on: "ubuntu-22.04" outputs: has-secrets: ${{ steps.check.outputs.has-secrets }} steps: @@ -21,7 +21,7 @@ jobs: cypress-applitools: needs: config if: needs.config.outputs.has-secrets - runs-on: ubuntu-20.04 + runs-on: ubuntu-22.04 strategy: fail-fast: false matrix: diff --git a/.github/workflows/superset-applitools-storybook.yml b/.github/workflows/superset-applitools-storybook.yml index 147d64d28641..5382120bc163 100644 --- a/.github/workflows/superset-applitools-storybook.yml +++ b/.github/workflows/superset-applitools-storybook.yml @@ -12,7 +12,7 @@ env: jobs: config: - runs-on: "ubuntu-latest" + runs-on: "ubuntu-22.04" outputs: has-secrets: ${{ steps.check.outputs.has-secrets }} steps: @@ -27,7 +27,7 @@ jobs: cron: needs: config if: needs.config.outputs.has-secrets - runs-on: ubuntu-20.04 + runs-on: ubuntu-22.04 strategy: matrix: node: [18] diff --git a/.github/workflows/superset-cli.yml b/.github/workflows/superset-cli.yml index 060bae5dff37..29dd87adfe17 100644 --- a/.github/workflows/superset-cli.yml +++ b/.github/workflows/superset-cli.yml @@ -15,7 +15,7 @@ concurrency: jobs: test-load-examples: - runs-on: ubuntu-20.04 + runs-on: ubuntu-22.04 env: PYTHONPATH: ${{ github.workspace }} SUPERSET_CONFIG: tests.integration_tests.superset_test_config diff --git a/.github/workflows/superset-docs-deploy.yml b/.github/workflows/superset-docs-deploy.yml index 2bce8c023c6e..052eecdcab0c 100644 --- a/.github/workflows/superset-docs-deploy.yml +++ b/.github/workflows/superset-docs-deploy.yml @@ -12,7 +12,7 @@ on: jobs: config: - runs-on: "ubuntu-latest" + runs-on: "ubuntu-22.04" outputs: has-secrets: ${{ steps.check.outputs.has-secrets }} steps: @@ -28,7 +28,7 @@ jobs: needs: config if: needs.config.outputs.has-secrets name: Build & Deploy - runs-on: "ubuntu-latest" + runs-on: "ubuntu-22.04" steps: - name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )" uses: actions/checkout@v4 diff --git 
a/.github/workflows/superset-docs-verify.yml b/.github/workflows/superset-docs-verify.yml index 9f665d8086bb..de82268e046d 100644 --- a/.github/workflows/superset-docs-verify.yml +++ b/.github/workflows/superset-docs-verify.yml @@ -14,7 +14,7 @@ concurrency: jobs: build-deploy: name: Build & Deploy - runs-on: ubuntu-20.04 + runs-on: ubuntu-22.04 defaults: run: working-directory: docs diff --git a/.github/workflows/superset-e2e.yml b/.github/workflows/superset-e2e.yml index 06e4cffb0626..076894f25be2 100644 --- a/.github/workflows/superset-e2e.yml +++ b/.github/workflows/superset-e2e.yml @@ -28,7 +28,7 @@ concurrency: jobs: cypress-matrix: - runs-on: ubuntu-20.04 + runs-on: ubuntu-22.04 permissions: contents: read pull-requests: read diff --git a/.github/workflows/superset-frontend.yml b/.github/workflows/superset-frontend.yml index 6438c2cc1d8d..b8c2b2c4e2d3 100644 --- a/.github/workflows/superset-frontend.yml +++ b/.github/workflows/superset-frontend.yml @@ -15,7 +15,7 @@ concurrency: jobs: frontend-build: - runs-on: ubuntu-20.04 + runs-on: ubuntu-22.04 steps: - name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )" uses: actions/checkout@v4 diff --git a/.github/workflows/superset-helm-lint.yml b/.github/workflows/superset-helm-lint.yml index 48266a3299ea..5649f491a490 100644 --- a/.github/workflows/superset-helm-lint.yml +++ b/.github/workflows/superset-helm-lint.yml @@ -13,7 +13,7 @@ concurrency: jobs: lint-test: - runs-on: ubuntu-latest + runs-on: ubuntu-22.04 steps: - name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )" uses: actions/checkout@v4 diff --git a/.github/workflows/superset-helm-release.yml b/.github/workflows/superset-helm-release.yml index 062e23758634..242820afce45 100644 --- a/.github/workflows/superset-helm-release.yml +++ b/.github/workflows/superset-helm-release.yml @@ -10,7 +10,7 @@ on: jobs: release: - runs-on: ubuntu-latest + runs-on: ubuntu-22.04 permissions: contents: write diff --git a/.github/workflows/superset-python-integrationtest.yml b/.github/workflows/superset-python-integrationtest.yml index 80b2a3b98b75..7cd135e55959 100644 --- a/.github/workflows/superset-python-integrationtest.yml +++ b/.github/workflows/superset-python-integrationtest.yml @@ -15,7 +15,7 @@ concurrency: jobs: test-mysql: - runs-on: ubuntu-20.04 + runs-on: ubuntu-22.04 env: PYTHONPATH: ${{ github.workspace }} SUPERSET_CONFIG: tests.integration_tests.superset_test_config @@ -74,7 +74,7 @@ jobs: token: ${{ secrets.CODECOV_TOKEN }} verbose: true test-postgres: - runs-on: ubuntu-20.04 + runs-on: ubuntu-22.04 strategy: matrix: python-version: ["current", "next", "previous"] @@ -136,7 +136,7 @@ jobs: verbose: true test-sqlite: - runs-on: ubuntu-20.04 + runs-on: ubuntu-22.04 env: PYTHONPATH: ${{ github.workspace }} SUPERSET_CONFIG: tests.integration_tests.superset_test_config diff --git a/.github/workflows/superset-python-misc.yml b/.github/workflows/superset-python-misc.yml index d58226216fcc..12417d147a50 100644 --- a/.github/workflows/superset-python-misc.yml +++ b/.github/workflows/superset-python-misc.yml @@ -16,7 +16,7 @@ concurrency: jobs: python-lint: - runs-on: ubuntu-20.04 + runs-on: ubuntu-22.04 steps: - name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )" uses: actions/checkout@v4 @@ -33,7 +33,7 @@ jobs: if: steps.check.outputs.python babel-extract: - runs-on: ubuntu-20.04 + runs-on: ubuntu-22.04 steps: - name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )" uses: actions/checkout@v4 diff --git a/.github/workflows/superset-python-presto-hive.yml 
b/.github/workflows/superset-python-presto-hive.yml index 6ab65430b406..d87a70964cc5 100644 --- a/.github/workflows/superset-python-presto-hive.yml +++ b/.github/workflows/superset-python-presto-hive.yml @@ -16,7 +16,7 @@ concurrency: jobs: test-postgres-presto: - runs-on: ubuntu-20.04 + runs-on: ubuntu-22.04 env: PYTHONPATH: ${{ github.workspace }} SUPERSET_CONFIG: tests.integration_tests.superset_test_config @@ -84,7 +84,7 @@ jobs: verbose: true test-postgres-hive: - runs-on: ubuntu-20.04 + runs-on: ubuntu-22.04 env: PYTHONPATH: ${{ github.workspace }} SUPERSET_CONFIG: tests.integration_tests.superset_test_config diff --git a/.github/workflows/superset-python-unittest.yml b/.github/workflows/superset-python-unittest.yml index 454ee0c61e08..0f9cfc8aa10e 100644 --- a/.github/workflows/superset-python-unittest.yml +++ b/.github/workflows/superset-python-unittest.yml @@ -16,7 +16,7 @@ concurrency: jobs: unit-tests: - runs-on: ubuntu-20.04 + runs-on: ubuntu-22.04 strategy: matrix: python-version: ["current", "next"] diff --git a/.github/workflows/superset-translations.yml b/.github/workflows/superset-translations.yml index 11dbebb09800..292a42afdb83 100644 --- a/.github/workflows/superset-translations.yml +++ b/.github/workflows/superset-translations.yml @@ -15,7 +15,7 @@ concurrency: jobs: frontend-check-translations: - runs-on: ubuntu-20.04 + runs-on: ubuntu-22.04 steps: - name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )" uses: actions/checkout@v4 @@ -46,7 +46,7 @@ jobs: npm run build-translation babel-extract: - runs-on: ubuntu-20.04 + runs-on: ubuntu-22.04 steps: - name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )" uses: actions/checkout@v4 diff --git a/.github/workflows/superset-websocket.yml b/.github/workflows/superset-websocket.yml index 2d55ceafa03c..f1785a39abe4 100644 --- a/.github/workflows/superset-websocket.yml +++ b/.github/workflows/superset-websocket.yml @@ -18,7 +18,7 @@ concurrency: jobs: app-checks: - runs-on: ubuntu-20.04 + runs-on: ubuntu-22.04 steps: - name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )" uses: actions/checkout@v4 diff --git a/.github/workflows/supersetbot.yml b/.github/workflows/supersetbot.yml index 88d4648cc78d..f7e106ed9c7a 100644 --- a/.github/workflows/supersetbot.yml +++ b/.github/workflows/supersetbot.yml @@ -15,7 +15,7 @@ on: jobs: supersetbot: - runs-on: ubuntu-latest + runs-on: ubuntu-22.04 if: > github.event_name == 'workflow_dispatch' || (github.event_name == 'issue_comment' && contains(github.event.comment.body, '@supersetbot')) diff --git a/.github/workflows/tag-release.yml b/.github/workflows/tag-release.yml index dc0ecd7e7f66..8ee03b3d04c2 100644 --- a/.github/workflows/tag-release.yml +++ b/.github/workflows/tag-release.yml @@ -23,7 +23,7 @@ on: - 'false' jobs: config: - runs-on: "ubuntu-latest" + runs-on: "ubuntu-22.04" outputs: has-secrets: ${{ steps.check.outputs.has-secrets }} steps: @@ -39,7 +39,7 @@ jobs: needs: config if: needs.config.outputs.has-secrets name: docker-release - runs-on: ubuntu-latest + runs-on: ubuntu-22.04 strategy: matrix: build_preset: ["dev", "lean", "py310", "websocket", "dockerize"] diff --git a/.github/workflows/tech-debt.yml b/.github/workflows/tech-debt.yml index 6f73a3a51b62..f17e220a402b 100644 --- a/.github/workflows/tech-debt.yml +++ b/.github/workflows/tech-debt.yml @@ -8,7 +8,7 @@ on: jobs: config: - runs-on: "ubuntu-latest" + runs-on: "ubuntu-22.04" outputs: has-secrets: ${{ steps.check.outputs.has-secrets }} steps: @@ -23,7 +23,7 @@ jobs: process-and-upload: needs: config if: 
needs.config.outputs.has-secrets - runs-on: ubuntu-latest + runs-on: ubuntu-22.04 name: Generate Reports steps: - name: Checkout Repository diff --git a/.github/workflows/welcome-new-users.yml b/.github/workflows/welcome-new-users.yml index 0144e20892d4..2c602967770d 100644 --- a/.github/workflows/welcome-new-users.yml +++ b/.github/workflows/welcome-new-users.yml @@ -6,7 +6,7 @@ on: jobs: welcome: - runs-on: ubuntu-latest + runs-on: ubuntu-22.04 permissions: pull-requests: write From 839ca82a19bb9c0b88fcc45b4dad5c7d2981ba7f Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 1 Jul 2024 14:41:10 -0600 Subject: [PATCH 27/31] chore(deps): bump react-markdown from 8.0.3 to 8.0.7 in /superset-frontend (#29439) Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- superset-frontend/package-lock.json | 50 ++++++------------- .../packages/superset-ui-core/package.json | 2 +- 2 files changed, 17 insertions(+), 35 deletions(-) diff --git a/superset-frontend/package-lock.json b/superset-frontend/package-lock.json index 3346ee793c2b..7ab87afc1962 100644 --- a/superset-frontend/package-lock.json +++ b/superset-frontend/package-lock.json @@ -39077,15 +39077,6 @@ "url": "https://github.com/sponsors/wooorm" } }, - "node_modules/hast-util-to-estree/node_modules/style-to-object": { - "version": "0.4.4", - "resolved": "https://registry.npmjs.org/style-to-object/-/style-to-object-0.4.4.tgz", - "integrity": "sha512-HYNoHZa2GorYNyqiCaBgsxvcJIn7OHq6inEga+E6Ke3m5JkoqpQbnFssk4jwe+K7AhGa2fcha4wSOf1Kn01dMg==", - "dev": true, - "dependencies": { - "inline-style-parser": "0.1.1" - } - }, "node_modules/hast-util-to-parse5": { "version": "8.0.0", "resolved": "https://registry.npmjs.org/hast-util-to-parse5/-/hast-util-to-parse5-8.0.0.tgz", @@ -59603,9 +59594,9 @@ } }, "node_modules/react-markdown": { - "version": "8.0.3", - "resolved": "https://registry.npmjs.org/react-markdown/-/react-markdown-8.0.3.tgz", - "integrity": "sha512-We36SfqaKoVNpN1QqsZwWSv/OZt5J15LNgTLWynwAN5b265hrQrsjMtlRNwUvS+YyR3yDM8HpTNc4pK9H/Gc0A==", + "version": "8.0.7", + "resolved": "https://registry.npmjs.org/react-markdown/-/react-markdown-8.0.7.tgz", + "integrity": "sha512-bvWbzG4MtOU62XqBx3Xx+zB2raaFFsq4mYiAzfjXJMEz2sixgeAfraA3tvzULF02ZdOMUOKTBFFaZJDDrq+BJQ==", "dependencies": { "@types/hast": "^2.0.0", "@types/prop-types": "^15.0.0", @@ -59618,7 +59609,7 @@ "remark-parse": "^10.0.0", "remark-rehype": "^10.0.0", "space-separated-tokens": "^2.0.0", - "style-to-object": "^0.3.0", + "style-to-object": "^0.4.0", "unified": "^10.0.0", "unist-util-visit": "^4.0.0", "vfile": "^5.0.0" @@ -63643,9 +63634,9 @@ } }, "node_modules/style-to-object": { - "version": "0.3.0", - "resolved": "https://registry.npmjs.org/style-to-object/-/style-to-object-0.3.0.tgz", - "integrity": "sha512-CzFnRRXhzWIdItT3OmF8SQfWyahHhjq3HwcMNCNLn+N7klOOqPjMeG/4JSu77D7ypZdGvSzvkrbyeTMizz2VrA==", + "version": "0.4.4", + "resolved": "https://registry.npmjs.org/style-to-object/-/style-to-object-0.4.4.tgz", + "integrity": "sha512-HYNoHZa2GorYNyqiCaBgsxvcJIn7OHq6inEga+E6Ke3m5JkoqpQbnFssk4jwe+K7AhGa2fcha4wSOf1Kn01dMg==", "dependencies": { "inline-style-parser": "0.1.1" } @@ -68708,7 +68699,7 @@ "math-expression-evaluator": "^1.3.8", "pretty-ms": "^7.0.0", "react-error-boundary": "^1.2.5", - "react-markdown": "^8.0.3", + "react-markdown": "^8.0.7", "rehype-raw": "^7.0.0", "rehype-sanitize": "^6.0.0", "remark-gfm": "^3.0.1", @@ -87742,7 +87733,7 @@ 
"math-expression-evaluator": "^1.3.8", "pretty-ms": "^7.0.0", "react-error-boundary": "^1.2.5", - "react-markdown": "^8.0.3", + "react-markdown": "^8.0.7", "rehype-raw": "^7.0.0", "rehype-sanitize": "^6.0.0", "remark-gfm": "^3.0.1", @@ -103847,15 +103838,6 @@ "resolved": "https://registry.npmjs.org/space-separated-tokens/-/space-separated-tokens-2.0.2.tgz", "integrity": "sha512-PEGlAwrG8yXGXRjW32fGbg66JAlOAwbObuqVoJpv/mRgoWDQfgH1wDPvtzWyUSNAXBGSk8h755YDbbcEy3SH2Q==", "dev": true - }, - "style-to-object": { - "version": "0.4.4", - "resolved": "https://registry.npmjs.org/style-to-object/-/style-to-object-0.4.4.tgz", - "integrity": "sha512-HYNoHZa2GorYNyqiCaBgsxvcJIn7OHq6inEga+E6Ke3m5JkoqpQbnFssk4jwe+K7AhGa2fcha4wSOf1Kn01dMg==", - "dev": true, - "requires": { - "inline-style-parser": "0.1.1" - } } } }, @@ -119097,9 +119079,9 @@ } }, "react-markdown": { - "version": "8.0.3", - "resolved": "https://registry.npmjs.org/react-markdown/-/react-markdown-8.0.3.tgz", - "integrity": "sha512-We36SfqaKoVNpN1QqsZwWSv/OZt5J15LNgTLWynwAN5b265hrQrsjMtlRNwUvS+YyR3yDM8HpTNc4pK9H/Gc0A==", + "version": "8.0.7", + "resolved": "https://registry.npmjs.org/react-markdown/-/react-markdown-8.0.7.tgz", + "integrity": "sha512-bvWbzG4MtOU62XqBx3Xx+zB2raaFFsq4mYiAzfjXJMEz2sixgeAfraA3tvzULF02ZdOMUOKTBFFaZJDDrq+BJQ==", "requires": { "@types/hast": "^2.0.0", "@types/prop-types": "^15.0.0", @@ -119112,7 +119094,7 @@ "remark-parse": "^10.0.0", "remark-rehype": "^10.0.0", "space-separated-tokens": "^2.0.0", - "style-to-object": "^0.3.0", + "style-to-object": "^0.4.0", "unified": "^10.0.0", "unist-util-visit": "^4.0.0", "vfile": "^5.0.0" @@ -122245,9 +122227,9 @@ "requires": {} }, "style-to-object": { - "version": "0.3.0", - "resolved": "https://registry.npmjs.org/style-to-object/-/style-to-object-0.3.0.tgz", - "integrity": "sha512-CzFnRRXhzWIdItT3OmF8SQfWyahHhjq3HwcMNCNLn+N7klOOqPjMeG/4JSu77D7ypZdGvSzvkrbyeTMizz2VrA==", + "version": "0.4.4", + "resolved": "https://registry.npmjs.org/style-to-object/-/style-to-object-0.4.4.tgz", + "integrity": "sha512-HYNoHZa2GorYNyqiCaBgsxvcJIn7OHq6inEga+E6Ke3m5JkoqpQbnFssk4jwe+K7AhGa2fcha4wSOf1Kn01dMg==", "requires": { "inline-style-parser": "0.1.1" } diff --git a/superset-frontend/packages/superset-ui-core/package.json b/superset-frontend/packages/superset-ui-core/package.json index e69522494747..7f9144cda064 100644 --- a/superset-frontend/packages/superset-ui-core/package.json +++ b/superset-frontend/packages/superset-ui-core/package.json @@ -53,7 +53,7 @@ "math-expression-evaluator": "^1.3.8", "pretty-ms": "^7.0.0", "react-error-boundary": "^1.2.5", - "react-markdown": "^8.0.3", + "react-markdown": "^8.0.7", "rehype-raw": "^7.0.0", "rehype-sanitize": "^6.0.0", "remark-gfm": "^3.0.1", From 1e73820277134fd6d783ce4c083a1bd5cee0fccb Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 1 Jul 2024 15:03:13 -0600 Subject: [PATCH 28/31] chore(deps): bump @algolia/client-search from 4.23.3 to 4.24.0 in /docs (#29428) Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- docs/package.json | 2 +- docs/yarn.lock | 43 ++++++++++++++++++++++++++++++++++++++++++- 2 files changed, 43 insertions(+), 2 deletions(-) diff --git a/docs/package.json b/docs/package.json index 225bfb42520f..af64b5c098e2 100644 --- a/docs/package.json +++ b/docs/package.json @@ -17,7 +17,7 @@ "typecheck": "tsc" }, "dependencies": { - "@algolia/client-search": "^4.23.3", + "@algolia/client-search": "^4.24.0", 
"@ant-design/icons": "^5.3.7", "@docsearch/react": "^3.6.0", "@docusaurus/core": "^3.3.2", diff --git a/docs/yarn.lock b/docs/yarn.lock index 70743d4e4734..c54b07b56a5e 100644 --- a/docs/yarn.lock +++ b/docs/yarn.lock @@ -41,6 +41,11 @@ resolved "https://registry.yarnpkg.com/@algolia/cache-common/-/cache-common-4.23.3.tgz#3bec79092d512a96c9bfbdeec7cff4ad36367166" integrity sha512-h9XcNI6lxYStaw32pHpB1TMm0RuxphF+Ik4o7tcQiodEdpKK+wKufY6QXtba7t3k8eseirEMVB83uFFF3Nu54A== +"@algolia/cache-common@4.24.0": + version "4.24.0" + resolved "https://registry.yarnpkg.com/@algolia/cache-common/-/cache-common-4.24.0.tgz#81a8d3a82ceb75302abb9b150a52eba9960c9744" + integrity sha512-emi+v+DmVLpMGhp0V9q9h5CdkURsNmFC+cOS6uK9ndeJm9J4TiqSvPYVu+THUP8P/S08rxf5x2P+p3CfID0Y4g== + "@algolia/cache-in-memory@4.23.3": version "4.23.3" resolved "https://registry.yarnpkg.com/@algolia/cache-in-memory/-/cache-in-memory-4.23.3.tgz#3945f87cd21ffa2bec23890c85305b6b11192423" @@ -75,6 +80,14 @@ "@algolia/requester-common" "4.23.3" "@algolia/transporter" "4.23.3" +"@algolia/client-common@4.24.0": + version "4.24.0" + resolved "https://registry.yarnpkg.com/@algolia/client-common/-/client-common-4.24.0.tgz#77c46eee42b9444a1d1c1583a83f7df4398a649d" + integrity sha512-bc2ROsNL6w6rqpl5jj/UywlIYC21TwSSoFHKl01lYirGMW+9Eek6r02Tocg4gZ8HAw3iBvu6XQiM3BEbmEMoiA== + dependencies: + "@algolia/requester-common" "4.24.0" + "@algolia/transporter" "4.24.0" + "@algolia/client-personalization@4.23.3": version "4.23.3" resolved "https://registry.yarnpkg.com/@algolia/client-personalization/-/client-personalization-4.23.3.tgz#35fa8e5699b0295fbc400a8eb211dc711e5909db" @@ -84,7 +97,7 @@ "@algolia/requester-common" "4.23.3" "@algolia/transporter" "4.23.3" -"@algolia/client-search@4.23.3", "@algolia/client-search@^4.23.3": +"@algolia/client-search@4.23.3": version "4.23.3" resolved "https://registry.yarnpkg.com/@algolia/client-search/-/client-search-4.23.3.tgz#a3486e6af13a231ec4ab43a915a1f318787b937f" integrity sha512-P4VAKFHqU0wx9O+q29Q8YVuaowaZ5EM77rxfmGnkHUJggh28useXQdopokgwMeYw2XUht49WX5RcTQ40rZIabw== @@ -93,6 +106,15 @@ "@algolia/requester-common" "4.23.3" "@algolia/transporter" "4.23.3" +"@algolia/client-search@^4.24.0": + version "4.24.0" + resolved "https://registry.yarnpkg.com/@algolia/client-search/-/client-search-4.24.0.tgz#75e6c02d33ef3e0f34afd9962c085b856fc4a55f" + integrity sha512-uRW6EpNapmLAD0mW47OXqTP8eiIx5F6qN9/x/7HHO6owL3N1IXqydGwW5nhDFBrV+ldouro2W1VX3XlcUXEFCA== + dependencies: + "@algolia/client-common" "4.24.0" + "@algolia/requester-common" "4.24.0" + "@algolia/transporter" "4.24.0" + "@algolia/events@^4.0.1": version "4.0.1" resolved "https://registry.yarnpkg.com/@algolia/events/-/events-4.0.1.tgz#fd39e7477e7bc703d7f893b556f676c032af3950" @@ -103,6 +125,11 @@ resolved "https://registry.yarnpkg.com/@algolia/logger-common/-/logger-common-4.23.3.tgz#35c6d833cbf41e853a4f36ba37c6e5864920bfe9" integrity sha512-y9kBtmJwiZ9ZZ+1Ek66P0M68mHQzKRxkW5kAAXYN/rdzgDN0d2COsViEFufxJ0pb45K4FRcfC7+33YB4BLrZ+g== +"@algolia/logger-common@4.24.0": + version "4.24.0" + resolved "https://registry.yarnpkg.com/@algolia/logger-common/-/logger-common-4.24.0.tgz#28d439976019ec0a46ba7a1a739ef493d4ef8123" + integrity sha512-LLUNjkahj9KtKYrQhFKCzMx0BY3RnNP4FEtO+sBybCjJ73E8jNdaKJ/Dd8A/VA4imVHP5tADZ8pn5B8Ga/wTMA== + "@algolia/logger-console@4.23.3": version "4.23.3" resolved "https://registry.yarnpkg.com/@algolia/logger-console/-/logger-console-4.23.3.tgz#30f916781826c4db5f51fcd9a8a264a06e136985" @@ -139,6 +166,11 @@ resolved 
"https://registry.yarnpkg.com/@algolia/requester-common/-/requester-common-4.23.3.tgz#7dbae896e41adfaaf1d1fa5f317f83a99afb04b3" integrity sha512-xloIdr/bedtYEGcXCiF2muajyvRhwop4cMZo+K2qzNht0CMzlRkm8YsDdj5IaBhshqfgmBb3rTg4sL4/PpvLYw== +"@algolia/requester-common@4.24.0": + version "4.24.0" + resolved "https://registry.yarnpkg.com/@algolia/requester-common/-/requester-common-4.24.0.tgz#1c60c198031f48fcdb9e34c4057a3ea987b9a436" + integrity sha512-k3CXJ2OVnvgE3HMwcojpvY6d9kgKMPRxs/kVohrwF5WMr2fnqojnycZkxPoEg+bXm8fi5BBfFmOqgYztRtHsQA== + "@algolia/requester-node-http@4.23.3": version "4.23.3" resolved "https://registry.yarnpkg.com/@algolia/requester-node-http/-/requester-node-http-4.23.3.tgz#c9f94a5cb96a15f48cea338ab6ef16bbd0ff989f" @@ -155,6 +187,15 @@ "@algolia/logger-common" "4.23.3" "@algolia/requester-common" "4.23.3" +"@algolia/transporter@4.24.0": + version "4.24.0" + resolved "https://registry.yarnpkg.com/@algolia/transporter/-/transporter-4.24.0.tgz#226bb1f8af62430374c1972b2e5c8580ab275102" + integrity sha512-86nI7w6NzWxd1Zp9q3413dRshDqAzSbsQjhcDhPIatEFiZrL1/TjnHL8S7jVKFePlIMzDsZWXAXwXzcok9c5oA== + dependencies: + "@algolia/cache-common" "4.24.0" + "@algolia/logger-common" "4.24.0" + "@algolia/requester-common" "4.24.0" + "@ampproject/remapping@^2.2.0": version "2.3.0" resolved "https://registry.yarnpkg.com/@ampproject/remapping/-/remapping-2.3.0.tgz#ed441b6fa600072520ce18b43d2c8cc8caecc7f4" From cf031bbee4810d8da5402c58b302aec2882dd74b Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 1 Jul 2024 15:04:47 -0600 Subject: [PATCH 29/31] chore(deps-dev): bump webpack from 5.91.0 to 5.92.1 in /docs (#29429) Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- docs/package.json | 2 +- docs/yarn.lock | 28 ++++++++++++++-------------- 2 files changed, 15 insertions(+), 15 deletions(-) diff --git a/docs/package.json b/docs/package.json index af64b5c098e2..4774a3ac83f7 100644 --- a/docs/package.json +++ b/docs/package.json @@ -50,7 +50,7 @@ "@docusaurus/tsconfig": "^3.4.0", "@types/react": "^18.3.3", "typescript": "^5.5.2", - "webpack": "^5.91.0" + "webpack": "^5.92.1" }, "browserslist": { "production": [ diff --git a/docs/yarn.lock b/docs/yarn.lock index c54b07b56a5e..8e9c47269db2 100644 --- a/docs/yarn.lock +++ b/docs/yarn.lock @@ -3127,10 +3127,10 @@ accepts@~1.3.4, accepts@~1.3.5, accepts@~1.3.8: mime-types "~2.1.34" negotiator "0.6.3" -acorn-import-assertions@^1.9.0: - version "1.9.0" - resolved "https://registry.yarnpkg.com/acorn-import-assertions/-/acorn-import-assertions-1.9.0.tgz#507276249d684797c84e0734ef84860334cfb1ac" - integrity sha512-cmMwop9x+8KFhxvKrKfPYmN6/pKTYYHBqLa0DfvVZcKMJWNyWLnaqND7dx/qn66R7ewM1UX5XMaDVP5wlVTaVA== +acorn-import-attributes@^1.9.5: + version "1.9.5" + resolved "https://registry.yarnpkg.com/acorn-import-attributes/-/acorn-import-attributes-1.9.5.tgz#7eb1557b1ba05ef18b5ed0ec67591bfab04688ef" + integrity sha512-n02Vykv5uA3eHGM/Z2dQrcD56kL8TyDb2p1+0P83PClMnC/nc+anbQRhIOWnSq4Ke/KvDPrY3C9hDtC/A3eHnQ== acorn-jsx@^5.0.0: version "5.3.2" @@ -4658,10 +4658,10 @@ end-of-stream@^1.1.0, end-of-stream@^1.4.1: dependencies: once "^1.4.0" -enhanced-resolve@^5.16.0: - version "5.16.0" - resolved "https://registry.yarnpkg.com/enhanced-resolve/-/enhanced-resolve-5.16.0.tgz#65ec88778083056cb32487faa9aef82ed0864787" - integrity sha512-O+QWCviPNSSLAD9Ucn8Awv+poAkqn3T1XY5/N7kR7rQO9yfSGWkYZDwpJ+iKF7B8rxaQKWngSqACpgzeapSyoA== +enhanced-resolve@^5.17.0: + 
version "5.17.0" + resolved "https://registry.yarnpkg.com/enhanced-resolve/-/enhanced-resolve-5.17.0.tgz#d037603789dd9555b89aaec7eb78845c49089bc5" + integrity sha512-dwDPwZL0dmye8Txp2gzFmA6sxALaSvdRDjPH0viLcKrtlOL3tw62nWWweVD1SdILDTJrbrL6tdWVN58Wo6U3eA== dependencies: graceful-fs "^4.2.4" tapable "^2.2.0" @@ -10286,10 +10286,10 @@ webpack-sources@^3.2.3: resolved "https://registry.yarnpkg.com/webpack-sources/-/webpack-sources-3.2.3.tgz#2d4daab8451fd4b240cc27055ff6a0c2ccea0cde" integrity sha512-/DyMEOrDgLKKIG0fmvtz+4dUX/3Ghozwgm6iPp8KRhvn+eQf9+Q7GWxVNMk3+uCPWfdXYC4ExGBckIXdFEfH1w== -webpack@^5.88.1, webpack@^5.91.0: - version "5.91.0" - resolved "https://registry.yarnpkg.com/webpack/-/webpack-5.91.0.tgz#ffa92c1c618d18c878f06892bbdc3373c71a01d9" - integrity sha512-rzVwlLeBWHJbmgTC/8TvAcu5vpJNII+MelQpylD4jNERPwpBJOE2lEcko1zJX3QJeLjTTAnQxn/OJ8bjDzVQaw== +webpack@^5.88.1, webpack@^5.92.1: + version "5.92.1" + resolved "https://registry.yarnpkg.com/webpack/-/webpack-5.92.1.tgz#eca5c1725b9e189cffbd86e8b6c3c7400efc5788" + integrity sha512-JECQ7IwJb+7fgUFBlrJzbyu3GEuNBcdqr1LD7IbSzwkSmIevTm8PF+wej3Oxuz/JFBUZ6O1o43zsPkwm1C4TmA== dependencies: "@types/eslint-scope" "^3.7.3" "@types/estree" "^1.0.5" @@ -10297,10 +10297,10 @@ webpack@^5.88.1, webpack@^5.91.0: "@webassemblyjs/wasm-edit" "^1.12.1" "@webassemblyjs/wasm-parser" "^1.12.1" acorn "^8.7.1" - acorn-import-assertions "^1.9.0" + acorn-import-attributes "^1.9.5" browserslist "^4.21.10" chrome-trace-event "^1.0.2" - enhanced-resolve "^5.16.0" + enhanced-resolve "^5.17.0" es-module-lexer "^1.2.1" eslint-scope "5.1.1" events "^3.2.0" From 7a0ae36c4ab3833ccca98049b60065a887ca2e51 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 1 Jul 2024 15:16:11 -0700 Subject: [PATCH 30/31] chore(deps): bump actions/checkout from 2 to 4 (#29434) Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- .github/workflows/superset-e2e.yml | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/.github/workflows/superset-e2e.yml b/.github/workflows/superset-e2e.yml index 076894f25be2..092864246240 100644 --- a/.github/workflows/superset-e2e.yml +++ b/.github/workflows/superset-e2e.yml @@ -66,20 +66,20 @@ jobs: # Conditional checkout based on context - name: Checkout for push or pull_request event if: github.event_name == 'push' || github.event_name == 'pull_request' - uses: actions/checkout@v2 + uses: actions/checkout@v4 with: persist-credentials: false submodules: recursive - name: Checkout using ref (workflow_dispatch) if: github.event_name == 'workflow_dispatch' && github.event.inputs.ref != '' - uses: actions/checkout@v2 + uses: actions/checkout@v4 with: persist-credentials: false ref: ${{ github.event.inputs.ref }} submodules: recursive - name: Checkout using PR ID (workflow_dispatch) if: github.event_name == 'workflow_dispatch' && github.event.inputs.pr_id != '' - uses: actions/checkout@v2 + uses: actions/checkout@v4 with: persist-credentials: false ref: refs/pull/${{ github.event.inputs.pr_id }}/merge From 3449b8f9dc4468de6a2e1d1992bafc7a328cac86 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 1 Jul 2024 17:02:44 -0600 Subject: [PATCH 31/31] chore(deps-dev): update @types/lodash requirement from ^4.17.4 to ^4.17.6 in /superset-frontend/plugins/plugin-chart-handlebars (#29425) Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] 
<49699333+dependabot[bot]@users.noreply.github.com> Co-authored-by: Evan Rusackas --- superset-frontend/package-lock.json | 16 ++++++++-------- .../plugins/plugin-chart-handlebars/package.json | 2 +- 2 files changed, 9 insertions(+), 9 deletions(-) diff --git a/superset-frontend/package-lock.json b/superset-frontend/package-lock.json index 7ab87afc1962..676b956c150d 100644 --- a/superset-frontend/package-lock.json +++ b/superset-frontend/package-lock.json @@ -70520,7 +70520,7 @@ }, "devDependencies": { "@types/jest": "^29.5.12", - "@types/lodash": "^4.17.4", + "@types/lodash": "^4.17.6", "jest": "^29.7.0" }, "peerDependencies": { @@ -70535,9 +70535,9 @@ } }, "plugins/plugin-chart-handlebars/node_modules/@types/lodash": { - "version": "4.17.4", - "resolved": "https://registry.npmjs.org/@types/lodash/-/lodash-4.17.4.tgz", - "integrity": "sha512-wYCP26ZLxaT3R39kiN2+HcJ4kTd3U1waI/cY7ivWYqFP6pW3ZNpvi6Wd6PHZx7T/t8z0vlkXMg3QYLa7DZ/IJQ==", + "version": "4.17.6", + "resolved": "https://registry.npmjs.org/@types/lodash/-/lodash-4.17.6.tgz", + "integrity": "sha512-OpXEVoCKSS3lQqjx9GGGOapBeuW5eUboYHRlHP9urXPX25IKZ6AnP5ZRxtVf63iieUbsHxLn8NQ5Nlftc6yzAA==", "dev": true }, "plugins/plugin-chart-handlebars/node_modules/just-handlebars-helpers": { @@ -89053,16 +89053,16 @@ "version": "file:plugins/plugin-chart-handlebars", "requires": { "@types/jest": "^29.5.12", - "@types/lodash": "^4.17.4", + "@types/lodash": "^4.17.6", "handlebars": "^4.7.7", "jest": "^29.7.0", "just-handlebars-helpers": "^1.0.19" }, "dependencies": { "@types/lodash": { - "version": "4.17.4", - "resolved": "https://registry.npmjs.org/@types/lodash/-/lodash-4.17.4.tgz", - "integrity": "sha512-wYCP26ZLxaT3R39kiN2+HcJ4kTd3U1waI/cY7ivWYqFP6pW3ZNpvi6Wd6PHZx7T/t8z0vlkXMg3QYLa7DZ/IJQ==", + "version": "4.17.6", + "resolved": "https://registry.npmjs.org/@types/lodash/-/lodash-4.17.6.tgz", + "integrity": "sha512-OpXEVoCKSS3lQqjx9GGGOapBeuW5eUboYHRlHP9urXPX25IKZ6AnP5ZRxtVf63iieUbsHxLn8NQ5Nlftc6yzAA==", "dev": true }, "just-handlebars-helpers": { diff --git a/superset-frontend/plugins/plugin-chart-handlebars/package.json b/superset-frontend/plugins/plugin-chart-handlebars/package.json index ed63f04ad9bd..9ce608cd1be2 100644 --- a/superset-frontend/plugins/plugin-chart-handlebars/package.json +++ b/superset-frontend/plugins/plugin-chart-handlebars/package.json @@ -42,7 +42,7 @@ }, "devDependencies": { "@types/jest": "^29.5.12", - "@types/lodash": "^4.17.4", + "@types/lodash": "^4.17.6", "jest": "^29.7.0" } }