Skip to content

Commit 273c72a

Browse files
committed
Merge branch 'master' of https://github.com/apache/superset
2 parents a9ed918 + c2baba5 commit 273c72a

File tree

145 files changed

+7874
-2626
lines changed

Some content is hidden

Large Commits have some content hidden by default. Use the searchbox below for content that may be hidden.

145 files changed

+7874
-2626
lines changed

.asf.yaml

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -83,6 +83,7 @@ github:
8383
- cypress-matrix (5, chrome)
8484
- dependency-review
8585
- frontend-build
86+
- playwright-tests (chromium)
8687
- pre-commit (current)
8788
- pre-commit (previous)
8889
- test-mysql

.github/workflows/bashlib.sh

Lines changed: 21 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -195,6 +195,7 @@ playwright-install() {
195195

196196
playwright-run() {
197197
local APP_ROOT=$1
198+
local TEST_PATH=$2
198199

199200
# Start Flask from the project root (same as Cypress)
200201
cd "$GITHUB_WORKSPACE"
@@ -238,8 +239,26 @@ playwright-run() {
238239

239240
say "::group::Run Playwright tests"
240241
echo "Running Playwright with baseURL: ${PLAYWRIGHT_BASE_URL}"
241-
npx playwright test auth/login --reporter=github --output=playwright-results
242-
local status=$?
242+
if [ -n "$TEST_PATH" ]; then
243+
# Check if there are any test files in the specified path
244+
if ! find "playwright/tests/${TEST_PATH}" -name "*.spec.ts" -type f 2>/dev/null | grep -q .; then
245+
echo "No test files found in ${TEST_PATH} - skipping test run"
246+
say "::endgroup::"
247+
kill $flaskProcessId
248+
return 0
249+
fi
250+
echo "Running tests: ${TEST_PATH}"
251+
# Set INCLUDE_EXPERIMENTAL=true to allow experimental tests to run
252+
export INCLUDE_EXPERIMENTAL=true
253+
npx playwright test "${TEST_PATH}" --output=playwright-results
254+
local status=$?
255+
# Unset to prevent leaking into subsequent commands
256+
unset INCLUDE_EXPERIMENTAL
257+
else
258+
echo "Running all required tests (experimental/ excluded via playwright.config.ts)"
259+
npx playwright test --output=playwright-results
260+
local status=$?
261+
fi
243262
say "::endgroup::"
244263

245264
# After job is done, print out Flask log for debugging

.github/workflows/superset-e2e.yml

Lines changed: 115 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -151,3 +151,118 @@ jobs:
151151
with:
152152
path: ${{ github.workspace }}/superset-frontend/cypress-base/cypress/screenshots
153153
name: cypress-artifact-${{ github.run_id }}-${{ github.job }}-${{ matrix.browser }}-${{ matrix.parallel_id }}--${{ steps.set-safe-app-root.outputs.safe_app_root }}
154+
155+
playwright-tests:
156+
runs-on: ubuntu-22.04
157+
permissions:
158+
contents: read
159+
pull-requests: read
160+
strategy:
161+
fail-fast: false
162+
matrix:
163+
browser: ["chromium"]
164+
app_root: ["", "/app/prefix"]
165+
env:
166+
SUPERSET_ENV: development
167+
SUPERSET_CONFIG: tests.integration_tests.superset_test_config
168+
SUPERSET__SQLALCHEMY_DATABASE_URI: postgresql+psycopg2://superset:[email protected]:15432/superset
169+
PYTHONPATH: ${{ github.workspace }}
170+
REDIS_PORT: 16379
171+
GITHUB_TOKEN: ${{ github.token }}
172+
services:
173+
postgres:
174+
image: postgres:16-alpine
175+
env:
176+
POSTGRES_USER: superset
177+
POSTGRES_PASSWORD: superset
178+
ports:
179+
- 15432:5432
180+
redis:
181+
image: redis:7-alpine
182+
ports:
183+
- 16379:6379
184+
steps:
185+
# -------------------------------------------------------
186+
# Conditional checkout based on context (same as Cypress workflow)
187+
- name: Checkout for push or pull_request event
188+
if: github.event_name == 'push' || github.event_name == 'pull_request'
189+
uses: actions/checkout@v5
190+
with:
191+
persist-credentials: false
192+
submodules: recursive
193+
ref: ${{ github.event_name == 'pull_request' && github.event.pull_request.head.sha || github.sha }}
194+
- name: Checkout using ref (workflow_dispatch)
195+
if: github.event_name == 'workflow_dispatch' && github.event.inputs.ref != ''
196+
uses: actions/checkout@v5
197+
with:
198+
persist-credentials: false
199+
ref: ${{ github.event.inputs.ref }}
200+
submodules: recursive
201+
- name: Checkout using PR ID (workflow_dispatch)
202+
if: github.event_name == 'workflow_dispatch' && github.event.inputs.pr_id != ''
203+
uses: actions/checkout@v5
204+
with:
205+
persist-credentials: false
206+
ref: refs/pull/${{ github.event.inputs.pr_id }}/merge
207+
submodules: recursive
208+
# -------------------------------------------------------
209+
- name: Check for file changes
210+
id: check
211+
uses: ./.github/actions/change-detector/
212+
with:
213+
token: ${{ secrets.GITHUB_TOKEN }}
214+
- name: Setup Python
215+
uses: ./.github/actions/setup-backend/
216+
if: steps.check.outputs.python || steps.check.outputs.frontend
217+
- name: Setup postgres
218+
if: steps.check.outputs.python || steps.check.outputs.frontend
219+
uses: ./.github/actions/cached-dependencies
220+
with:
221+
run: setup-postgres
222+
- name: Import test data
223+
if: steps.check.outputs.python || steps.check.outputs.frontend
224+
uses: ./.github/actions/cached-dependencies
225+
with:
226+
run: testdata
227+
- name: Setup Node.js
228+
if: steps.check.outputs.python || steps.check.outputs.frontend
229+
uses: actions/setup-node@v5
230+
with:
231+
node-version-file: './superset-frontend/.nvmrc'
232+
- name: Install npm dependencies
233+
if: steps.check.outputs.python || steps.check.outputs.frontend
234+
uses: ./.github/actions/cached-dependencies
235+
with:
236+
run: npm-install
237+
- name: Build javascript packages
238+
if: steps.check.outputs.python || steps.check.outputs.frontend
239+
uses: ./.github/actions/cached-dependencies
240+
with:
241+
run: build-instrumented-assets
242+
- name: Install Playwright
243+
if: steps.check.outputs.python || steps.check.outputs.frontend
244+
uses: ./.github/actions/cached-dependencies
245+
with:
246+
run: playwright-install
247+
- name: Run Playwright (Required Tests)
248+
if: steps.check.outputs.python || steps.check.outputs.frontend
249+
uses: ./.github/actions/cached-dependencies
250+
env:
251+
NODE_OPTIONS: "--max-old-space-size=4096"
252+
with:
253+
run: playwright-run "${{ matrix.app_root }}"
254+
- name: Set safe app root
255+
if: failure()
256+
id: set-safe-app-root
257+
run: |
258+
APP_ROOT="${{ matrix.app_root }}"
259+
SAFE_APP_ROOT=${APP_ROOT//\//_}
260+
echo "safe_app_root=$SAFE_APP_ROOT" >> $GITHUB_OUTPUT
261+
- name: Upload Playwright Artifacts
262+
uses: actions/upload-artifact@v4
263+
if: failure()
264+
with:
265+
path: |
266+
${{ github.workspace }}/superset-frontend/playwright-results/
267+
${{ github.workspace }}/superset-frontend/test-results/
268+
name: playwright-artifact-${{ github.run_id }}-${{ github.job }}-${{ matrix.browser }}--${{ steps.set-safe-app-root.outputs.safe_app_root }}

.github/workflows/superset-playwright.yml

Lines changed: 7 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -1,4 +1,4 @@
1-
name: Playwright E2E Tests
1+
name: Playwright Experimental Tests
22

33
on:
44
push:
@@ -23,9 +23,10 @@ concurrency:
2323
cancel-in-progress: true
2424

2525
jobs:
26-
playwright-tests:
26+
# NOTE: Required Playwright tests are in superset-e2e.yml (E2E / playwright-tests)
27+
# This workflow contains only experimental tests that run in shadow mode
28+
playwright-tests-experimental:
2729
runs-on: ubuntu-22.04
28-
# Allow workflow to succeed even if tests fail during shadow mode
2930
continue-on-error: true
3031
permissions:
3132
contents: read
@@ -117,13 +118,13 @@ jobs:
117118
uses: ./.github/actions/cached-dependencies
118119
with:
119120
run: playwright-install
120-
- name: Run Playwright
121+
- name: Run Playwright (Experimental Tests)
121122
if: steps.check.outputs.python || steps.check.outputs.frontend
122123
uses: ./.github/actions/cached-dependencies
123124
env:
124125
NODE_OPTIONS: "--max-old-space-size=4096"
125126
with:
126-
run: playwright-run ${{ matrix.app_root }}
127+
run: playwright-run "${{ matrix.app_root }}" experimental/
127128
- name: Set safe app root
128129
if: failure()
129130
id: set-safe-app-root
@@ -138,4 +139,4 @@ jobs:
138139
path: |
139140
${{ github.workspace }}/superset-frontend/playwright-results/
140141
${{ github.workspace }}/superset-frontend/test-results/
141-
name: playwright-artifact-${{ github.run_id }}-${{ github.job }}-${{ matrix.browser }}--${{ steps.set-safe-app-root.outputs.safe_app_root }}
142+
name: playwright-experimental-artifact-${{ github.run_id }}-${{ github.job }}-${{ matrix.browser }}--${{ steps.set-safe-app-root.outputs.safe_app_root }}

.gitignore

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -33,6 +33,7 @@ cover
3333
.env
3434
.envrc
3535
.idea
36+
.roo
3637
.mypy_cache
3738
.python-version
3839
.tox

docs/developer_portal/extensions/interacting-with-host.md

Lines changed: 25 additions & 16 deletions
Original file line numberDiff line numberDiff line change
@@ -26,6 +26,8 @@ under the License.
2626

2727
Extensions interact with Superset through well-defined, versioned APIs provided by the `@apache-superset/core` (frontend) and `apache-superset-core` (backend) packages. These APIs are designed to be stable, discoverable, and consistent for both built-in and external extensions.
2828

29+
**Note**: The `superset_core.api` module provides abstract classes that are replaced with concrete implementations via dependency injection when Superset initializes. This allows extensions to use the same interfaces as the host application.
30+
2931
**Frontend APIs** (via `@apache-superset/core)`:
3032

3133
The frontend extension APIs in Superset are organized into logical namespaces such as `authentication`, `commands`, `extensions`, `sqlLab`, and others. Each namespace groups related functionality, making it easy for extension authors to discover and use the APIs relevant to their needs. For example, the `sqlLab` namespace provides events and methods specific to SQL Lab, allowing extensions to react to user actions and interact with the SQL Lab environment:
@@ -90,31 +92,38 @@ Backend APIs follow a similar pattern, providing access to Superset's models, se
9092
Extension endpoints are registered under a dedicated `/extensions` namespace to avoid conflicting with built-in endpoints and also because they don't share the same version constraints. By grouping all extension endpoints under `/extensions`, Superset establishes a clear boundary between core and extension functionality, making it easier to manage, document, and secure both types of APIs.
9193

9294
``` python
93-
from superset_core.api import rest_api, models, query
95+
from superset_core.api.models import Database, get_session
96+
from superset_core.api.daos import DatabaseDAO
97+
from superset_core.api.rest_api import add_extension_api
9498
from .api import DatasetReferencesAPI
9599

96100
# Register a new extension REST API
97-
rest_api.add_extension_api(DatasetReferencesAPI)
101+
add_extension_api(DatasetReferencesAPI)
102+
103+
# Fetch Superset entities via the DAO to apply base filters that filter out entities
104+
# that the user doesn't have access to
105+
databases = DatabaseDAO.find_all()
98106

99-
# Access Superset models with simple queries that filter out entities that
100-
# the user doesn't have access to
101-
databases = models.get_databases(id=database_id)
107+
# ..or apply simple filters on top of base filters
108+
databases = DatabaseDAO.filter_by(uuid=database.uuid)
102109
if not databases:
103-
return self.response_404()
110+
raise Exception("Database not found")
104111

105-
database = databases[0]
112+
return databases[0]
106113

107-
# Perform complex queries using SQLAlchemy BaseQuery, also filtering
108-
# out inaccessible entities
109-
session = models.get_session()
110-
db_model = models.get_database_model())
111-
database_query = session.query(db_model.database_name.ilike("%abc%")
112-
databases_containing_abc = models.get_databases(query)
114+
# Perform complex queries using SQLAlchemy Query, also filtering out
115+
# inaccessible entities
116+
session = get_session()
117+
databases_query = session.query(Database).filter(
118+
Database.database_name.ilike("%abc%")
119+
)
120+
return DatabaseDAO.query(databases_query)
113121

114122
# Bypass security model for highly custom use cases
115-
session = models.get_session()
116-
db_model = models.get_database_model())
117-
all_databases_containg_abc = session.query(db_model.database_name.ilike("%abc%").all()
123+
session = get_session()
124+
all_databases_containing_abc = session.query(Database).filter(
125+
Database.database_name.ilike("%abc%")
126+
).all()
118127
```
119128

120129
In the future, we plan to expand the backend APIs to support configuring security models, database engines, SQL Alchemy dialects, etc.

docs/developer_portal/extensions/quick-start.md

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -128,7 +128,7 @@ The CLI generated a basic `backend/src/hello_world/entrypoint.py`. We'll create
128128
```python
129129
from flask import Response
130130
from flask_appbuilder.api import expose, protect, safe
131-
from superset_core.api.types.rest_api import RestApi
131+
from superset_core.api.rest_api import RestApi
132132

133133

134134
class HelloWorldAPI(RestApi):

docs/docs/contributing/howtos.mdx

Lines changed: 50 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -166,6 +166,56 @@ server:
166166
npm run dev-server
167167
```
168168
169+
#### Deploying your visualization plugin
170+
171+
Once your plugin is complete, you will need to deploy it to your Superset instance.
172+
173+
This step assumes you are running your own Docker image as described [here](https://superset.apache.org/docs/installation/docker-builds/#building-your-own-production-docker-image).
174+
Instructions may vary for other kinds of deployments.
175+
176+
If you have your own Superset Docker image, the first line is most likely:
177+
`FROM apache/superset:latest` or something similar. You will need to build
178+
your own `"lean"` image and replace this `FROM` line with your own image.
179+
180+
1. Publish your chart plugin to npm: it makes the build process simpler.
181+
182+
Note: if your chart is not published to npm, then in the docker build below, you will need
183+
to edit the default Dockerfile to copy your plugin source code to the appropriate
184+
location in the container build environment.
185+
186+
2. Install your chart in the frontend with `npm i <your_chart_package>`.
187+
3. Start with a base superset release.
188+
189+
```bash
190+
git checkout tags/X.0.0
191+
```
192+
193+
4. Install your chart with the instructions you followed during development.
194+
5. Navigate to the root of your superset directory.
195+
6. Run `docker build -t apache/superset:mychart --target lean .`
196+
7. Rebuild your production container using `FROM apache/superset:mychart`.
197+
198+
This will create a new production Superset container with your new chart compiled in.
199+
Then you can recreate your custom production container based on a superset built with your chart.
200+
201+
##### Troubleshooting
202+
203+
204+
- If you get the following NPM error:
205+
206+
```
207+
npm error `npm ci` can only install packages when your package.json and package-lock.json
208+
```
209+
210+
It's because your local nodejs/npm version is different than the one being used inside docker.
211+
212+
You can resolve this by running `npm install` with the same version used by the container build process.
213+
214+
Replace XYZ in the following command with the node tag used in the Dockerfile (search for "node:" in the Dockerfile to find the tag).
215+
```bash
216+
docker run --rm -v $PWD/superset-frontend:/app node:XYZ /bin/bash -c "cd /app && npm i"
217+
```
218+
169219
## Testing
170220
171221
### Python Testing

pyproject.toml

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -48,7 +48,7 @@ dependencies = [
4848
"cryptography>=42.0.4, <45.0.0",
4949
"deprecation>=2.1.0, <2.2.0",
5050
"flask>=2.2.5, <3.0.0",
51-
"flask-appbuilder>=5.0.0,<6",
51+
"flask-appbuilder>=5.0.2,<6",
5252
"flask-caching>=2.1.0, <3",
5353
"flask-compress>=1.13, <2.0",
5454
"flask-talisman>=1.0.0, <2.0",

requirements/base.txt

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -116,7 +116,7 @@ flask==2.3.3
116116
# flask-session
117117
# flask-sqlalchemy
118118
# flask-wtf
119-
flask-appbuilder==5.0.0
119+
flask-appbuilder==5.0.2
120120
# via
121121
# apache-superset (pyproject.toml)
122122
# apache-superset-core

0 commit comments

Comments
 (0)