From 9b1105390d334a0c18bfa4c801f1863f24f1b134 Mon Sep 17 00:00:00 2001
From: Srini
Date: Fri, 27 Sep 2024 16:26:10 +0530
Subject: [PATCH 01/12] create custom superset Dockerfile

---
 docker/superset/Dockerfile                    |  2 +
 docker/superset/README.md                     | 20 +++++
 docker/superset/docker/README.md              | 75 ++++++++++++++++++
 docker/superset/docker/docker-bootstrap.sh    | 60 ++++++++++++++
 docker/superset/docker/docker-ci.sh           | 26 +++++++
 docker/superset/docker/docker-frontend.sh     | 39 ++++++++++
 docker/superset/docker/docker-init.sh         | 78 +++++++++++++++++++
 docker/superset/docker/frontend-mem-nag.sh    | 49 ++++++++++++
 docker/superset/docker/requirements-local.txt |  1 +
 docker/superset/docker/run-server.sh          | 36 +++++++++
 10 files changed, 386 insertions(+)
 create mode 100644 docker/superset/Dockerfile
 create mode 100644 docker/superset/README.md
 create mode 100644 docker/superset/docker/README.md
 create mode 100755 docker/superset/docker/docker-bootstrap.sh
 create mode 100755 docker/superset/docker/docker-ci.sh
 create mode 100755 docker/superset/docker/docker-frontend.sh
 create mode 100755 docker/superset/docker/docker-init.sh
 create mode 100755 docker/superset/docker/frontend-mem-nag.sh
 create mode 100644 docker/superset/docker/requirements-local.txt
 create mode 100644 docker/superset/docker/run-server.sh

diff --git a/docker/superset/Dockerfile b/docker/superset/Dockerfile
new file mode 100644
index 000000000..c58843dec
--- /dev/null
+++ b/docker/superset/Dockerfile
@@ -0,0 +1,2 @@
+FROM apache/superset:4.0.2
+COPY docker/ /app/docker/

diff --git a/docker/superset/README.md b/docker/superset/README.md
new file mode 100644
index 000000000..aaf540a78
--- /dev/null
+++ b/docker/superset/README.md
@@ -0,0 +1,20 @@
+
+## Superset
+
+ The `Dockerfile` has the scripts necessary to run Superset - a data visualization and data exploration platform.
+
+ The `docker` directory contains the files and scripts needed to bootstrap Superset and bring up its processes.
+
+
+## TL;DR
+
+1. `docker-bootstrap.sh` - This script installs the Python modules defined in the **/docker/requirements-local.txt** file
+2. `docker-init.sh`: This script upgrades the Superset schema (in Postgres) and sets up the admin user and password for Superset
+3. `run-server.sh`: This script runs the actual Flask app, i.e., Superset
+
+## Build Docker Image
+```sh
+docker build -t ghcr.io/datakaveri/superset:4.0.2-1 .
+```
+

diff --git a/docker/superset/docker/README.md b/docker/superset/docker/README.md
new file mode 100644
index 000000000..be29bbec0
--- /dev/null
+++ b/docker/superset/docker/README.md
@@ -0,0 +1,75 @@
+
+# Getting Started with Superset using Docker
+
+Docker is an easy way to get started with Superset.
+
+## Prerequisites
+
+1. [Docker](https://www.docker.com/get-started)
+2. [Docker Compose](https://docs.docker.com/compose/install/)
+
+## Configuration
+
+The `/app/pythonpath` folder is mounted from [`./docker/pythonpath_dev`](./pythonpath_dev)
+which contains a base configuration [`./docker/pythonpath_dev/superset_config.py`](./pythonpath_dev/superset_config.py)
+intended for use with local development.
+
+### Local overrides
+
+In order to override configuration settings locally, simply make a copy of [`./docker/pythonpath_dev/superset_config_local.example`](./pythonpath_dev/superset_config_local.example)
+into `./docker/pythonpath_dev/superset_config_docker.py` (git ignored) and fill in your overrides.
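+
+For example, a sketch of that copy step (assuming it is run from the repository root; the two paths are the ones referenced above):
+
+```sh
+cp ./docker/pythonpath_dev/superset_config_local.example \
+   ./docker/pythonpath_dev/superset_config_docker.py
+```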
+ +### Local packages + +If you want to add Python packages in order to test things like databases locally, you can simply add a local requirements.txt (`./docker/requirements-local.txt`) +and rebuild your Docker stack. + +Steps: + +1. Create `./docker/requirements-local.txt` +2. Add your new packages +3. Rebuild docker compose + 1. `docker compose down -v` + 2. `docker compose up` + +## Initializing Database + +The database will initialize itself upon startup via the init container ([`superset-init`](./docker-init.sh)). This may take a minute. + +## Normal Operation + +To run the container, simply run: `docker compose up` + +After waiting several minutes for Superset initialization to finish, you can open a browser and view [`http://localhost:8088`](http://localhost:8088) +to start your journey. + +## Developing + +While running, the container server will reload on modification of the Superset Python and JavaScript source code. +Don't forget to reload the page to take the new frontend into account though. + +## Production + +It is possible to run Superset in non-development mode by using [`docker-compose-non-dev.yml`](../docker-compose-non-dev.yml). This file excludes the volumes needed for development and uses [`./docker/.env-non-dev`](./.env-non-dev) which sets the variable `SUPERSET_ENV` to `production`. + +## Resource Constraints + +If you are attempting to build on macOS and it exits with 137 you need to increase your Docker resources. See instructions [here](https://docs.docker.com/docker-for-mac/#advanced) (search for memory) diff --git a/docker/superset/docker/docker-bootstrap.sh b/docker/superset/docker/docker-bootstrap.sh new file mode 100755 index 000000000..2f0b29ce3 --- /dev/null +++ b/docker/superset/docker/docker-bootstrap.sh @@ -0,0 +1,60 @@ +#!/usr/bin/env bash +# +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +set -eo pipefail + +REQUIREMENTS_LOCAL="/app/docker/requirements-local.txt" +# If Cypress run – overwrite the password for admin and export env variables +if [ "$CYPRESS_CONFIG" == "true" ]; then + export SUPERSET_CONFIG=tests.integration_tests.superset_test_config + export SUPERSET_TESTENV=true + export SUPERSET__SQLALCHEMY_DATABASE_URI=postgresql+psycopg2://superset:superset@db:5432/superset +fi +# +# Make sure we have dev requirements installed +# +if [ -f "${REQUIREMENTS_LOCAL}" ]; then + echo "Installing local overrides at ${REQUIREMENTS_LOCAL}" + pip install --no-cache-dir -r "${REQUIREMENTS_LOCAL}" +else + echo "Skipping local overrides" +fi + +case "${1}" in + worker) + echo "Starting Celery worker..." 
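+    # NOTE: CELERYD_CONCURRENCY is read from the environment below; raise it
+    # (e.g. CELERYD_CONCURRENCY=4 in the stack's .env) if two worker processes are not enough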
+ # setting up only 2 workers by default to contain memory usage in dev environments + celery --app=superset.tasks.celery_app:app worker -O fair -l INFO --concurrency=${CELERYD_CONCURRENCY:-2} + ;; + beat) + echo "Starting Celery beat..." + rm -f /tmp/celerybeat.pid + celery --app=superset.tasks.celery_app:app beat --pidfile /tmp/celerybeat.pid -l INFO -s "${SUPERSET_HOME}"/celerybeat-schedule + ;; + app) + echo "Starting web app (using development server)..." + flask run -p 8088 --with-threads --reload --debugger --host=0.0.0.0 + ;; + app-gunicorn) + echo "Starting web app..." + /usr/bin/run-server.sh + ;; + *) + echo "Unknown Operation!!!" + ;; +esac diff --git a/docker/superset/docker/docker-ci.sh b/docker/superset/docker/docker-ci.sh new file mode 100755 index 000000000..9e97cbbad --- /dev/null +++ b/docker/superset/docker/docker-ci.sh @@ -0,0 +1,26 @@ +#!/usr/bin/env bash +# +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +/app/docker/docker-init.sh + +# TODO: copy config overrides from ENV vars + +# TODO: run celery in detached state +export SERVER_THREADS_AMOUNT=8 +# start up the web server + +/usr/bin/run-server.sh diff --git a/docker/superset/docker/docker-frontend.sh b/docker/superset/docker/docker-frontend.sh new file mode 100755 index 000000000..bb46c07f9 --- /dev/null +++ b/docker/superset/docker/docker-frontend.sh @@ -0,0 +1,39 @@ +#!/usr/bin/env bash +# +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +set -e + +# Packages needed for puppeteer: +if [ "$PUPPETEER_SKIP_CHROMIUM_DOWNLOAD" = "false" ]; then + apt update + apt install -y chromium +fi + +if [ "$BUILD_SUPERSET_FRONTEND_IN_DOCKER" = "true" ]; then + echo "Building Superset frontend in dev mode inside docker container" + cd /app/superset-frontend + + echo "Running `npm install`" + npm install + + echo "Running frontend" + npm run dev + +else + echo "Skipping frontend build steps - YOU NEED TO RUN IT MANUALLY ON THE HOST!" 
+  echo "https://superset.apache.org/docs/contributing/development/#webpack-dev-server"
+fi

diff --git a/docker/superset/docker/docker-init.sh b/docker/superset/docker/docker-init.sh
new file mode 100755
index 000000000..397bb7d98
--- /dev/null
+++ b/docker/superset/docker/docker-init.sh
@@ -0,0 +1,78 @@
+#!/usr/bin/env bash
+#
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements. See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+set -e

+
+#
+# Always install local overrides first
+#
+/app/docker/docker-bootstrap.sh
+
+STEP_CNT=4
+
+echo_step() {
+cat < Date: Fri, 27 Sep 2024 17:38:59 +0530
Subject: [PATCH 02/12] create superset stack and miscellaneous files

---
 Docker-Swarm-deployment/analytics/README.md   | 30 ++++
 .../analytics}/docker/README.md               |  0
 .../analytics/docker/docker-bootstrap.sh      | 60 +++++++
 .../analytics/docker/docker-ci.sh             | 26 ++++
 .../analytics/docker/docker-frontend.sh       | 39 +++++
 .../analytics/docker/docker-init.sh           | 78 ++++++++++
 .../analytics/docker/frontend-mem-nag.sh      | 49 ++++++
 .../analytics/docker/requirements-local.txt   |  1 +
 .../analytics/docker/run-server.sh            | 36 +++++
 .../analytics/superset-stack.yaml             | 146 ++++++++++++++++++
 .../analytics/superset_env_file               | 73 +++++++++
 docker/superset/README.md                     | 10 +-
 12 files changed, 543 insertions(+), 5 deletions(-)
 create mode 100644 Docker-Swarm-deployment/analytics/README.md
 rename {docker/superset => Docker-Swarm-deployment/analytics}/docker/README.md (100%)
 create mode 100755 Docker-Swarm-deployment/analytics/docker/docker-bootstrap.sh
 create mode 100755 Docker-Swarm-deployment/analytics/docker/docker-ci.sh
 create mode 100755 Docker-Swarm-deployment/analytics/docker/docker-frontend.sh
 create mode 100755 Docker-Swarm-deployment/analytics/docker/docker-init.sh
 create mode 100755 Docker-Swarm-deployment/analytics/docker/frontend-mem-nag.sh
 create mode 100644 Docker-Swarm-deployment/analytics/docker/requirements-local.txt
 create mode 100644 Docker-Swarm-deployment/analytics/docker/run-server.sh
 create mode 100644 Docker-Swarm-deployment/analytics/superset-stack.yaml
 create mode 100644 Docker-Swarm-deployment/analytics/superset_env_file

diff --git a/Docker-Swarm-deployment/analytics/README.md b/Docker-Swarm-deployment/analytics/README.md
new file mode 100644
index 000000000..cf61fdca1
--- /dev/null
+++ b/Docker-Swarm-deployment/analytics/README.md
@@ -0,0 +1,30 @@
+## Getting started with Superset (a visualization tool) using Docker Swarm
+
+To begin with, in order to deploy the Superset stack we first need to pass appropriate environment variables to customize Superset and to establish connections with the backend components.
+
+#### Setting up environment variables:
+
+- Create a `.env` (hidden) file at the same directory level as your Docker stack file.
+- Copy `superset_env_file` content into `.env` file and replace placeholders with actual values. + + +#### To deploy: +```sh +docker stack deploy -c superset-stack.yaml superset +``` + +#### To Check the status : +```sh +docker service ls + +ID NAME MODE REPLICAS IMAGE PORTS + +7ztp4yx1d1gc superset_redis replicated 1/1 redis:7 +k2lkdttsrgrw superset_superset replicated 1/1 ghcr.io/datakaveri/superset:4.0.2-1 *:8088->8088/tcp +ijzzqgxx8rd1 superset_superset-worker replicated 1/1 ghcr.io/datakaveri/superset:4.0.2-1 +x1ojkx3smg0y superset_superset-worker-beat replicated 1/1 ghcr.io/datakaveri/superset:4.0.2-1 +rv2yw340gsd0 superset_superset_init replicated 0/1 ghcr.io/datakaveri/superset:4.0.2-1 +``` + +**superset_superset_init** service will be down once it performs bootstrap operations. + diff --git a/docker/superset/docker/README.md b/Docker-Swarm-deployment/analytics/docker/README.md similarity index 100% rename from docker/superset/docker/README.md rename to Docker-Swarm-deployment/analytics/docker/README.md diff --git a/Docker-Swarm-deployment/analytics/docker/docker-bootstrap.sh b/Docker-Swarm-deployment/analytics/docker/docker-bootstrap.sh new file mode 100755 index 000000000..2f0b29ce3 --- /dev/null +++ b/Docker-Swarm-deployment/analytics/docker/docker-bootstrap.sh @@ -0,0 +1,60 @@ +#!/usr/bin/env bash +# +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +set -eo pipefail + +REQUIREMENTS_LOCAL="/app/docker/requirements-local.txt" +# If Cypress run – overwrite the password for admin and export env variables +if [ "$CYPRESS_CONFIG" == "true" ]; then + export SUPERSET_CONFIG=tests.integration_tests.superset_test_config + export SUPERSET_TESTENV=true + export SUPERSET__SQLALCHEMY_DATABASE_URI=postgresql+psycopg2://superset:superset@db:5432/superset +fi +# +# Make sure we have dev requirements installed +# +if [ -f "${REQUIREMENTS_LOCAL}" ]; then + echo "Installing local overrides at ${REQUIREMENTS_LOCAL}" + pip install --no-cache-dir -r "${REQUIREMENTS_LOCAL}" +else + echo "Skipping local overrides" +fi + +case "${1}" in + worker) + echo "Starting Celery worker..." + # setting up only 2 workers by default to contain memory usage in dev environments + celery --app=superset.tasks.celery_app:app worker -O fair -l INFO --concurrency=${CELERYD_CONCURRENCY:-2} + ;; + beat) + echo "Starting Celery beat..." + rm -f /tmp/celerybeat.pid + celery --app=superset.tasks.celery_app:app beat --pidfile /tmp/celerybeat.pid -l INFO -s "${SUPERSET_HOME}"/celerybeat-schedule + ;; + app) + echo "Starting web app (using development server)..." + flask run -p 8088 --with-threads --reload --debugger --host=0.0.0.0 + ;; + app-gunicorn) + echo "Starting web app..." + /usr/bin/run-server.sh + ;; + *) + echo "Unknown Operation!!!" 
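+    # NOTE: unknown sub-commands only print this message; the script still
+    # exits 0, so the container will not fail fast on a typo in the command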
+ ;; +esac diff --git a/Docker-Swarm-deployment/analytics/docker/docker-ci.sh b/Docker-Swarm-deployment/analytics/docker/docker-ci.sh new file mode 100755 index 000000000..9e97cbbad --- /dev/null +++ b/Docker-Swarm-deployment/analytics/docker/docker-ci.sh @@ -0,0 +1,26 @@ +#!/usr/bin/env bash +# +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +/app/docker/docker-init.sh + +# TODO: copy config overrides from ENV vars + +# TODO: run celery in detached state +export SERVER_THREADS_AMOUNT=8 +# start up the web server + +/usr/bin/run-server.sh diff --git a/Docker-Swarm-deployment/analytics/docker/docker-frontend.sh b/Docker-Swarm-deployment/analytics/docker/docker-frontend.sh new file mode 100755 index 000000000..bb46c07f9 --- /dev/null +++ b/Docker-Swarm-deployment/analytics/docker/docker-frontend.sh @@ -0,0 +1,39 @@ +#!/usr/bin/env bash +# +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +set -e + +# Packages needed for puppeteer: +if [ "$PUPPETEER_SKIP_CHROMIUM_DOWNLOAD" = "false" ]; then + apt update + apt install -y chromium +fi + +if [ "$BUILD_SUPERSET_FRONTEND_IN_DOCKER" = "true" ]; then + echo "Building Superset frontend in dev mode inside docker container" + cd /app/superset-frontend + + echo "Running `npm install`" + npm install + + echo "Running frontend" + npm run dev + +else + echo "Skipping frontend build steps - YOU NEED TO RUN IT MANUALLY ON THE HOST!" + echo "https://superset.apache.org/docs/contributing/development/#webpack-dev-server" +fi diff --git a/Docker-Swarm-deployment/analytics/docker/docker-init.sh b/Docker-Swarm-deployment/analytics/docker/docker-init.sh new file mode 100755 index 000000000..397bb7d98 --- /dev/null +++ b/Docker-Swarm-deployment/analytics/docker/docker-init.sh @@ -0,0 +1,78 @@ +#!/usr/bin/env bash +# +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. 
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+set -e
+
+#
+# Always install local overrides first
+#
+/app/docker/docker-bootstrap.sh
+
+STEP_CNT=4
+
+echo_step() {
+cat < +ADMIN_PASSWORD=
+
+# database configurations (do not modify)
+DATABASE_DB=
+DATABASE_HOST=
+# Make sure you set this to a unique secure random value on production
+DATABASE_PASSWORD=
+DATABASE_USER=
+
+EXAMPLES_DB=examples
+EXAMPLES_HOST=no
+EXAMPLES_USER=examples
+# Make sure you set this to a unique secure random value on production
+EXAMPLES_PASSWORD=examples
+EXAMPLES_PORT=5432
+
+# database engine specific environment variables
+# change the below if you prefer another database engine
+DATABASE_PORT=
+
+# Select the appropriate dialect such as postgres, mysql, oracle etc.
+DATABASE_DIALECT=
+
+# Pass the values below if you are using Postgres as the database; otherwise use the appropriate env keys for your specific database
+POSTGRES_DB=
+POSTGRES_USER=
+
+# Make sure you set this to a unique secure random value on production
+POSTGRES_PASSWORD=
+
+#MYSQL_DATABASE=superset
+#MYSQL_USER=superset
+#MYSQL_PASSWORD=superset
+#MYSQL_RANDOM_ROOT_PASSWORD=yes
+
+# Add the mapped in /app/pythonpath_docker which allows devs to override stuff
+PYTHONPATH=/app/pythonpath:/app/docker/pythonpath_dev
+REDIS_HOST=
+REDIS_PORT=
+
+FLASK_DEBUG=true
+SUPERSET_ENV=production
+SUPERSET_LOAD_EXAMPLES=no
+CYPRESS_CONFIG=false
+SUPERSET_PORT=8088
+MAPBOX_API_KEY=''
+
+# Make sure you set this to a unique secure random value on production
+SUPERSET_SECRET_KEY=
+
+ENABLE_PLAYWRIGHT=false
+PUPPETEER_SKIP_CHROMIUM_DOWNLOAD=true
+BUILD_SUPERSET_FRONTEND_IN_DOCKER=true
+
+

diff --git a/docker/superset/README.md b/docker/superset/README.md
index aaf540a78..5a2b109f1 100644
--- a/docker/superset/README.md
+++ b/docker/superset/README.md
@@ -1,15 +1,15 @@
 
-## Superset
+## Superset (a data visualization and data exploration platform)
 
- The `Dockerfile` has the scripts necessary to run Superset - a data visualization and data exploration platform.
+ We build a custom `Dockerfile`, on top of the official Superset image, with the scripts necessary to run and bootstrap Superset.
 
- The `docker` directory contains the files and scripts needed to bootstrap Superset and bring up its processes.
+ The `docker` directory contains the scripts needed to bring up Superset.
 
 
 ## TL;DR
 
 1. `docker-bootstrap.sh` - This script installs the Python modules defined in the **/docker/requirements-local.txt** file
-2. `docker-init.sh`: This script upgrades the Superset schema (in Postgres) and sets up the admin user and password for Superset
+2. `docker-init.sh`: This script upgrades the schema and sets up the admin user and password for Superset
 3. 
`run-server.sh`: This script runs actual flask app i.e., superset ## Build Docker Image From 3c5f186a0415eaab7dd13dd084bd07054dd85a06 Mon Sep 17 00:00:00 2001 From: Srini Date: Fri, 27 Sep 2024 17:59:33 +0530 Subject: [PATCH 03/12] update README --- .../analytics/docker/README.md | 75 ------------------ .../analytics/docker/docker-bootstrap.sh | 60 -------------- .../analytics/docker/docker-ci.sh | 26 ------- .../analytics/docker/docker-frontend.sh | 39 ---------- .../analytics/docker/docker-init.sh | 78 ------------------- .../analytics/docker/frontend-mem-nag.sh | 49 ------------ .../analytics/docker/requirements-local.txt | 1 - .../analytics/docker/run-server.sh | 36 --------- 8 files changed, 364 deletions(-) delete mode 100644 Docker-Swarm-deployment/analytics/docker/README.md delete mode 100755 Docker-Swarm-deployment/analytics/docker/docker-bootstrap.sh delete mode 100755 Docker-Swarm-deployment/analytics/docker/docker-ci.sh delete mode 100755 Docker-Swarm-deployment/analytics/docker/docker-frontend.sh delete mode 100755 Docker-Swarm-deployment/analytics/docker/docker-init.sh delete mode 100755 Docker-Swarm-deployment/analytics/docker/frontend-mem-nag.sh delete mode 100644 Docker-Swarm-deployment/analytics/docker/requirements-local.txt delete mode 100644 Docker-Swarm-deployment/analytics/docker/run-server.sh diff --git a/Docker-Swarm-deployment/analytics/docker/README.md b/Docker-Swarm-deployment/analytics/docker/README.md deleted file mode 100644 index be29bbec0..000000000 --- a/Docker-Swarm-deployment/analytics/docker/README.md +++ /dev/null @@ -1,75 +0,0 @@ - - -# Getting Started with Superset using Docker - -Docker is an easy way to get started with Superset. - -## Prerequisites - -1. [Docker](https://www.docker.com/get-started) -2. [Docker Compose](https://docs.docker.com/compose/install/) - -## Configuration - -The `/app/pythonpath` folder is mounted from [`./docker/pythonpath_dev`](./pythonpath_dev) -which contains a base configuration [`./docker/pythonpath_dev/superset_config.py`](./pythonpath_dev/superset_config.py) -intended for use with local development. - -### Local overrides - -In order to override configuration settings locally, simply make a copy of [`./docker/pythonpath_dev/superset_config_local.example`](./pythonpath_dev/superset_config_local.example) -into `./docker/pythonpath_dev/superset_config_docker.py` (git ignored) and fill in your overrides. - -### Local packages - -If you want to add Python packages in order to test things like databases locally, you can simply add a local requirements.txt (`./docker/requirements-local.txt`) -and rebuild your Docker stack. - -Steps: - -1. Create `./docker/requirements-local.txt` -2. Add your new packages -3. Rebuild docker compose - 1. `docker compose down -v` - 2. `docker compose up` - -## Initializing Database - -The database will initialize itself upon startup via the init container ([`superset-init`](./docker-init.sh)). This may take a minute. - -## Normal Operation - -To run the container, simply run: `docker compose up` - -After waiting several minutes for Superset initialization to finish, you can open a browser and view [`http://localhost:8088`](http://localhost:8088) -to start your journey. - -## Developing - -While running, the container server will reload on modification of the Superset Python and JavaScript source code. -Don't forget to reload the page to take the new frontend into account though. 
- -## Production - -It is possible to run Superset in non-development mode by using [`docker-compose-non-dev.yml`](../docker-compose-non-dev.yml). This file excludes the volumes needed for development and uses [`./docker/.env-non-dev`](./.env-non-dev) which sets the variable `SUPERSET_ENV` to `production`. - -## Resource Constraints - -If you are attempting to build on macOS and it exits with 137 you need to increase your Docker resources. See instructions [here](https://docs.docker.com/docker-for-mac/#advanced) (search for memory) diff --git a/Docker-Swarm-deployment/analytics/docker/docker-bootstrap.sh b/Docker-Swarm-deployment/analytics/docker/docker-bootstrap.sh deleted file mode 100755 index 2f0b29ce3..000000000 --- a/Docker-Swarm-deployment/analytics/docker/docker-bootstrap.sh +++ /dev/null @@ -1,60 +0,0 @@ -#!/usr/bin/env bash -# -# Licensed to the Apache Software Foundation (ASF) under one or more -# contributor license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright ownership. -# The ASF licenses this file to You under the Apache License, Version 2.0 -# (the "License"); you may not use this file except in compliance with -# the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# - -set -eo pipefail - -REQUIREMENTS_LOCAL="/app/docker/requirements-local.txt" -# If Cypress run – overwrite the password for admin and export env variables -if [ "$CYPRESS_CONFIG" == "true" ]; then - export SUPERSET_CONFIG=tests.integration_tests.superset_test_config - export SUPERSET_TESTENV=true - export SUPERSET__SQLALCHEMY_DATABASE_URI=postgresql+psycopg2://superset:superset@db:5432/superset -fi -# -# Make sure we have dev requirements installed -# -if [ -f "${REQUIREMENTS_LOCAL}" ]; then - echo "Installing local overrides at ${REQUIREMENTS_LOCAL}" - pip install --no-cache-dir -r "${REQUIREMENTS_LOCAL}" -else - echo "Skipping local overrides" -fi - -case "${1}" in - worker) - echo "Starting Celery worker..." - # setting up only 2 workers by default to contain memory usage in dev environments - celery --app=superset.tasks.celery_app:app worker -O fair -l INFO --concurrency=${CELERYD_CONCURRENCY:-2} - ;; - beat) - echo "Starting Celery beat..." - rm -f /tmp/celerybeat.pid - celery --app=superset.tasks.celery_app:app beat --pidfile /tmp/celerybeat.pid -l INFO -s "${SUPERSET_HOME}"/celerybeat-schedule - ;; - app) - echo "Starting web app (using development server)..." - flask run -p 8088 --with-threads --reload --debugger --host=0.0.0.0 - ;; - app-gunicorn) - echo "Starting web app..." - /usr/bin/run-server.sh - ;; - *) - echo "Unknown Operation!!!" - ;; -esac diff --git a/Docker-Swarm-deployment/analytics/docker/docker-ci.sh b/Docker-Swarm-deployment/analytics/docker/docker-ci.sh deleted file mode 100755 index 9e97cbbad..000000000 --- a/Docker-Swarm-deployment/analytics/docker/docker-ci.sh +++ /dev/null @@ -1,26 +0,0 @@ -#!/usr/bin/env bash -# -# Licensed to the Apache Software Foundation (ASF) under one or more -# contributor license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright ownership. 
-# The ASF licenses this file to You under the Apache License, Version 2.0 -# (the "License"); you may not use this file except in compliance with -# the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -/app/docker/docker-init.sh - -# TODO: copy config overrides from ENV vars - -# TODO: run celery in detached state -export SERVER_THREADS_AMOUNT=8 -# start up the web server - -/usr/bin/run-server.sh diff --git a/Docker-Swarm-deployment/analytics/docker/docker-frontend.sh b/Docker-Swarm-deployment/analytics/docker/docker-frontend.sh deleted file mode 100755 index bb46c07f9..000000000 --- a/Docker-Swarm-deployment/analytics/docker/docker-frontend.sh +++ /dev/null @@ -1,39 +0,0 @@ -#!/usr/bin/env bash -# -# Licensed to the Apache Software Foundation (ASF) under one or more -# contributor license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright ownership. -# The ASF licenses this file to You under the Apache License, Version 2.0 -# (the "License"); you may not use this file except in compliance with -# the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -set -e - -# Packages needed for puppeteer: -if [ "$PUPPETEER_SKIP_CHROMIUM_DOWNLOAD" = "false" ]; then - apt update - apt install -y chromium -fi - -if [ "$BUILD_SUPERSET_FRONTEND_IN_DOCKER" = "true" ]; then - echo "Building Superset frontend in dev mode inside docker container" - cd /app/superset-frontend - - echo "Running `npm install`" - npm install - - echo "Running frontend" - npm run dev - -else - echo "Skipping frontend build steps - YOU NEED TO RUN IT MANUALLY ON THE HOST!" - echo "https://superset.apache.org/docs/contributing/development/#webpack-dev-server" -fi diff --git a/Docker-Swarm-deployment/analytics/docker/docker-init.sh b/Docker-Swarm-deployment/analytics/docker/docker-init.sh deleted file mode 100755 index 397bb7d98..000000000 --- a/Docker-Swarm-deployment/analytics/docker/docker-init.sh +++ /dev/null @@ -1,78 +0,0 @@ -#!/usr/bin/env bash -# -# Licensed to the Apache Software Foundation (ASF) under one or more -# contributor license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright ownership. -# The ASF licenses this file to You under the Apache License, Version 2.0 -# (the "License"); you may not use this file except in compliance with -# the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -set -e - -# -# Always install local overrides first -# -/app/docker/docker-bootstrap.sh - -STEP_CNT=4 - -echo_step() { -cat < Date: Mon, 30 Sep 2024 12:34:34 +0530 Subject: [PATCH 04/12] move files to superset/ --- Docker-Swarm-deployment/analytics/{ => superset}/README.md | 0 .../analytics/{ => superset}/superset-stack.yaml | 0 .../analytics/{ => superset}/superset_env_file | 0 3 files changed, 0 insertions(+), 0 deletions(-) rename Docker-Swarm-deployment/analytics/{ => superset}/README.md (100%) rename Docker-Swarm-deployment/analytics/{ => superset}/superset-stack.yaml (100%) rename Docker-Swarm-deployment/analytics/{ => superset}/superset_env_file (100%) diff --git a/Docker-Swarm-deployment/analytics/README.md b/Docker-Swarm-deployment/analytics/superset/README.md similarity index 100% rename from Docker-Swarm-deployment/analytics/README.md rename to Docker-Swarm-deployment/analytics/superset/README.md diff --git a/Docker-Swarm-deployment/analytics/superset-stack.yaml b/Docker-Swarm-deployment/analytics/superset/superset-stack.yaml similarity index 100% rename from Docker-Swarm-deployment/analytics/superset-stack.yaml rename to Docker-Swarm-deployment/analytics/superset/superset-stack.yaml diff --git a/Docker-Swarm-deployment/analytics/superset_env_file b/Docker-Swarm-deployment/analytics/superset/superset_env_file similarity index 100% rename from Docker-Swarm-deployment/analytics/superset_env_file rename to Docker-Swarm-deployment/analytics/superset/superset_env_file From 13fd7bcfe0d8f863e039eedadf1a0d5721d80a91 Mon Sep 17 00:00:00 2001 From: Srini Date: Mon, 30 Sep 2024 12:39:53 +0530 Subject: [PATCH 05/12] update superset_env_file --- .../analytics/superset/superset_env_file | 16 ++++++++-------- 1 file changed, 8 insertions(+), 8 deletions(-) diff --git a/Docker-Swarm-deployment/analytics/superset/superset_env_file b/Docker-Swarm-deployment/analytics/superset/superset_env_file index aef525289..abe27431d 100644 --- a/Docker-Swarm-deployment/analytics/superset/superset_env_file +++ b/Docker-Swarm-deployment/analytics/superset/superset_env_file @@ -19,10 +19,10 @@ ADMIN_USERNAME= ADMIN_PASSWORD= # database configurations (do not modify) -DATABASE_DB= -DATABASE_HOST= +DATABASE_DB=superset +DATABASE_HOST=tasks.postgres # Make sure you set this to a unique secure random value on production -DATABASE_PASSWORD= +DATABASE_PASSWORD= DATABASE_USER= EXAMPLES_DB=examples @@ -34,13 +34,13 @@ EXAMPLES_PORT=5432 # database engine specific environment variables # change the below if you prefer another database engine -DATABASE_PORT= +DATABASE_PORT=5432 # Select the appropriate dialect such as postgres, mysql, oracle etc. 
-DATABASE_DIALECT=
+DATABASE_DIALECT=postgres
 
 # Pass the values below if you are using Postgres as the database; otherwise use the appropriate env keys for your specific database
-POSTGRES_DB=
+POSTGRES_DB=superset
 POSTGRES_USER=
 
 # Make sure you set this to a unique secure random value on production
@@ -53,8 +53,8 @@ POSTGRES_PASSWORD=
 
 # Add the mapped in /app/pythonpath_docker which allows devs to override stuff
 PYTHONPATH=/app/pythonpath:/app/docker/pythonpath_dev
-REDIS_HOST=
-REDIS_PORT=
+REDIS_HOST=redis
+REDIS_PORT=6379
 
 FLASK_DEBUG=true
 SUPERSET_ENV=production

From b50d2b4e19c736a4c783c9e7ab30f83fde6f2cda Mon Sep 17 00:00:00 2001
From: Srini
Date: Mon, 7 Oct 2024 11:09:38 +0530
Subject: [PATCH 06/12] add custom resource file

---
 .../superset/example-superset.resources.yaml  | 42 +++++++++++++++
 .../analytics/superset/superset-stack.yaml    | 28 -------------
 2 files changed, 42 insertions(+), 28 deletions(-)
 create mode 100644 Docker-Swarm-deployment/analytics/superset/example-superset.resources.yaml

diff --git a/Docker-Swarm-deployment/analytics/superset/example-superset.resources.yaml b/Docker-Swarm-deployment/analytics/superset/example-superset.resources.yaml
new file mode 100644
index 000000000..4093b8440
--- /dev/null
+++ b/Docker-Swarm-deployment/analytics/superset/example-superset.resources.yaml
@@ -0,0 +1,42 @@
+version: '3.9'
+services:
+  redis:
+    deploy:
+      resources:
+        limits:
+          pids: 8192
+          cpus: '2.0'
+          memory: 4G
+        reservations:
+          cpus: '1'
+          memory: 2G
+  superset:
+    deploy:
+      resources:
+        limits:
+          pids: 8192
+          cpus: '2.0'
+          memory: 6G
+        reservations:
+          cpus: '2'
+          memory: 4G
+  superset-worker:
+    deploy:
+      resources:
+        limits:
+          pids: 8192
+          cpus: '2.0'
+          memory: 4G
+        reservations:
+          cpus: '1'
+          memory: 2G
+  superset-worker-beat:
+    deploy:
+      resources:
+        limits:
+          pids: 8192
+          cpus: '2.0'
+          memory: 4G
+        reservations:
+          cpus: '1'
+          memory: 2G

diff --git a/Docker-Swarm-deployment/analytics/superset/superset-stack.yaml b/Docker-Swarm-deployment/analytics/superset/superset-stack.yaml
index b8addfe90..5c43a9a3e 100644
--- a/Docker-Swarm-deployment/analytics/superset/superset-stack.yaml
+++ b/Docker-Swarm-deployment/analytics/superset/superset-stack.yaml
@@ -10,13 +10,6 @@ services:
       restart_policy:
         condition: any
         max_attempts: 5
-      resources:
-        limits:
-          cpus: '2'
-          memory: 4G
-        reservations:
-          cpus: '1'
-          memory: 2G
     volumes:
       - redis:/data
@@ -59,13 +52,6 @@ services:
       restart_policy:
         condition: any
         max_attempts: 5
-      resources:
-        limits:
-          cpus: '2'
-          memory: 6G
-        reservations:
-          cpus: '2'
-          memory: 4G
     logging:
       driver: "json-file"
       options:
@@ -87,13 +73,6 @@ services:
       restart_policy:
         condition: any
         max_attempts: 5
-      resources:
-        limits:
-          cpus: '2'
-          memory: 4G
-        reservations:
-          cpus: '1'
-          memory: 2G
     healthcheck:
       test:
         [
@@ -118,13 +97,6 @@ services:
       restart_policy:
         condition: any
         max_attempts: 5
-      resources:
-        limits:
-          cpus: '2'
-          memory: 4G
-        reservations:
-          cpus: '1'
-          memory: 2G
     healthcheck:
       disable: true
     networks:

From 0d0b802d465abb4318ab474f895dcc0ea51bbda4 Mon Sep 17 00:00:00 2001
From: Srini
Date: Thu, 12 Dec 2024 16:41:25 +0530
Subject: [PATCH 07/12] update superset config files

---
 .../superset/{superset_env_file => .env}      |   0
 .../analytics/superset/Dockerfile             |  10 +
 .../analytics/superset/docker/README.md       |  75 +
 .../superset/docker/docker-bootstrap.sh       |  60 +
 .../analytics/superset/docker/docker-ci.sh    |  26 +
 .../superset/docker/docker-frontend.sh        |  39 +
 .../analytics/superset/docker/docker-init.sh  |  78 +
 .../superset/docker/frontend-mem-nag.sh       |  49 +
.../docker/pythonpath_dev/superset_config.py | 328 +++ .../superset/docker/requirements-local.txt | 5 + .../analytics/superset/docker/run-server.sh | 36 + .../.dockerignore | 4 + .../gra-superset-guesttoken-middlware/.env | 7 + .../.gitignore | 28 + .../Dockerfile | 14 + .../README.md | 8 + .../package-lock.json | 784 +++++++ .../package.json | 21 + .../pnpm-lock.yaml | 549 +++++ .../src/controllers/guestTokenController.ts | 33 + .../src/envParser.ts | 8 + .../src/index.ts | 49 + .../src/routes/guestTokenRoutes.ts | 21 + .../src/services/guestTokenService.ts | 17 + .../src/services/supersetService.ts | 63 + .../src/types.ts | 13 + .../superset-middleware-stack.yaml | 31 + .../tsconfig.json | 14 + .../analytics/superset/superset-stack.yaml | 108 +- .../analytics/superset/superset/config.py | 1931 +++++++++++++++++ 30 files changed, 4395 insertions(+), 14 deletions(-) rename Docker-Swarm-deployment/analytics/superset/{superset_env_file => .env} (100%) create mode 100644 Docker-Swarm-deployment/analytics/superset/Dockerfile create mode 100644 Docker-Swarm-deployment/analytics/superset/docker/README.md create mode 100755 Docker-Swarm-deployment/analytics/superset/docker/docker-bootstrap.sh create mode 100755 Docker-Swarm-deployment/analytics/superset/docker/docker-ci.sh create mode 100755 Docker-Swarm-deployment/analytics/superset/docker/docker-frontend.sh create mode 100755 Docker-Swarm-deployment/analytics/superset/docker/docker-init.sh create mode 100755 Docker-Swarm-deployment/analytics/superset/docker/frontend-mem-nag.sh create mode 100644 Docker-Swarm-deployment/analytics/superset/docker/pythonpath_dev/superset_config.py create mode 100644 Docker-Swarm-deployment/analytics/superset/docker/requirements-local.txt create mode 100644 Docker-Swarm-deployment/analytics/superset/docker/run-server.sh create mode 100644 Docker-Swarm-deployment/analytics/superset/gra-superset-guesttoken-middlware/.dockerignore create mode 100644 Docker-Swarm-deployment/analytics/superset/gra-superset-guesttoken-middlware/.env create mode 100644 Docker-Swarm-deployment/analytics/superset/gra-superset-guesttoken-middlware/.gitignore create mode 100644 Docker-Swarm-deployment/analytics/superset/gra-superset-guesttoken-middlware/Dockerfile create mode 100644 Docker-Swarm-deployment/analytics/superset/gra-superset-guesttoken-middlware/README.md create mode 100644 Docker-Swarm-deployment/analytics/superset/gra-superset-guesttoken-middlware/package-lock.json create mode 100644 Docker-Swarm-deployment/analytics/superset/gra-superset-guesttoken-middlware/package.json create mode 100644 Docker-Swarm-deployment/analytics/superset/gra-superset-guesttoken-middlware/pnpm-lock.yaml create mode 100644 Docker-Swarm-deployment/analytics/superset/gra-superset-guesttoken-middlware/src/controllers/guestTokenController.ts create mode 100644 Docker-Swarm-deployment/analytics/superset/gra-superset-guesttoken-middlware/src/envParser.ts create mode 100644 Docker-Swarm-deployment/analytics/superset/gra-superset-guesttoken-middlware/src/index.ts create mode 100644 Docker-Swarm-deployment/analytics/superset/gra-superset-guesttoken-middlware/src/routes/guestTokenRoutes.ts create mode 100644 Docker-Swarm-deployment/analytics/superset/gra-superset-guesttoken-middlware/src/services/guestTokenService.ts create mode 100644 Docker-Swarm-deployment/analytics/superset/gra-superset-guesttoken-middlware/src/services/supersetService.ts create mode 100644 Docker-Swarm-deployment/analytics/superset/gra-superset-guesttoken-middlware/src/types.ts create mode 
100644 Docker-Swarm-deployment/analytics/superset/gra-superset-guesttoken-middlware/superset-middleware-stack.yaml create mode 100644 Docker-Swarm-deployment/analytics/superset/gra-superset-guesttoken-middlware/tsconfig.json create mode 100644 Docker-Swarm-deployment/analytics/superset/superset/config.py diff --git a/Docker-Swarm-deployment/analytics/superset/superset_env_file b/Docker-Swarm-deployment/analytics/superset/.env similarity index 100% rename from Docker-Swarm-deployment/analytics/superset/superset_env_file rename to Docker-Swarm-deployment/analytics/superset/.env diff --git a/Docker-Swarm-deployment/analytics/superset/Dockerfile b/Docker-Swarm-deployment/analytics/superset/Dockerfile new file mode 100644 index 000000000..f20779187 --- /dev/null +++ b/Docker-Swarm-deployment/analytics/superset/Dockerfile @@ -0,0 +1,10 @@ +FROM ghcr.io/datakaveri/superset:4.0.2-5 +USER root +RUN apt-get update && apt-get install -y \ + gcc \ + python3-dev \ + libpq-dev \ + && rm -rf /var/lib/apt/lists/* +RUN pip3 install psycopg2 +USER 1000 + diff --git a/Docker-Swarm-deployment/analytics/superset/docker/README.md b/Docker-Swarm-deployment/analytics/superset/docker/README.md new file mode 100644 index 000000000..be29bbec0 --- /dev/null +++ b/Docker-Swarm-deployment/analytics/superset/docker/README.md @@ -0,0 +1,75 @@ + + +# Getting Started with Superset using Docker + +Docker is an easy way to get started with Superset. + +## Prerequisites + +1. [Docker](https://www.docker.com/get-started) +2. [Docker Compose](https://docs.docker.com/compose/install/) + +## Configuration + +The `/app/pythonpath` folder is mounted from [`./docker/pythonpath_dev`](./pythonpath_dev) +which contains a base configuration [`./docker/pythonpath_dev/superset_config.py`](./pythonpath_dev/superset_config.py) +intended for use with local development. + +### Local overrides + +In order to override configuration settings locally, simply make a copy of [`./docker/pythonpath_dev/superset_config_local.example`](./pythonpath_dev/superset_config_local.example) +into `./docker/pythonpath_dev/superset_config_docker.py` (git ignored) and fill in your overrides. + +### Local packages + +If you want to add Python packages in order to test things like databases locally, you can simply add a local requirements.txt (`./docker/requirements-local.txt`) +and rebuild your Docker stack. + +Steps: + +1. Create `./docker/requirements-local.txt` +2. Add your new packages +3. Rebuild docker compose + 1. `docker compose down -v` + 2. `docker compose up` + +## Initializing Database + +The database will initialize itself upon startup via the init container ([`superset-init`](./docker-init.sh)). This may take a minute. + +## Normal Operation + +To run the container, simply run: `docker compose up` + +After waiting several minutes for Superset initialization to finish, you can open a browser and view [`http://localhost:8088`](http://localhost:8088) +to start your journey. + +## Developing + +While running, the container server will reload on modification of the Superset Python and JavaScript source code. +Don't forget to reload the page to take the new frontend into account though. + +## Production + +It is possible to run Superset in non-development mode by using [`docker-compose-non-dev.yml`](../docker-compose-non-dev.yml). This file excludes the volumes needed for development and uses [`./docker/.env-non-dev`](./.env-non-dev) which sets the variable `SUPERSET_ENV` to `production`. 
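+
+For instance, a sketch of a non-development run (assuming you are in the directory that contains the compose files):
+
+```sh
+docker compose -f docker-compose-non-dev.yml up
+```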
+ +## Resource Constraints + +If you are attempting to build on macOS and it exits with 137 you need to increase your Docker resources. See instructions [here](https://docs.docker.com/docker-for-mac/#advanced) (search for memory) diff --git a/Docker-Swarm-deployment/analytics/superset/docker/docker-bootstrap.sh b/Docker-Swarm-deployment/analytics/superset/docker/docker-bootstrap.sh new file mode 100755 index 000000000..2f0b29ce3 --- /dev/null +++ b/Docker-Swarm-deployment/analytics/superset/docker/docker-bootstrap.sh @@ -0,0 +1,60 @@ +#!/usr/bin/env bash +# +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +set -eo pipefail + +REQUIREMENTS_LOCAL="/app/docker/requirements-local.txt" +# If Cypress run – overwrite the password for admin and export env variables +if [ "$CYPRESS_CONFIG" == "true" ]; then + export SUPERSET_CONFIG=tests.integration_tests.superset_test_config + export SUPERSET_TESTENV=true + export SUPERSET__SQLALCHEMY_DATABASE_URI=postgresql+psycopg2://superset:superset@db:5432/superset +fi +# +# Make sure we have dev requirements installed +# +if [ -f "${REQUIREMENTS_LOCAL}" ]; then + echo "Installing local overrides at ${REQUIREMENTS_LOCAL}" + pip install --no-cache-dir -r "${REQUIREMENTS_LOCAL}" +else + echo "Skipping local overrides" +fi + +case "${1}" in + worker) + echo "Starting Celery worker..." + # setting up only 2 workers by default to contain memory usage in dev environments + celery --app=superset.tasks.celery_app:app worker -O fair -l INFO --concurrency=${CELERYD_CONCURRENCY:-2} + ;; + beat) + echo "Starting Celery beat..." + rm -f /tmp/celerybeat.pid + celery --app=superset.tasks.celery_app:app beat --pidfile /tmp/celerybeat.pid -l INFO -s "${SUPERSET_HOME}"/celerybeat-schedule + ;; + app) + echo "Starting web app (using development server)..." + flask run -p 8088 --with-threads --reload --debugger --host=0.0.0.0 + ;; + app-gunicorn) + echo "Starting web app..." + /usr/bin/run-server.sh + ;; + *) + echo "Unknown Operation!!!" + ;; +esac diff --git a/Docker-Swarm-deployment/analytics/superset/docker/docker-ci.sh b/Docker-Swarm-deployment/analytics/superset/docker/docker-ci.sh new file mode 100755 index 000000000..9e97cbbad --- /dev/null +++ b/Docker-Swarm-deployment/analytics/superset/docker/docker-ci.sh @@ -0,0 +1,26 @@ +#!/usr/bin/env bash +# +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. 
You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +/app/docker/docker-init.sh + +# TODO: copy config overrides from ENV vars + +# TODO: run celery in detached state +export SERVER_THREADS_AMOUNT=8 +# start up the web server + +/usr/bin/run-server.sh diff --git a/Docker-Swarm-deployment/analytics/superset/docker/docker-frontend.sh b/Docker-Swarm-deployment/analytics/superset/docker/docker-frontend.sh new file mode 100755 index 000000000..bb46c07f9 --- /dev/null +++ b/Docker-Swarm-deployment/analytics/superset/docker/docker-frontend.sh @@ -0,0 +1,39 @@ +#!/usr/bin/env bash +# +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +set -e + +# Packages needed for puppeteer: +if [ "$PUPPETEER_SKIP_CHROMIUM_DOWNLOAD" = "false" ]; then + apt update + apt install -y chromium +fi + +if [ "$BUILD_SUPERSET_FRONTEND_IN_DOCKER" = "true" ]; then + echo "Building Superset frontend in dev mode inside docker container" + cd /app/superset-frontend + + echo "Running `npm install`" + npm install + + echo "Running frontend" + npm run dev + +else + echo "Skipping frontend build steps - YOU NEED TO RUN IT MANUALLY ON THE HOST!" + echo "https://superset.apache.org/docs/contributing/development/#webpack-dev-server" +fi diff --git a/Docker-Swarm-deployment/analytics/superset/docker/docker-init.sh b/Docker-Swarm-deployment/analytics/superset/docker/docker-init.sh new file mode 100755 index 000000000..397bb7d98 --- /dev/null +++ b/Docker-Swarm-deployment/analytics/superset/docker/docker-init.sh @@ -0,0 +1,78 @@ +#!/usr/bin/env bash +# +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +set -e + +# +# Always install local overrides first +# +/app/docker/docker-bootstrap.sh + +STEP_CNT=4 + +echo_step() { +cat < None: + app.before_request_funcs.setdefault(None, []).append(middleware_function) + # app.before_request_funcs.setdefault(None, []).extend([middleware_function, prometheus_metrics]) + + + +# ? Enable Structured logging for Loki for easier log parsing +# LOGGING = {` +# 'version': 1, +# 'disable_existing_loggers': False, +# 'handlers': { +# 'file': { +# 'level': 'DEBUG', +# 'class': 'logging.handlers.RotatingFileHandler', +# 'filename': '/var/log/superset/superset.log', +# 'maxBytes': 1024000, # 1MB log size +# 'backupCount': 3, +# }, +# }, +# 'loggers': { +# 'superset': { +# 'level': 'DEBUG', +# 'handlers': ['file'], +# }, +# }, +# }` + +TALISMAN_ENABLED = True +TALISMAN_CONFIG = { + "content_security_policy": { + "base-uri": ["'self'"], + "default-src": ["'self'"], + "img-src": [ + "'self'", + "blob:", + "data:", + "https://apachesuperset.gateway.scarf.sh", + "https://static.scarf.sh/", + "https://avatars.slack-edge.com", + ], + "worker-src": ["'self'", "blob:"], + "connect-src": [ + "'self'", + "https://api.mapbox.com", + "https://events.mapbox.com", + ], + "object-src": "'none'", + "style-src": [ + "'self'", + "'unsafe-inline'", + ], + "frame-ancestors": ["*"], + "script-src": ["'self'", "'unsafe-inline'", "'unsafe-eval'"], + }, + "content_security_policy_nonce_in": ["script-src"], + "force_https": False, + "session_cookie_secure": False, +} + +GUEST_ROLE_NAME= 'embed_dashboard' +GUEST_TOKEN_JWT_SECRET = "test-guest-secret-change-me" +GUEST_TOKEN_JWT_EXP_SECONDS = 3600 +CORS_OPTIONS = { + "supports_credentials": True, + "allow_headers": ["*"], + "resources": ["*"], + # TODO: Add allowed domains + "origins": ["https://catalogue.cos.idxgh.com/*"] +} + + + +DATABASE_DIALECT = os.getenv("DATABASE_DIALECT") +DATABASE_USER = os.getenv("DATABASE_USER") +DATABASE_PASSWORD = os.getenv("DATABASE_PASSWORD") +DATABASE_HOST = os.getenv("DATABASE_HOST") +DATABASE_PORT = os.getenv("DATABASE_PORT") +DATABASE_DB = os.getenv("DATABASE_DB") + +EXAMPLES_USER = os.getenv("EXAMPLES_USER") +EXAMPLES_PASSWORD = os.getenv("EXAMPLES_PASSWORD") +EXAMPLES_HOST = os.getenv("EXAMPLES_HOST") +EXAMPLES_PORT = os.getenv("EXAMPLES_PORT") +EXAMPLES_DB = os.getenv("EXAMPLES_DB") + +# The SQLAlchemy connection string. 
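+# With the example values from the stack's .env (DATABASE_DIALECT=postgres, DATABASE_HOST=tasks.postgres,
+# DATABASE_PORT=5432, DATABASE_DB=superset) this resolves to, e.g.:
+#   postgres://<DATABASE_USER>:<DATABASE_PASSWORD>@tasks.postgres:5432/superset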
+SQLALCHEMY_DATABASE_URI = ( + f"{DATABASE_DIALECT}://" + f"{DATABASE_USER}:{DATABASE_PASSWORD}@" + f"{DATABASE_HOST}:{DATABASE_PORT}/{DATABASE_DB}" +) + +SQLALCHEMY_EXAMPLES_URI = ( + f"{DATABASE_DIALECT}://" + f"{EXAMPLES_USER}:{EXAMPLES_PASSWORD}@" + f"{EXAMPLES_HOST}:{EXAMPLES_PORT}/{EXAMPLES_DB}" +) + +REDIS_HOST = os.getenv("REDIS_HOST", "redis") +REDIS_PORT = os.getenv("REDIS_PORT", "6379") +REDIS_CELERY_DB = os.getenv("REDIS_CELERY_DB", "0") +REDIS_RESULTS_DB = os.getenv("REDIS_RESULTS_DB", "1") + +RESULTS_BACKEND = FileSystemCache("/app/superset_home/sqllab") + +CACHE_CONFIG = { + "CACHE_TYPE": "RedisCache", + "CACHE_DEFAULT_TIMEOUT": 300, + "CACHE_KEY_PREFIX": "superset_", + "CACHE_REDIS_HOST": REDIS_HOST, + "CACHE_REDIS_PORT": REDIS_PORT, + "CACHE_REDIS_DB": REDIS_RESULTS_DB, +} +DATA_CACHE_CONFIG = CACHE_CONFIG + +AUTH_ROLES_MAPPING = { + 'SUPERSET_USERS': ['Admin'], + 'SUPSERSET_ADMIN': ['Admin'], + 'SUPSERSET_ALPHA': ["Admin"], + 'SUPSERSET_GAMMA': ["Admin"], +} + +AUTH_ROLES_SYNC_AT_LOGIN = False + + +class CeleryConfig: + broker_url = f"redis://{REDIS_HOST}:{REDIS_PORT}/{REDIS_CELERY_DB}" + imports = ( + "superset.sql_lab", + "superset.tasks.scheduler", + "superset.tasks.thumbnails", + "superset.tasks.cache", + ) + result_backend = f"redis://{REDIS_HOST}:{REDIS_PORT}/{REDIS_RESULTS_DB}" + worker_prefetch_multiplier = 1 + task_acks_late = False + beat_schedule = { + "reports.scheduler": { + "task": "reports.scheduler", + "schedule": crontab(minute="*", hour="*"), + }, + "reports.prune_log": { + "task": "reports.prune_log", + "schedule": crontab(minute=10, hour=0), + }, + } + + +CELERY_CONFIG = CeleryConfig + +FEATURE_FLAGS = { + "ALERT_REPORTS": True, + # ? Enable Dashboard RBAC in UI + "DASHBOARD_RBAC": True +} +ALERT_REPORTS_NOTIFICATION_DRY_RUN = True +WEBDRIVER_BASEURL = "http://superset:8088/" # When using docker compose baseurl should be http://superset_app:8088/ +# The base URL for the email report hyperlinks. +WEBDRIVER_BASEURL_USER_FRIENDLY = WEBDRIVER_BASEURL +SQLLAB_CTAS_NO_LIMIT = True + +# +# Optionally import superset_config_docker.py (which will have been included on +# the PYTHONPATH) in order to allow for local settings to be overridden +# +try: + import superset_config_docker + from superset_config_docker import * # noqa + + logger.info( + f"Loaded your Docker configuration at " f"[{superset_config_docker.__file__}]" + ) +except ImportError: + logger.info("Using default Docker config...") \ No newline at end of file diff --git a/Docker-Swarm-deployment/analytics/superset/docker/requirements-local.txt b/Docker-Swarm-deployment/analytics/superset/docker/requirements-local.txt new file mode 100644 index 000000000..d2fdcd0d0 --- /dev/null +++ b/Docker-Swarm-deployment/analytics/superset/docker/requirements-local.txt @@ -0,0 +1,5 @@ +clickhouse-connect==0.8.6 +jsonpath-ng +flask_oidc +flask_openid +console_log diff --git a/Docker-Swarm-deployment/analytics/superset/docker/run-server.sh b/Docker-Swarm-deployment/analytics/superset/docker/run-server.sh new file mode 100644 index 000000000..270c46f92 --- /dev/null +++ b/Docker-Swarm-deployment/analytics/superset/docker/run-server.sh @@ -0,0 +1,36 @@ +#!/usr/bin/env bash +# +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. 
The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+#
+HYPHEN_SYMBOL='-'
+
+gunicorn \
+    --bind "${SUPERSET_BIND_ADDRESS:-0.0.0.0}:${SUPERSET_PORT:-8088}" \
+    --access-logfile "${ACCESS_LOG_FILE:-$HYPHEN_SYMBOL}" \
+    --error-logfile "${ERROR_LOG_FILE:-$HYPHEN_SYMBOL}" \
+    --workers ${SERVER_WORKER_AMOUNT:-1} \
+    --worker-class ${SERVER_WORKER_CLASS:-gthread} \
+    --threads ${SERVER_THREADS_AMOUNT:-20} \
+    --log-level "${GUNICORN_LOGLEVEL:-info}" \
+    --timeout ${GUNICORN_TIMEOUT:-60} \
+    --keep-alive ${GUNICORN_KEEPALIVE:-2} \
+    --max-requests ${WORKER_MAX_REQUESTS:-0} \
+    --max-requests-jitter ${WORKER_MAX_REQUESTS_JITTER:-0} \
+    --limit-request-line ${SERVER_LIMIT_REQUEST_LINE:-0} \
+    --limit-request-field_size ${SERVER_LIMIT_REQUEST_FIELD_SIZE:-0} \
+    "${FLASK_APP}"
diff --git a/Docker-Swarm-deployment/analytics/superset/gra-superset-guesttoken-middlware/.dockerignore b/Docker-Swarm-deployment/analytics/superset/gra-superset-guesttoken-middlware/.dockerignore
new file mode 100644
index 000000000..3b37ed8e7
--- /dev/null
+++ b/Docker-Swarm-deployment/analytics/superset/gra-superset-guesttoken-middlware/.dockerignore
@@ -0,0 +1,4 @@
+Dockerfile
+.env
+superset*
+
diff --git a/Docker-Swarm-deployment/analytics/superset/gra-superset-guesttoken-middlware/.env b/Docker-Swarm-deployment/analytics/superset/gra-superset-guesttoken-middlware/.env
new file mode 100644
index 000000000..46631bb1f
--- /dev/null
+++ b/Docker-Swarm-deployment/analytics/superset/gra-superset-guesttoken-middlware/.env
@@ -0,0 +1,7 @@
+SUPERSET_URL=https:///api/v1/security/
+# KEYCLOAK_URL=https:///auth/
+# KEYCLOAK_CLIENT_ID=
+APP_PORT=8000
+#DASHBOARD_ID=
+SUPERSET_ADMIN=''
+SUPERSET_ADMIN_PASSWORD=''
diff --git a/Docker-Swarm-deployment/analytics/superset/gra-superset-guesttoken-middlware/.gitignore b/Docker-Swarm-deployment/analytics/superset/gra-superset-guesttoken-middlware/.gitignore
new file mode 100644
index 000000000..36fabb6cb
--- /dev/null
+++ b/Docker-Swarm-deployment/analytics/superset/gra-superset-guesttoken-middlware/.gitignore
@@ -0,0 +1,28 @@
+# dev
+.yarn/
+!.yarn/releases
+.vscode/*
+!.vscode/launch.json
+!.vscode/*.code-snippets
+.idea/workspace.xml
+.idea/usage.statistics.xml
+.idea/shelf
+
+# deps
+node_modules/
+
+# env
+.env
+.env.production
+
+# logs
+logs/
+*.log
+npm-debug.log*
+yarn-debug.log*
+yarn-error.log*
+pnpm-debug.log*
+lerna-debug.log*
+
+# misc
+.DS_Store
diff --git a/Docker-Swarm-deployment/analytics/superset/gra-superset-guesttoken-middlware/Dockerfile b/Docker-Swarm-deployment/analytics/superset/gra-superset-guesttoken-middlware/Dockerfile
new file mode 100644
index 000000000..28ad64668
--- /dev/null
+++ b/Docker-Swarm-deployment/analytics/superset/gra-superset-guesttoken-middlware/Dockerfile
@@ -0,0 +1,14 @@
+FROM node:20
+
+WORKDIR /app
+
+COPY package.json package-lock.json ./
+# Install dependencies
+COPY . .
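+# NOTE: with the whole source tree copied before `npm install`, any code
+# change invalidates the install layer. A cache-friendlier sketch would be:
+#   COPY package.json package-lock.json ./
+#   RUN npm ci
+#   COPY . .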
+RUN npm install +# Expose the application port +EXPOSE 8000 + +# Command to run the application +CMD ["npm" ,"run", "dev"] + diff --git a/Docker-Swarm-deployment/analytics/superset/gra-superset-guesttoken-middlware/README.md b/Docker-Swarm-deployment/analytics/superset/gra-superset-guesttoken-middlware/README.md new file mode 100644 index 000000000..1178a7ba3 --- /dev/null +++ b/Docker-Swarm-deployment/analytics/superset/gra-superset-guesttoken-middlware/README.md @@ -0,0 +1,8 @@ +``` +npm install +npm run dev +``` + +``` +open http://localhost:8000 +``` diff --git a/Docker-Swarm-deployment/analytics/superset/gra-superset-guesttoken-middlware/package-lock.json b/Docker-Swarm-deployment/analytics/superset/gra-superset-guesttoken-middlware/package-lock.json new file mode 100644 index 000000000..9ee2ebf14 --- /dev/null +++ b/Docker-Swarm-deployment/analytics/superset/gra-superset-guesttoken-middlware/package-lock.json @@ -0,0 +1,784 @@ +{ + "name": "iudx_auth_server", + "lockfileVersion": 3, + "requires": true, + "packages": { + "": { + "name": "iudx_auth_server", + "dependencies": { + "@hono/node-server": "^1.13.7", + "@hono/zod-validator": "^0.4.1", + "axios": "^1.7.7", + "dotenv": "^16.4.5", + "hono": "^4.6.12", + "jsonwebtoken": "^9.0.2", + "jwt-decode": "^4.0.0", + "zod": "^3.23.8" + }, + "devDependencies": { + "@types/node": "^20.11.17", + "tsx": "^4.7.1" + } + }, + "node_modules/@esbuild/aix-ppc64": { + "version": "0.23.1", + "resolved": "https://registry.npmjs.org/@esbuild/aix-ppc64/-/aix-ppc64-0.23.1.tgz", + "integrity": "sha512-6VhYk1diRqrhBAqpJEdjASR/+WVRtfjpqKuNw11cLiaWpAT/Uu+nokB+UJnevzy/P9C/ty6AOe0dwueMrGh/iQ==", + "cpu": [ + "ppc64" + ], + "dev": true, + "optional": true, + "os": [ + "aix" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/android-arm": { + "version": "0.23.1", + "resolved": "https://registry.npmjs.org/@esbuild/android-arm/-/android-arm-0.23.1.tgz", + "integrity": "sha512-uz6/tEy2IFm9RYOyvKl88zdzZfwEfKZmnX9Cj1BHjeSGNuGLuMD1kR8y5bteYmwqKm1tj8m4cb/aKEorr6fHWQ==", + "cpu": [ + "arm" + ], + "dev": true, + "optional": true, + "os": [ + "android" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/android-arm64": { + "version": "0.23.1", + "resolved": "https://registry.npmjs.org/@esbuild/android-arm64/-/android-arm64-0.23.1.tgz", + "integrity": "sha512-xw50ipykXcLstLeWH7WRdQuysJqejuAGPd30vd1i5zSyKK3WE+ijzHmLKxdiCMtH1pHz78rOg0BKSYOSB/2Khw==", + "cpu": [ + "arm64" + ], + "dev": true, + "optional": true, + "os": [ + "android" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/android-x64": { + "version": "0.23.1", + "resolved": "https://registry.npmjs.org/@esbuild/android-x64/-/android-x64-0.23.1.tgz", + "integrity": "sha512-nlN9B69St9BwUoB+jkyU090bru8L0NA3yFvAd7k8dNsVH8bi9a8cUAUSEcEEgTp2z3dbEDGJGfP6VUnkQnlReg==", + "cpu": [ + "x64" + ], + "dev": true, + "optional": true, + "os": [ + "android" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/darwin-arm64": { + "version": "0.23.1", + "resolved": "https://registry.npmjs.org/@esbuild/darwin-arm64/-/darwin-arm64-0.23.1.tgz", + "integrity": "sha512-YsS2e3Wtgnw7Wq53XXBLcV6JhRsEq8hkfg91ESVadIrzr9wO6jJDMZnCQbHm1Guc5t/CdDiFSSfWP58FNuvT3Q==", + "cpu": [ + "arm64" + ], + "dev": true, + "optional": true, + "os": [ + "darwin" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/darwin-x64": { + "version": "0.23.1", + "resolved": "https://registry.npmjs.org/@esbuild/darwin-x64/-/darwin-x64-0.23.1.tgz", + "integrity": 
"sha512-aClqdgTDVPSEGgoCS8QDG37Gu8yc9lTHNAQlsztQ6ENetKEO//b8y31MMu2ZaPbn4kVsIABzVLXYLhCGekGDqw==", + "cpu": [ + "x64" + ], + "dev": true, + "optional": true, + "os": [ + "darwin" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/freebsd-arm64": { + "version": "0.23.1", + "resolved": "https://registry.npmjs.org/@esbuild/freebsd-arm64/-/freebsd-arm64-0.23.1.tgz", + "integrity": "sha512-h1k6yS8/pN/NHlMl5+v4XPfikhJulk4G+tKGFIOwURBSFzE8bixw1ebjluLOjfwtLqY0kewfjLSrO6tN2MgIhA==", + "cpu": [ + "arm64" + ], + "dev": true, + "optional": true, + "os": [ + "freebsd" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/freebsd-x64": { + "version": "0.23.1", + "resolved": "https://registry.npmjs.org/@esbuild/freebsd-x64/-/freebsd-x64-0.23.1.tgz", + "integrity": "sha512-lK1eJeyk1ZX8UklqFd/3A60UuZ/6UVfGT2LuGo3Wp4/z7eRTRYY+0xOu2kpClP+vMTi9wKOfXi2vjUpO1Ro76g==", + "cpu": [ + "x64" + ], + "dev": true, + "optional": true, + "os": [ + "freebsd" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/linux-arm": { + "version": "0.23.1", + "resolved": "https://registry.npmjs.org/@esbuild/linux-arm/-/linux-arm-0.23.1.tgz", + "integrity": "sha512-CXXkzgn+dXAPs3WBwE+Kvnrf4WECwBdfjfeYHpMeVxWE0EceB6vhWGShs6wi0IYEqMSIzdOF1XjQ/Mkm5d7ZdQ==", + "cpu": [ + "arm" + ], + "dev": true, + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/linux-arm64": { + "version": "0.23.1", + "resolved": "https://registry.npmjs.org/@esbuild/linux-arm64/-/linux-arm64-0.23.1.tgz", + "integrity": "sha512-/93bf2yxencYDnItMYV/v116zff6UyTjo4EtEQjUBeGiVpMmffDNUyD9UN2zV+V3LRV3/on4xdZ26NKzn6754g==", + "cpu": [ + "arm64" + ], + "dev": true, + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/linux-ia32": { + "version": "0.23.1", + "resolved": "https://registry.npmjs.org/@esbuild/linux-ia32/-/linux-ia32-0.23.1.tgz", + "integrity": "sha512-VTN4EuOHwXEkXzX5nTvVY4s7E/Krz7COC8xkftbbKRYAl96vPiUssGkeMELQMOnLOJ8k3BY1+ZY52tttZnHcXQ==", + "cpu": [ + "ia32" + ], + "dev": true, + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/linux-loong64": { + "version": "0.23.1", + "resolved": "https://registry.npmjs.org/@esbuild/linux-loong64/-/linux-loong64-0.23.1.tgz", + "integrity": "sha512-Vx09LzEoBa5zDnieH8LSMRToj7ir/Jeq0Gu6qJ/1GcBq9GkfoEAoXvLiW1U9J1qE/Y/Oyaq33w5p2ZWrNNHNEw==", + "cpu": [ + "loong64" + ], + "dev": true, + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/linux-mips64el": { + "version": "0.23.1", + "resolved": "https://registry.npmjs.org/@esbuild/linux-mips64el/-/linux-mips64el-0.23.1.tgz", + "integrity": "sha512-nrFzzMQ7W4WRLNUOU5dlWAqa6yVeI0P78WKGUo7lg2HShq/yx+UYkeNSE0SSfSure0SqgnsxPvmAUu/vu0E+3Q==", + "cpu": [ + "mips64el" + ], + "dev": true, + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/linux-ppc64": { + "version": "0.23.1", + "resolved": "https://registry.npmjs.org/@esbuild/linux-ppc64/-/linux-ppc64-0.23.1.tgz", + "integrity": "sha512-dKN8fgVqd0vUIjxuJI6P/9SSSe/mB9rvA98CSH2sJnlZ/OCZWO1DJvxj8jvKTfYUdGfcq2dDxoKaC6bHuTlgcw==", + "cpu": [ + "ppc64" + ], + "dev": true, + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/linux-riscv64": { + "version": "0.23.1", + "resolved": 
"https://registry.npmjs.org/@esbuild/linux-riscv64/-/linux-riscv64-0.23.1.tgz", + "integrity": "sha512-5AV4Pzp80fhHL83JM6LoA6pTQVWgB1HovMBsLQ9OZWLDqVY8MVobBXNSmAJi//Csh6tcY7e7Lny2Hg1tElMjIA==", + "cpu": [ + "riscv64" + ], + "dev": true, + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/linux-s390x": { + "version": "0.23.1", + "resolved": "https://registry.npmjs.org/@esbuild/linux-s390x/-/linux-s390x-0.23.1.tgz", + "integrity": "sha512-9ygs73tuFCe6f6m/Tb+9LtYxWR4c9yg7zjt2cYkjDbDpV/xVn+68cQxMXCjUpYwEkze2RcU/rMnfIXNRFmSoDw==", + "cpu": [ + "s390x" + ], + "dev": true, + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/linux-x64": { + "version": "0.23.1", + "resolved": "https://registry.npmjs.org/@esbuild/linux-x64/-/linux-x64-0.23.1.tgz", + "integrity": "sha512-EV6+ovTsEXCPAp58g2dD68LxoP/wK5pRvgy0J/HxPGB009omFPv3Yet0HiaqvrIrgPTBuC6wCH1LTOY91EO5hQ==", + "cpu": [ + "x64" + ], + "dev": true, + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/netbsd-x64": { + "version": "0.23.1", + "resolved": "https://registry.npmjs.org/@esbuild/netbsd-x64/-/netbsd-x64-0.23.1.tgz", + "integrity": "sha512-aevEkCNu7KlPRpYLjwmdcuNz6bDFiE7Z8XC4CPqExjTvrHugh28QzUXVOZtiYghciKUacNktqxdpymplil1beA==", + "cpu": [ + "x64" + ], + "dev": true, + "optional": true, + "os": [ + "netbsd" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/openbsd-arm64": { + "version": "0.23.1", + "resolved": "https://registry.npmjs.org/@esbuild/openbsd-arm64/-/openbsd-arm64-0.23.1.tgz", + "integrity": "sha512-3x37szhLexNA4bXhLrCC/LImN/YtWis6WXr1VESlfVtVeoFJBRINPJ3f0a/6LV8zpikqoUg4hyXw0sFBt5Cr+Q==", + "cpu": [ + "arm64" + ], + "dev": true, + "optional": true, + "os": [ + "openbsd" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/openbsd-x64": { + "version": "0.23.1", + "resolved": "https://registry.npmjs.org/@esbuild/openbsd-x64/-/openbsd-x64-0.23.1.tgz", + "integrity": "sha512-aY2gMmKmPhxfU+0EdnN+XNtGbjfQgwZj43k8G3fyrDM/UdZww6xrWxmDkuz2eCZchqVeABjV5BpildOrUbBTqA==", + "cpu": [ + "x64" + ], + "dev": true, + "optional": true, + "os": [ + "openbsd" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/sunos-x64": { + "version": "0.23.1", + "resolved": "https://registry.npmjs.org/@esbuild/sunos-x64/-/sunos-x64-0.23.1.tgz", + "integrity": "sha512-RBRT2gqEl0IKQABT4XTj78tpk9v7ehp+mazn2HbUeZl1YMdaGAQqhapjGTCe7uw7y0frDi4gS0uHzhvpFuI1sA==", + "cpu": [ + "x64" + ], + "dev": true, + "optional": true, + "os": [ + "sunos" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/win32-arm64": { + "version": "0.23.1", + "resolved": "https://registry.npmjs.org/@esbuild/win32-arm64/-/win32-arm64-0.23.1.tgz", + "integrity": "sha512-4O+gPR5rEBe2FpKOVyiJ7wNDPA8nGzDuJ6gN4okSA1gEOYZ67N8JPk58tkWtdtPeLz7lBnY6I5L3jdsr3S+A6A==", + "cpu": [ + "arm64" + ], + "dev": true, + "optional": true, + "os": [ + "win32" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/win32-ia32": { + "version": "0.23.1", + "resolved": "https://registry.npmjs.org/@esbuild/win32-ia32/-/win32-ia32-0.23.1.tgz", + "integrity": "sha512-BcaL0Vn6QwCwre3Y717nVHZbAa4UBEigzFm6VdsVdT/MbZ38xoj1X9HPkZhbmaBGUD1W8vxAfffbDe8bA6AKnQ==", + "cpu": [ + "ia32" + ], + "dev": true, + "optional": true, + "os": [ + "win32" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/win32-x64": { + "version": "0.23.1", + "resolved": 
"https://registry.npmjs.org/@esbuild/win32-x64/-/win32-x64-0.23.1.tgz", + "integrity": "sha512-BHpFFeslkWrXWyUPnbKm+xYYVYruCinGcftSBaa8zoF9hZO4BcSCFUvHVTtzpIY6YzUnYtuEhZ+C9iEXjxnasg==", + "cpu": [ + "x64" + ], + "dev": true, + "optional": true, + "os": [ + "win32" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@hono/node-server": { + "version": "1.13.7", + "resolved": "https://registry.npmjs.org/@hono/node-server/-/node-server-1.13.7.tgz", + "integrity": "sha512-kTfUMsoloVKtRA2fLiGSd9qBddmru9KadNyhJCwgKBxTiNkaAJEwkVN9KV/rS4HtmmNRtUh6P+YpmjRMl0d9vQ==", + "engines": { + "node": ">=18.14.1" + }, + "peerDependencies": { + "hono": "^4" + } + }, + "node_modules/@hono/zod-validator": { + "version": "0.4.1", + "resolved": "https://registry.npmjs.org/@hono/zod-validator/-/zod-validator-0.4.1.tgz", + "integrity": "sha512-I8LyfeJfvVmC5hPjZ2Iij7RjexlgSBT7QJudZ4JvNPLxn0JQ3sqclz2zydlwISAnw21D2n4LQ0nfZdoiv9fQQA==", + "peerDependencies": { + "hono": ">=3.9.0", + "zod": "^3.19.1" + } + }, + "node_modules/@types/node": { + "version": "20.17.7", + "resolved": "https://registry.npmjs.org/@types/node/-/node-20.17.7.tgz", + "integrity": "sha512-sZXXnpBFMKbao30dUAvzKbdwA2JM1fwUtVEq/kxKuPI5mMwZiRElCpTXb0Biq/LMEVpXDZL5G5V0RPnxKeyaYg==", + "dev": true, + "dependencies": { + "undici-types": "~6.19.2" + } + }, + "node_modules/asynckit": { + "version": "0.4.0", + "resolved": "https://registry.npmjs.org/asynckit/-/asynckit-0.4.0.tgz", + "integrity": "sha512-Oei9OH4tRh0YqU3GxhX79dM/mwVgvbZJaSNaRk+bshkj0S5cfHcgYakreBjrHwatXKbz+IoIdYLxrKim2MjW0Q==" + }, + "node_modules/axios": { + "version": "1.7.7", + "resolved": "https://registry.npmjs.org/axios/-/axios-1.7.7.tgz", + "integrity": "sha512-S4kL7XrjgBmvdGut0sN3yJxqYzrDOnivkBiN0OFs6hLiUam3UPvswUo0kqGyhqUZGEOytHyumEdXsAkgCOUf3Q==", + "dependencies": { + "follow-redirects": "^1.15.6", + "form-data": "^4.0.0", + "proxy-from-env": "^1.1.0" + } + }, + "node_modules/buffer-equal-constant-time": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/buffer-equal-constant-time/-/buffer-equal-constant-time-1.0.1.tgz", + "integrity": "sha512-zRpUiDwd/xk6ADqPMATG8vc9VPrkck7T07OIx0gnjmJAnHnTVXNQG3vfvWNuiZIkwu9KrKdA1iJKfsfTVxE6NA==" + }, + "node_modules/combined-stream": { + "version": "1.0.8", + "resolved": "https://registry.npmjs.org/combined-stream/-/combined-stream-1.0.8.tgz", + "integrity": "sha512-FQN4MRfuJeHf7cBbBMJFXhKSDq+2kAArBlmRBvcvFE5BB1HZKXtSFASDhdlz9zOYwxh8lDdnvmMOe/+5cdoEdg==", + "dependencies": { + "delayed-stream": "~1.0.0" + }, + "engines": { + "node": ">= 0.8" + } + }, + "node_modules/delayed-stream": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/delayed-stream/-/delayed-stream-1.0.0.tgz", + "integrity": "sha512-ZySD7Nf91aLB0RxL4KGrKHBXl7Eds1DAmEdcoVawXnLD7SDhpNgtuII2aAkg7a7QS41jxPSZ17p4VdGnMHk3MQ==", + "engines": { + "node": ">=0.4.0" + } + }, + "node_modules/dotenv": { + "version": "16.4.5", + "resolved": "https://registry.npmjs.org/dotenv/-/dotenv-16.4.5.tgz", + "integrity": "sha512-ZmdL2rui+eB2YwhsWzjInR8LldtZHGDoQ1ugH85ppHKwpUHL7j7rN0Ti9NCnGiQbhaZ11FpR+7ao1dNsmduNUg==", + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://dotenvx.com" + } + }, + "node_modules/ecdsa-sig-formatter": { + "version": "1.0.11", + "resolved": "https://registry.npmjs.org/ecdsa-sig-formatter/-/ecdsa-sig-formatter-1.0.11.tgz", + "integrity": "sha512-nagl3RYrbNv6kQkeJIpt6NJZy8twLB/2vtz6yN9Z4vRKHN4/QZJIEbqohALSgwKdnksuY3k5Addp5lg8sVoVcQ==", + "dependencies": { + "safe-buffer": "^5.0.1" + } + }, + "node_modules/esbuild": { + 
"version": "0.23.1", + "resolved": "https://registry.npmjs.org/esbuild/-/esbuild-0.23.1.tgz", + "integrity": "sha512-VVNz/9Sa0bs5SELtn3f7qhJCDPCF5oMEl5cO9/SSinpE9hbPVvxbd572HH5AKiP7WD8INO53GgfDDhRjkylHEg==", + "dev": true, + "hasInstallScript": true, + "bin": { + "esbuild": "bin/esbuild" + }, + "engines": { + "node": ">=18" + }, + "optionalDependencies": { + "@esbuild/aix-ppc64": "0.23.1", + "@esbuild/android-arm": "0.23.1", + "@esbuild/android-arm64": "0.23.1", + "@esbuild/android-x64": "0.23.1", + "@esbuild/darwin-arm64": "0.23.1", + "@esbuild/darwin-x64": "0.23.1", + "@esbuild/freebsd-arm64": "0.23.1", + "@esbuild/freebsd-x64": "0.23.1", + "@esbuild/linux-arm": "0.23.1", + "@esbuild/linux-arm64": "0.23.1", + "@esbuild/linux-ia32": "0.23.1", + "@esbuild/linux-loong64": "0.23.1", + "@esbuild/linux-mips64el": "0.23.1", + "@esbuild/linux-ppc64": "0.23.1", + "@esbuild/linux-riscv64": "0.23.1", + "@esbuild/linux-s390x": "0.23.1", + "@esbuild/linux-x64": "0.23.1", + "@esbuild/netbsd-x64": "0.23.1", + "@esbuild/openbsd-arm64": "0.23.1", + "@esbuild/openbsd-x64": "0.23.1", + "@esbuild/sunos-x64": "0.23.1", + "@esbuild/win32-arm64": "0.23.1", + "@esbuild/win32-ia32": "0.23.1", + "@esbuild/win32-x64": "0.23.1" + } + }, + "node_modules/follow-redirects": { + "version": "1.15.9", + "resolved": "https://registry.npmjs.org/follow-redirects/-/follow-redirects-1.15.9.tgz", + "integrity": "sha512-gew4GsXizNgdoRyqmyfMHyAmXsZDk6mHkSxZFCzW9gwlbtOW44CDtYavM+y+72qD/Vq2l550kMF52DT8fOLJqQ==", + "funding": [ + { + "type": "individual", + "url": "https://github.com/sponsors/RubenVerborgh" + } + ], + "engines": { + "node": ">=4.0" + }, + "peerDependenciesMeta": { + "debug": { + "optional": true + } + } + }, + "node_modules/form-data": { + "version": "4.0.1", + "resolved": "https://registry.npmjs.org/form-data/-/form-data-4.0.1.tgz", + "integrity": "sha512-tzN8e4TX8+kkxGPK8D5u0FNmjPUjw3lwC9lSLxxoB/+GtsJG91CO8bSWy73APlgAZzZbXEYZJuxjkHH2w+Ezhw==", + "dependencies": { + "asynckit": "^0.4.0", + "combined-stream": "^1.0.8", + "mime-types": "^2.1.12" + }, + "engines": { + "node": ">= 6" + } + }, + "node_modules/fsevents": { + "version": "2.3.3", + "resolved": "https://registry.npmjs.org/fsevents/-/fsevents-2.3.3.tgz", + "integrity": "sha512-5xoDfX+fL7faATnagmWPpbFtwh/R77WmMMqqHGS65C3vvB0YHrgF+B1YmZ3441tMj5n63k0212XNoJwzlhffQw==", + "dev": true, + "hasInstallScript": true, + "optional": true, + "os": [ + "darwin" + ], + "engines": { + "node": "^8.16.0 || ^10.6.0 || >=11.0.0" + } + }, + "node_modules/get-tsconfig": { + "version": "4.8.1", + "resolved": "https://registry.npmjs.org/get-tsconfig/-/get-tsconfig-4.8.1.tgz", + "integrity": "sha512-k9PN+cFBmaLWtVz29SkUoqU5O0slLuHJXt/2P+tMVFT+phsSGXGkp9t3rQIqdz0e+06EHNGs3oM6ZX1s2zHxRg==", + "dev": true, + "dependencies": { + "resolve-pkg-maps": "^1.0.0" + }, + "funding": { + "url": "https://github.com/privatenumber/get-tsconfig?sponsor=1" + } + }, + "node_modules/hono": { + "version": "4.6.12", + "resolved": "https://registry.npmjs.org/hono/-/hono-4.6.12.tgz", + "integrity": "sha512-eHtf4kSDNw6VVrdbd5IQi16r22m3s7mWPLd7xOMhg1a/Yyb1A0qpUFq8xYMX4FMuDe1nTKeMX5rTx7Nmw+a+Ag==", + "engines": { + "node": ">=16.9.0" + } + }, + "node_modules/jsonwebtoken": { + "version": "9.0.2", + "resolved": "https://registry.npmjs.org/jsonwebtoken/-/jsonwebtoken-9.0.2.tgz", + "integrity": "sha512-PRp66vJ865SSqOlgqS8hujT5U4AOgMfhrwYIuIhfKaoSCZcirrmASQr8CX7cUg+RMih+hgznrjp99o+W4pJLHQ==", + "dependencies": { + "jws": "^3.2.2", + "lodash.includes": "^4.3.0", + "lodash.isboolean": "^3.0.3", + 
"lodash.isinteger": "^4.0.4", + "lodash.isnumber": "^3.0.3", + "lodash.isplainobject": "^4.0.6", + "lodash.isstring": "^4.0.1", + "lodash.once": "^4.0.0", + "ms": "^2.1.1", + "semver": "^7.5.4" + }, + "engines": { + "node": ">=12", + "npm": ">=6" + } + }, + "node_modules/jwa": { + "version": "1.4.1", + "resolved": "https://registry.npmjs.org/jwa/-/jwa-1.4.1.tgz", + "integrity": "sha512-qiLX/xhEEFKUAJ6FiBMbes3w9ATzyk5W7Hvzpa/SLYdxNtng+gcurvrI7TbACjIXlsJyr05/S1oUhZrc63evQA==", + "dependencies": { + "buffer-equal-constant-time": "1.0.1", + "ecdsa-sig-formatter": "1.0.11", + "safe-buffer": "^5.0.1" + } + }, + "node_modules/jws": { + "version": "3.2.2", + "resolved": "https://registry.npmjs.org/jws/-/jws-3.2.2.tgz", + "integrity": "sha512-YHlZCB6lMTllWDtSPHz/ZXTsi8S00usEV6v1tjq8tOUZzw7DpSDWVXjXDre6ed1w/pd495ODpHZYSdkRTsa0HA==", + "dependencies": { + "jwa": "^1.4.1", + "safe-buffer": "^5.0.1" + } + }, + "node_modules/jwt-decode": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/jwt-decode/-/jwt-decode-4.0.0.tgz", + "integrity": "sha512-+KJGIyHgkGuIq3IEBNftfhW/LfWhXUIY6OmyVWjliu5KH1y0fw7VQ8YndE2O4qZdMSd9SqbnC8GOcZEy0Om7sA==", + "engines": { + "node": ">=18" + } + }, + "node_modules/lodash.includes": { + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/lodash.includes/-/lodash.includes-4.3.0.tgz", + "integrity": "sha512-W3Bx6mdkRTGtlJISOvVD/lbqjTlPPUDTMnlXZFnVwi9NKJ6tiAk6LVdlhZMm17VZisqhKcgzpO5Wz91PCt5b0w==" + }, + "node_modules/lodash.isboolean": { + "version": "3.0.3", + "resolved": "https://registry.npmjs.org/lodash.isboolean/-/lodash.isboolean-3.0.3.tgz", + "integrity": "sha512-Bz5mupy2SVbPHURB98VAcw+aHh4vRV5IPNhILUCsOzRmsTmSQ17jIuqopAentWoehktxGd9e/hbIXq980/1QJg==" + }, + "node_modules/lodash.isinteger": { + "version": "4.0.4", + "resolved": "https://registry.npmjs.org/lodash.isinteger/-/lodash.isinteger-4.0.4.tgz", + "integrity": "sha512-DBwtEWN2caHQ9/imiNeEA5ys1JoRtRfY3d7V9wkqtbycnAmTvRRmbHKDV4a0EYc678/dia0jrte4tjYwVBaZUA==" + }, + "node_modules/lodash.isnumber": { + "version": "3.0.3", + "resolved": "https://registry.npmjs.org/lodash.isnumber/-/lodash.isnumber-3.0.3.tgz", + "integrity": "sha512-QYqzpfwO3/CWf3XP+Z+tkQsfaLL/EnUlXWVkIk5FUPc4sBdTehEqZONuyRt2P67PXAk+NXmTBcc97zw9t1FQrw==" + }, + "node_modules/lodash.isplainobject": { + "version": "4.0.6", + "resolved": "https://registry.npmjs.org/lodash.isplainobject/-/lodash.isplainobject-4.0.6.tgz", + "integrity": "sha512-oSXzaWypCMHkPC3NvBEaPHf0KsA5mvPrOPgQWDsbg8n7orZ290M0BmC/jgRZ4vcJ6DTAhjrsSYgdsW/F+MFOBA==" + }, + "node_modules/lodash.isstring": { + "version": "4.0.1", + "resolved": "https://registry.npmjs.org/lodash.isstring/-/lodash.isstring-4.0.1.tgz", + "integrity": "sha512-0wJxfxH1wgO3GrbuP+dTTk7op+6L41QCXbGINEmD+ny/G/eCqGzxyCsh7159S+mgDDcoarnBw6PC1PS5+wUGgw==" + }, + "node_modules/lodash.once": { + "version": "4.1.1", + "resolved": "https://registry.npmjs.org/lodash.once/-/lodash.once-4.1.1.tgz", + "integrity": "sha512-Sb487aTOCr9drQVL8pIxOzVhafOjZN9UU54hiN8PU3uAiSV7lx1yYNpbNmex2PK6dSJoNTSJUUswT651yww3Mg==" + }, + "node_modules/mime-db": { + "version": "1.52.0", + "resolved": "https://registry.npmjs.org/mime-db/-/mime-db-1.52.0.tgz", + "integrity": "sha512-sPU4uV7dYlvtWJxwwxHD0PuihVNiE7TyAbQ5SWxDCB9mUYvOgroQOwYQQOKPJ8CIbE+1ETVlOoK1UC2nU3gYvg==", + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/mime-types": { + "version": "2.1.35", + "resolved": "https://registry.npmjs.org/mime-types/-/mime-types-2.1.35.tgz", + "integrity": 
"sha512-ZDY+bPm5zTTF+YpCrAU9nK0UgICYPT0QtT1NZWFv4s++TNkcgVaT0g6+4R2uI4MjQjzysHB1zxuWL50hzaeXiw==", + "dependencies": { + "mime-db": "1.52.0" + }, + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/ms": { + "version": "2.1.3", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.3.tgz", + "integrity": "sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA==" + }, + "node_modules/proxy-from-env": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/proxy-from-env/-/proxy-from-env-1.1.0.tgz", + "integrity": "sha512-D+zkORCbA9f1tdWRK0RaCR3GPv50cMxcrz4X8k5LTSUD1Dkw47mKJEZQNunItRTkWwgtaUSo1RVFRIG9ZXiFYg==" + }, + "node_modules/resolve-pkg-maps": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/resolve-pkg-maps/-/resolve-pkg-maps-1.0.0.tgz", + "integrity": "sha512-seS2Tj26TBVOC2NIc2rOe2y2ZO7efxITtLZcGSOnHHNOQ7CkiUBfw0Iw2ck6xkIhPwLhKNLS8BO+hEpngQlqzw==", + "dev": true, + "funding": { + "url": "https://github.com/privatenumber/resolve-pkg-maps?sponsor=1" + } + }, + "node_modules/safe-buffer": { + "version": "5.2.1", + "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.2.1.tgz", + "integrity": "sha512-rp3So07KcdmmKbGvgaNxQSJr7bGVSVk5S9Eq1F+ppbRo70+YeaDxkw5Dd8NPN+GD6bjnYm2VuPuCXmpuYvmCXQ==", + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/feross" + }, + { + "type": "patreon", + "url": "https://www.patreon.com/feross" + }, + { + "type": "consulting", + "url": "https://feross.org/support" + } + ] + }, + "node_modules/semver": { + "version": "7.6.3", + "resolved": "https://registry.npmjs.org/semver/-/semver-7.6.3.tgz", + "integrity": "sha512-oVekP1cKtI+CTDvHWYFUcMtsK/00wmAEfyqKfNdARm8u1wNVhSgaX7A8d4UuIlUI5e84iEwOhs7ZPYRmzU9U6A==", + "bin": { + "semver": "bin/semver.js" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/tsx": { + "version": "4.19.2", + "resolved": "https://registry.npmjs.org/tsx/-/tsx-4.19.2.tgz", + "integrity": "sha512-pOUl6Vo2LUq/bSa8S5q7b91cgNSjctn9ugq/+Mvow99qW6x/UZYwzxy/3NmqoT66eHYfCVvFvACC58UBPFf28g==", + "dev": true, + "dependencies": { + "esbuild": "~0.23.0", + "get-tsconfig": "^4.7.5" + }, + "bin": { + "tsx": "dist/cli.mjs" + }, + "engines": { + "node": ">=18.0.0" + }, + "optionalDependencies": { + "fsevents": "~2.3.3" + } + }, + "node_modules/undici-types": { + "version": "6.19.8", + "resolved": "https://registry.npmjs.org/undici-types/-/undici-types-6.19.8.tgz", + "integrity": "sha512-ve2KP6f/JnbPBFyobGHuerC9g1FYGn/F8n1LWTwNxCEzd6IfqTwUQcNXgEtmmQ6DlRrC1hrSrBnCZPokRrDHjw==", + "dev": true + }, + "node_modules/zod": { + "version": "3.23.8", + "resolved": "https://registry.npmjs.org/zod/-/zod-3.23.8.tgz", + "integrity": "sha512-XBx9AXhXktjUqnepgTiE5flcKIYWi/rme0Eaj+5Y0lftuGBq+jyRu/md4WnuxqgP1ubdpNCsYEYPxrzVHD8d6g==", + "funding": { + "url": "https://github.com/sponsors/colinhacks" + } + } + } +} diff --git a/Docker-Swarm-deployment/analytics/superset/gra-superset-guesttoken-middlware/package.json b/Docker-Swarm-deployment/analytics/superset/gra-superset-guesttoken-middlware/package.json new file mode 100644 index 000000000..767eedfcf --- /dev/null +++ b/Docker-Swarm-deployment/analytics/superset/gra-superset-guesttoken-middlware/package.json @@ -0,0 +1,21 @@ +{ + "name": "iudx_auth_server", + "type": "module", + "scripts": { + "dev": "tsx watch src/index.ts" + }, + "dependencies": { + "@hono/node-server": "^1.13.7", + "@hono/zod-validator": "^0.4.1", + "axios": "^1.7.7", + "dotenv": "^16.4.5", + "hono": "^4.6.12", + "jsonwebtoken": "^9.0.2", + 
"jwt-decode": "^4.0.0", + "zod": "^3.23.8" + }, + "devDependencies": { + "@types/node": "^20.11.17", + "tsx": "^4.7.1" + } +} \ No newline at end of file diff --git a/Docker-Swarm-deployment/analytics/superset/gra-superset-guesttoken-middlware/pnpm-lock.yaml b/Docker-Swarm-deployment/analytics/superset/gra-superset-guesttoken-middlware/pnpm-lock.yaml new file mode 100644 index 000000000..350ab748d --- /dev/null +++ b/Docker-Swarm-deployment/analytics/superset/gra-superset-guesttoken-middlware/pnpm-lock.yaml @@ -0,0 +1,549 @@ +lockfileVersion: '9.0' + +settings: + autoInstallPeers: true + excludeLinksFromLockfile: false + +importers: + + .: + dependencies: + '@hono/node-server': + specifier: ^1.13.7 + version: 1.13.7(hono@4.6.12) + '@hono/zod-validator': + specifier: ^0.4.1 + version: 0.4.1(hono@4.6.12)(zod@3.23.8) + axios: + specifier: ^1.7.7 + version: 1.7.7 + dotenv: + specifier: ^16.4.5 + version: 16.4.5 + hono: + specifier: ^4.6.12 + version: 4.6.12 + jsonwebtoken: + specifier: ^9.0.2 + version: 9.0.2 + jwt-decode: + specifier: ^4.0.0 + version: 4.0.0 + zod: + specifier: ^3.23.8 + version: 3.23.8 + devDependencies: + '@types/node': + specifier: ^20.11.17 + version: 20.17.7 + tsx: + specifier: ^4.7.1 + version: 4.19.2 + +packages: + + '@esbuild/aix-ppc64@0.23.1': + resolution: {integrity: sha512-6VhYk1diRqrhBAqpJEdjASR/+WVRtfjpqKuNw11cLiaWpAT/Uu+nokB+UJnevzy/P9C/ty6AOe0dwueMrGh/iQ==} + engines: {node: '>=18'} + cpu: [ppc64] + os: [aix] + + '@esbuild/android-arm64@0.23.1': + resolution: {integrity: sha512-xw50ipykXcLstLeWH7WRdQuysJqejuAGPd30vd1i5zSyKK3WE+ijzHmLKxdiCMtH1pHz78rOg0BKSYOSB/2Khw==} + engines: {node: '>=18'} + cpu: [arm64] + os: [android] + + '@esbuild/android-arm@0.23.1': + resolution: {integrity: sha512-uz6/tEy2IFm9RYOyvKl88zdzZfwEfKZmnX9Cj1BHjeSGNuGLuMD1kR8y5bteYmwqKm1tj8m4cb/aKEorr6fHWQ==} + engines: {node: '>=18'} + cpu: [arm] + os: [android] + + '@esbuild/android-x64@0.23.1': + resolution: {integrity: sha512-nlN9B69St9BwUoB+jkyU090bru8L0NA3yFvAd7k8dNsVH8bi9a8cUAUSEcEEgTp2z3dbEDGJGfP6VUnkQnlReg==} + engines: {node: '>=18'} + cpu: [x64] + os: [android] + + '@esbuild/darwin-arm64@0.23.1': + resolution: {integrity: sha512-YsS2e3Wtgnw7Wq53XXBLcV6JhRsEq8hkfg91ESVadIrzr9wO6jJDMZnCQbHm1Guc5t/CdDiFSSfWP58FNuvT3Q==} + engines: {node: '>=18'} + cpu: [arm64] + os: [darwin] + + '@esbuild/darwin-x64@0.23.1': + resolution: {integrity: sha512-aClqdgTDVPSEGgoCS8QDG37Gu8yc9lTHNAQlsztQ6ENetKEO//b8y31MMu2ZaPbn4kVsIABzVLXYLhCGekGDqw==} + engines: {node: '>=18'} + cpu: [x64] + os: [darwin] + + '@esbuild/freebsd-arm64@0.23.1': + resolution: {integrity: sha512-h1k6yS8/pN/NHlMl5+v4XPfikhJulk4G+tKGFIOwURBSFzE8bixw1ebjluLOjfwtLqY0kewfjLSrO6tN2MgIhA==} + engines: {node: '>=18'} + cpu: [arm64] + os: [freebsd] + + '@esbuild/freebsd-x64@0.23.1': + resolution: {integrity: sha512-lK1eJeyk1ZX8UklqFd/3A60UuZ/6UVfGT2LuGo3Wp4/z7eRTRYY+0xOu2kpClP+vMTi9wKOfXi2vjUpO1Ro76g==} + engines: {node: '>=18'} + cpu: [x64] + os: [freebsd] + + '@esbuild/linux-arm64@0.23.1': + resolution: {integrity: sha512-/93bf2yxencYDnItMYV/v116zff6UyTjo4EtEQjUBeGiVpMmffDNUyD9UN2zV+V3LRV3/on4xdZ26NKzn6754g==} + engines: {node: '>=18'} + cpu: [arm64] + os: [linux] + + '@esbuild/linux-arm@0.23.1': + resolution: {integrity: sha512-CXXkzgn+dXAPs3WBwE+Kvnrf4WECwBdfjfeYHpMeVxWE0EceB6vhWGShs6wi0IYEqMSIzdOF1XjQ/Mkm5d7ZdQ==} + engines: {node: '>=18'} + cpu: [arm] + os: [linux] + + '@esbuild/linux-ia32@0.23.1': + resolution: {integrity: sha512-VTN4EuOHwXEkXzX5nTvVY4s7E/Krz7COC8xkftbbKRYAl96vPiUssGkeMELQMOnLOJ8k3BY1+ZY52tttZnHcXQ==} + engines: 
{node: '>=18'} + cpu: [ia32] + os: [linux] + + '@esbuild/linux-loong64@0.23.1': + resolution: {integrity: sha512-Vx09LzEoBa5zDnieH8LSMRToj7ir/Jeq0Gu6qJ/1GcBq9GkfoEAoXvLiW1U9J1qE/Y/Oyaq33w5p2ZWrNNHNEw==} + engines: {node: '>=18'} + cpu: [loong64] + os: [linux] + + '@esbuild/linux-mips64el@0.23.1': + resolution: {integrity: sha512-nrFzzMQ7W4WRLNUOU5dlWAqa6yVeI0P78WKGUo7lg2HShq/yx+UYkeNSE0SSfSure0SqgnsxPvmAUu/vu0E+3Q==} + engines: {node: '>=18'} + cpu: [mips64el] + os: [linux] + + '@esbuild/linux-ppc64@0.23.1': + resolution: {integrity: sha512-dKN8fgVqd0vUIjxuJI6P/9SSSe/mB9rvA98CSH2sJnlZ/OCZWO1DJvxj8jvKTfYUdGfcq2dDxoKaC6bHuTlgcw==} + engines: {node: '>=18'} + cpu: [ppc64] + os: [linux] + + '@esbuild/linux-riscv64@0.23.1': + resolution: {integrity: sha512-5AV4Pzp80fhHL83JM6LoA6pTQVWgB1HovMBsLQ9OZWLDqVY8MVobBXNSmAJi//Csh6tcY7e7Lny2Hg1tElMjIA==} + engines: {node: '>=18'} + cpu: [riscv64] + os: [linux] + + '@esbuild/linux-s390x@0.23.1': + resolution: {integrity: sha512-9ygs73tuFCe6f6m/Tb+9LtYxWR4c9yg7zjt2cYkjDbDpV/xVn+68cQxMXCjUpYwEkze2RcU/rMnfIXNRFmSoDw==} + engines: {node: '>=18'} + cpu: [s390x] + os: [linux] + + '@esbuild/linux-x64@0.23.1': + resolution: {integrity: sha512-EV6+ovTsEXCPAp58g2dD68LxoP/wK5pRvgy0J/HxPGB009omFPv3Yet0HiaqvrIrgPTBuC6wCH1LTOY91EO5hQ==} + engines: {node: '>=18'} + cpu: [x64] + os: [linux] + + '@esbuild/netbsd-x64@0.23.1': + resolution: {integrity: sha512-aevEkCNu7KlPRpYLjwmdcuNz6bDFiE7Z8XC4CPqExjTvrHugh28QzUXVOZtiYghciKUacNktqxdpymplil1beA==} + engines: {node: '>=18'} + cpu: [x64] + os: [netbsd] + + '@esbuild/openbsd-arm64@0.23.1': + resolution: {integrity: sha512-3x37szhLexNA4bXhLrCC/LImN/YtWis6WXr1VESlfVtVeoFJBRINPJ3f0a/6LV8zpikqoUg4hyXw0sFBt5Cr+Q==} + engines: {node: '>=18'} + cpu: [arm64] + os: [openbsd] + + '@esbuild/openbsd-x64@0.23.1': + resolution: {integrity: sha512-aY2gMmKmPhxfU+0EdnN+XNtGbjfQgwZj43k8G3fyrDM/UdZww6xrWxmDkuz2eCZchqVeABjV5BpildOrUbBTqA==} + engines: {node: '>=18'} + cpu: [x64] + os: [openbsd] + + '@esbuild/sunos-x64@0.23.1': + resolution: {integrity: sha512-RBRT2gqEl0IKQABT4XTj78tpk9v7ehp+mazn2HbUeZl1YMdaGAQqhapjGTCe7uw7y0frDi4gS0uHzhvpFuI1sA==} + engines: {node: '>=18'} + cpu: [x64] + os: [sunos] + + '@esbuild/win32-arm64@0.23.1': + resolution: {integrity: sha512-4O+gPR5rEBe2FpKOVyiJ7wNDPA8nGzDuJ6gN4okSA1gEOYZ67N8JPk58tkWtdtPeLz7lBnY6I5L3jdsr3S+A6A==} + engines: {node: '>=18'} + cpu: [arm64] + os: [win32] + + '@esbuild/win32-ia32@0.23.1': + resolution: {integrity: sha512-BcaL0Vn6QwCwre3Y717nVHZbAa4UBEigzFm6VdsVdT/MbZ38xoj1X9HPkZhbmaBGUD1W8vxAfffbDe8bA6AKnQ==} + engines: {node: '>=18'} + cpu: [ia32] + os: [win32] + + '@esbuild/win32-x64@0.23.1': + resolution: {integrity: sha512-BHpFFeslkWrXWyUPnbKm+xYYVYruCinGcftSBaa8zoF9hZO4BcSCFUvHVTtzpIY6YzUnYtuEhZ+C9iEXjxnasg==} + engines: {node: '>=18'} + cpu: [x64] + os: [win32] + + '@hono/node-server@1.13.7': + resolution: {integrity: sha512-kTfUMsoloVKtRA2fLiGSd9qBddmru9KadNyhJCwgKBxTiNkaAJEwkVN9KV/rS4HtmmNRtUh6P+YpmjRMl0d9vQ==} + engines: {node: '>=18.14.1'} + peerDependencies: + hono: ^4 + + '@hono/zod-validator@0.4.1': + resolution: {integrity: sha512-I8LyfeJfvVmC5hPjZ2Iij7RjexlgSBT7QJudZ4JvNPLxn0JQ3sqclz2zydlwISAnw21D2n4LQ0nfZdoiv9fQQA==} + peerDependencies: + hono: '>=3.9.0' + zod: ^3.19.1 + + '@types/node@20.17.7': + resolution: {integrity: sha512-sZXXnpBFMKbao30dUAvzKbdwA2JM1fwUtVEq/kxKuPI5mMwZiRElCpTXb0Biq/LMEVpXDZL5G5V0RPnxKeyaYg==} + + asynckit@0.4.0: + resolution: {integrity: sha512-Oei9OH4tRh0YqU3GxhX79dM/mwVgvbZJaSNaRk+bshkj0S5cfHcgYakreBjrHwatXKbz+IoIdYLxrKim2MjW0Q==} + + axios@1.7.7: + 
resolution: {integrity: sha512-S4kL7XrjgBmvdGut0sN3yJxqYzrDOnivkBiN0OFs6hLiUam3UPvswUo0kqGyhqUZGEOytHyumEdXsAkgCOUf3Q==} + + buffer-equal-constant-time@1.0.1: + resolution: {integrity: sha512-zRpUiDwd/xk6ADqPMATG8vc9VPrkck7T07OIx0gnjmJAnHnTVXNQG3vfvWNuiZIkwu9KrKdA1iJKfsfTVxE6NA==} + + combined-stream@1.0.8: + resolution: {integrity: sha512-FQN4MRfuJeHf7cBbBMJFXhKSDq+2kAArBlmRBvcvFE5BB1HZKXtSFASDhdlz9zOYwxh8lDdnvmMOe/+5cdoEdg==} + engines: {node: '>= 0.8'} + + delayed-stream@1.0.0: + resolution: {integrity: sha512-ZySD7Nf91aLB0RxL4KGrKHBXl7Eds1DAmEdcoVawXnLD7SDhpNgtuII2aAkg7a7QS41jxPSZ17p4VdGnMHk3MQ==} + engines: {node: '>=0.4.0'} + + dotenv@16.4.5: + resolution: {integrity: sha512-ZmdL2rui+eB2YwhsWzjInR8LldtZHGDoQ1ugH85ppHKwpUHL7j7rN0Ti9NCnGiQbhaZ11FpR+7ao1dNsmduNUg==} + engines: {node: '>=12'} + + ecdsa-sig-formatter@1.0.11: + resolution: {integrity: sha512-nagl3RYrbNv6kQkeJIpt6NJZy8twLB/2vtz6yN9Z4vRKHN4/QZJIEbqohALSgwKdnksuY3k5Addp5lg8sVoVcQ==} + + esbuild@0.23.1: + resolution: {integrity: sha512-VVNz/9Sa0bs5SELtn3f7qhJCDPCF5oMEl5cO9/SSinpE9hbPVvxbd572HH5AKiP7WD8INO53GgfDDhRjkylHEg==} + engines: {node: '>=18'} + hasBin: true + + follow-redirects@1.15.9: + resolution: {integrity: sha512-gew4GsXizNgdoRyqmyfMHyAmXsZDk6mHkSxZFCzW9gwlbtOW44CDtYavM+y+72qD/Vq2l550kMF52DT8fOLJqQ==} + engines: {node: '>=4.0'} + peerDependencies: + debug: '*' + peerDependenciesMeta: + debug: + optional: true + + form-data@4.0.1: + resolution: {integrity: sha512-tzN8e4TX8+kkxGPK8D5u0FNmjPUjw3lwC9lSLxxoB/+GtsJG91CO8bSWy73APlgAZzZbXEYZJuxjkHH2w+Ezhw==} + engines: {node: '>= 6'} + + fsevents@2.3.3: + resolution: {integrity: sha512-5xoDfX+fL7faATnagmWPpbFtwh/R77WmMMqqHGS65C3vvB0YHrgF+B1YmZ3441tMj5n63k0212XNoJwzlhffQw==} + engines: {node: ^8.16.0 || ^10.6.0 || >=11.0.0} + os: [darwin] + + get-tsconfig@4.8.1: + resolution: {integrity: sha512-k9PN+cFBmaLWtVz29SkUoqU5O0slLuHJXt/2P+tMVFT+phsSGXGkp9t3rQIqdz0e+06EHNGs3oM6ZX1s2zHxRg==} + + hono@4.6.12: + resolution: {integrity: sha512-eHtf4kSDNw6VVrdbd5IQi16r22m3s7mWPLd7xOMhg1a/Yyb1A0qpUFq8xYMX4FMuDe1nTKeMX5rTx7Nmw+a+Ag==} + engines: {node: '>=16.9.0'} + + jsonwebtoken@9.0.2: + resolution: {integrity: sha512-PRp66vJ865SSqOlgqS8hujT5U4AOgMfhrwYIuIhfKaoSCZcirrmASQr8CX7cUg+RMih+hgznrjp99o+W4pJLHQ==} + engines: {node: '>=12', npm: '>=6'} + + jwa@1.4.1: + resolution: {integrity: sha512-qiLX/xhEEFKUAJ6FiBMbes3w9ATzyk5W7Hvzpa/SLYdxNtng+gcurvrI7TbACjIXlsJyr05/S1oUhZrc63evQA==} + + jws@3.2.2: + resolution: {integrity: sha512-YHlZCB6lMTllWDtSPHz/ZXTsi8S00usEV6v1tjq8tOUZzw7DpSDWVXjXDre6ed1w/pd495ODpHZYSdkRTsa0HA==} + + jwt-decode@4.0.0: + resolution: {integrity: sha512-+KJGIyHgkGuIq3IEBNftfhW/LfWhXUIY6OmyVWjliu5KH1y0fw7VQ8YndE2O4qZdMSd9SqbnC8GOcZEy0Om7sA==} + engines: {node: '>=18'} + + lodash.includes@4.3.0: + resolution: {integrity: sha512-W3Bx6mdkRTGtlJISOvVD/lbqjTlPPUDTMnlXZFnVwi9NKJ6tiAk6LVdlhZMm17VZisqhKcgzpO5Wz91PCt5b0w==} + + lodash.isboolean@3.0.3: + resolution: {integrity: sha512-Bz5mupy2SVbPHURB98VAcw+aHh4vRV5IPNhILUCsOzRmsTmSQ17jIuqopAentWoehktxGd9e/hbIXq980/1QJg==} + + lodash.isinteger@4.0.4: + resolution: {integrity: sha512-DBwtEWN2caHQ9/imiNeEA5ys1JoRtRfY3d7V9wkqtbycnAmTvRRmbHKDV4a0EYc678/dia0jrte4tjYwVBaZUA==} + + lodash.isnumber@3.0.3: + resolution: {integrity: sha512-QYqzpfwO3/CWf3XP+Z+tkQsfaLL/EnUlXWVkIk5FUPc4sBdTehEqZONuyRt2P67PXAk+NXmTBcc97zw9t1FQrw==} + + lodash.isplainobject@4.0.6: + resolution: {integrity: sha512-oSXzaWypCMHkPC3NvBEaPHf0KsA5mvPrOPgQWDsbg8n7orZ290M0BmC/jgRZ4vcJ6DTAhjrsSYgdsW/F+MFOBA==} + + lodash.isstring@4.0.1: + resolution: {integrity: 
sha512-0wJxfxH1wgO3GrbuP+dTTk7op+6L41QCXbGINEmD+ny/G/eCqGzxyCsh7159S+mgDDcoarnBw6PC1PS5+wUGgw==} + + lodash.once@4.1.1: + resolution: {integrity: sha512-Sb487aTOCr9drQVL8pIxOzVhafOjZN9UU54hiN8PU3uAiSV7lx1yYNpbNmex2PK6dSJoNTSJUUswT651yww3Mg==} + + mime-db@1.52.0: + resolution: {integrity: sha512-sPU4uV7dYlvtWJxwwxHD0PuihVNiE7TyAbQ5SWxDCB9mUYvOgroQOwYQQOKPJ8CIbE+1ETVlOoK1UC2nU3gYvg==} + engines: {node: '>= 0.6'} + + mime-types@2.1.35: + resolution: {integrity: sha512-ZDY+bPm5zTTF+YpCrAU9nK0UgICYPT0QtT1NZWFv4s++TNkcgVaT0g6+4R2uI4MjQjzysHB1zxuWL50hzaeXiw==} + engines: {node: '>= 0.6'} + + ms@2.1.3: + resolution: {integrity: sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA==} + + proxy-from-env@1.1.0: + resolution: {integrity: sha512-D+zkORCbA9f1tdWRK0RaCR3GPv50cMxcrz4X8k5LTSUD1Dkw47mKJEZQNunItRTkWwgtaUSo1RVFRIG9ZXiFYg==} + + resolve-pkg-maps@1.0.0: + resolution: {integrity: sha512-seS2Tj26TBVOC2NIc2rOe2y2ZO7efxITtLZcGSOnHHNOQ7CkiUBfw0Iw2ck6xkIhPwLhKNLS8BO+hEpngQlqzw==} + + safe-buffer@5.2.1: + resolution: {integrity: sha512-rp3So07KcdmmKbGvgaNxQSJr7bGVSVk5S9Eq1F+ppbRo70+YeaDxkw5Dd8NPN+GD6bjnYm2VuPuCXmpuYvmCXQ==} + + semver@7.6.3: + resolution: {integrity: sha512-oVekP1cKtI+CTDvHWYFUcMtsK/00wmAEfyqKfNdARm8u1wNVhSgaX7A8d4UuIlUI5e84iEwOhs7ZPYRmzU9U6A==} + engines: {node: '>=10'} + hasBin: true + + tsx@4.19.2: + resolution: {integrity: sha512-pOUl6Vo2LUq/bSa8S5q7b91cgNSjctn9ugq/+Mvow99qW6x/UZYwzxy/3NmqoT66eHYfCVvFvACC58UBPFf28g==} + engines: {node: '>=18.0.0'} + hasBin: true + + undici-types@6.19.8: + resolution: {integrity: sha512-ve2KP6f/JnbPBFyobGHuerC9g1FYGn/F8n1LWTwNxCEzd6IfqTwUQcNXgEtmmQ6DlRrC1hrSrBnCZPokRrDHjw==} + + zod@3.23.8: + resolution: {integrity: sha512-XBx9AXhXktjUqnepgTiE5flcKIYWi/rme0Eaj+5Y0lftuGBq+jyRu/md4WnuxqgP1ubdpNCsYEYPxrzVHD8d6g==} + +snapshots: + + '@esbuild/aix-ppc64@0.23.1': + optional: true + + '@esbuild/android-arm64@0.23.1': + optional: true + + '@esbuild/android-arm@0.23.1': + optional: true + + '@esbuild/android-x64@0.23.1': + optional: true + + '@esbuild/darwin-arm64@0.23.1': + optional: true + + '@esbuild/darwin-x64@0.23.1': + optional: true + + '@esbuild/freebsd-arm64@0.23.1': + optional: true + + '@esbuild/freebsd-x64@0.23.1': + optional: true + + '@esbuild/linux-arm64@0.23.1': + optional: true + + '@esbuild/linux-arm@0.23.1': + optional: true + + '@esbuild/linux-ia32@0.23.1': + optional: true + + '@esbuild/linux-loong64@0.23.1': + optional: true + + '@esbuild/linux-mips64el@0.23.1': + optional: true + + '@esbuild/linux-ppc64@0.23.1': + optional: true + + '@esbuild/linux-riscv64@0.23.1': + optional: true + + '@esbuild/linux-s390x@0.23.1': + optional: true + + '@esbuild/linux-x64@0.23.1': + optional: true + + '@esbuild/netbsd-x64@0.23.1': + optional: true + + '@esbuild/openbsd-arm64@0.23.1': + optional: true + + '@esbuild/openbsd-x64@0.23.1': + optional: true + + '@esbuild/sunos-x64@0.23.1': + optional: true + + '@esbuild/win32-arm64@0.23.1': + optional: true + + '@esbuild/win32-ia32@0.23.1': + optional: true + + '@esbuild/win32-x64@0.23.1': + optional: true + + '@hono/node-server@1.13.7(hono@4.6.12)': + dependencies: + hono: 4.6.12 + + '@hono/zod-validator@0.4.1(hono@4.6.12)(zod@3.23.8)': + dependencies: + hono: 4.6.12 + zod: 3.23.8 + + '@types/node@20.17.7': + dependencies: + undici-types: 6.19.8 + + asynckit@0.4.0: {} + + axios@1.7.7: + dependencies: + follow-redirects: 1.15.9 + form-data: 4.0.1 + proxy-from-env: 1.1.0 + transitivePeerDependencies: + - debug + + buffer-equal-constant-time@1.0.1: {} + + 
combined-stream@1.0.8: + dependencies: + delayed-stream: 1.0.0 + + delayed-stream@1.0.0: {} + + dotenv@16.4.5: {} + + ecdsa-sig-formatter@1.0.11: + dependencies: + safe-buffer: 5.2.1 + + esbuild@0.23.1: + optionalDependencies: + '@esbuild/aix-ppc64': 0.23.1 + '@esbuild/android-arm': 0.23.1 + '@esbuild/android-arm64': 0.23.1 + '@esbuild/android-x64': 0.23.1 + '@esbuild/darwin-arm64': 0.23.1 + '@esbuild/darwin-x64': 0.23.1 + '@esbuild/freebsd-arm64': 0.23.1 + '@esbuild/freebsd-x64': 0.23.1 + '@esbuild/linux-arm': 0.23.1 + '@esbuild/linux-arm64': 0.23.1 + '@esbuild/linux-ia32': 0.23.1 + '@esbuild/linux-loong64': 0.23.1 + '@esbuild/linux-mips64el': 0.23.1 + '@esbuild/linux-ppc64': 0.23.1 + '@esbuild/linux-riscv64': 0.23.1 + '@esbuild/linux-s390x': 0.23.1 + '@esbuild/linux-x64': 0.23.1 + '@esbuild/netbsd-x64': 0.23.1 + '@esbuild/openbsd-arm64': 0.23.1 + '@esbuild/openbsd-x64': 0.23.1 + '@esbuild/sunos-x64': 0.23.1 + '@esbuild/win32-arm64': 0.23.1 + '@esbuild/win32-ia32': 0.23.1 + '@esbuild/win32-x64': 0.23.1 + + follow-redirects@1.15.9: {} + + form-data@4.0.1: + dependencies: + asynckit: 0.4.0 + combined-stream: 1.0.8 + mime-types: 2.1.35 + + fsevents@2.3.3: + optional: true + + get-tsconfig@4.8.1: + dependencies: + resolve-pkg-maps: 1.0.0 + + hono@4.6.12: {} + + jsonwebtoken@9.0.2: + dependencies: + jws: 3.2.2 + lodash.includes: 4.3.0 + lodash.isboolean: 3.0.3 + lodash.isinteger: 4.0.4 + lodash.isnumber: 3.0.3 + lodash.isplainobject: 4.0.6 + lodash.isstring: 4.0.1 + lodash.once: 4.1.1 + ms: 2.1.3 + semver: 7.6.3 + + jwa@1.4.1: + dependencies: + buffer-equal-constant-time: 1.0.1 + ecdsa-sig-formatter: 1.0.11 + safe-buffer: 5.2.1 + + jws@3.2.2: + dependencies: + jwa: 1.4.1 + safe-buffer: 5.2.1 + + jwt-decode@4.0.0: {} + + lodash.includes@4.3.0: {} + + lodash.isboolean@3.0.3: {} + + lodash.isinteger@4.0.4: {} + + lodash.isnumber@3.0.3: {} + + lodash.isplainobject@4.0.6: {} + + lodash.isstring@4.0.1: {} + + lodash.once@4.1.1: {} + + mime-db@1.52.0: {} + + mime-types@2.1.35: + dependencies: + mime-db: 1.52.0 + + ms@2.1.3: {} + + proxy-from-env@1.1.0: {} + + resolve-pkg-maps@1.0.0: {} + + safe-buffer@5.2.1: {} + + semver@7.6.3: {} + + tsx@4.19.2: + dependencies: + esbuild: 0.23.1 + get-tsconfig: 4.8.1 + optionalDependencies: + fsevents: 2.3.3 + + undici-types@6.19.8: {} + + zod@3.23.8: {} diff --git a/Docker-Swarm-deployment/analytics/superset/gra-superset-guesttoken-middlware/src/controllers/guestTokenController.ts b/Docker-Swarm-deployment/analytics/superset/gra-superset-guesttoken-middlware/src/controllers/guestTokenController.ts new file mode 100644 index 000000000..76747e050 --- /dev/null +++ b/Docker-Swarm-deployment/analytics/superset/gra-superset-guesttoken-middlware/src/controllers/guestTokenController.ts @@ -0,0 +1,33 @@ +import type { Context } from "hono"; +import { GuestTokenService } from "../services/guestTokenService.js"; +import { jwtDecode } from "jwt-decode"; +import type { TokenPayload, GuestTokenRequest } from "../types.js"; + +export class GuestTokenController { + private guestTokenService: GuestTokenService; + + constructor(guestTokenService: GuestTokenService) { + this.guestTokenService = guestTokenService; + } + + async generateToken(c: Context) { + try { + const { access_token, dashboard_id } = c.req.valid( + "json" + ) as GuestTokenRequest; + const token = jwtDecode(access_token) as TokenPayload; + + if (!token.email) { + return c.json({ message: "Unauthorized - Please login" }, 401); + } + + const guestToken = await this.guestTokenService.generateGuestToken( + 
dashboard_id + ); + return c.json({ token: guestToken }, 200); + } catch (error) { + console.error("Error processing guest token request:", error); + return c.json({ message: "Failed to generate guest token" }, 500); + } + } +} diff --git a/Docker-Swarm-deployment/analytics/superset/gra-superset-guesttoken-middlware/src/envParser.ts b/Docker-Swarm-deployment/analytics/superset/gra-superset-guesttoken-middlware/src/envParser.ts new file mode 100644 index 000000000..ad6ff12fd --- /dev/null +++ b/Docker-Swarm-deployment/analytics/superset/gra-superset-guesttoken-middlware/src/envParser.ts @@ -0,0 +1,8 @@ +import z from "zod"; + +export const envSchema = z.object({ + SUPERSET_URL: z.string(), + SUPERSET_ADMIN: z.string(), + SUPERSET_ADMIN_PASSWORD: z.string(), + APP_PORT: z.string().transform((val) => parseInt(val, 10)), +}); diff --git a/Docker-Swarm-deployment/analytics/superset/gra-superset-guesttoken-middlware/src/index.ts b/Docker-Swarm-deployment/analytics/superset/gra-superset-guesttoken-middlware/src/index.ts new file mode 100644 index 000000000..2fdfe9d93 --- /dev/null +++ b/Docker-Swarm-deployment/analytics/superset/gra-superset-guesttoken-middlware/src/index.ts @@ -0,0 +1,49 @@ +import { serve } from "@hono/node-server"; +import { Hono } from "hono"; +import { envSchema } from "./envParser.js"; +import dotenv from "dotenv"; +import { cors } from "hono/cors"; +import { SupersetService } from "./services/supersetService.js"; +import { GuestTokenService } from "./services/guestTokenService.js"; +import { GuestTokenController } from "./controllers/guestTokenController.js"; +import { setupGuestTokenRoutes } from "./routes/guestTokenRoutes.js"; + +// Parse Environment variables +dotenv.config(); +const parsedEnv = envSchema.safeParse(process.env); +if (!parsedEnv.success) { + console.error("Invalid environment variables:", parsedEnv.error.format()); + process.exit(1); +} + +const app = new Hono(); + +// Setup services and controllers +const supersetService = new SupersetService(parsedEnv.data.SUPERSET_URL); +const guestTokenService = new GuestTokenService(supersetService); +const guestTokenController = new GuestTokenController(guestTokenService); + +const corsOptions = { + origin: '*', + methods: 'GET,POST,PUT,DELETE', + allowedHeaders: 'Content-Type,Authorization', +}; +// Middleware +app.use(cors(corsOptions)); + +// Setup routes +setupGuestTokenRoutes(app, guestTokenController); + +// Error Handling +app.onError((err, c) => { + console.error("Application error:", err); + return c.json({ message: "Internal Server Error" }, 500); +}); + +const port = parsedEnv.data.APP_PORT; +console.log(`Server is running on http://localhost:${port}`); + +serve({ + fetch: app.fetch, + port, +}); diff --git a/Docker-Swarm-deployment/analytics/superset/gra-superset-guesttoken-middlware/src/routes/guestTokenRoutes.ts b/Docker-Swarm-deployment/analytics/superset/gra-superset-guesttoken-middlware/src/routes/guestTokenRoutes.ts new file mode 100644 index 000000000..92c3c0ad1 --- /dev/null +++ b/Docker-Swarm-deployment/analytics/superset/gra-superset-guesttoken-middlware/src/routes/guestTokenRoutes.ts @@ -0,0 +1,21 @@ +import { Hono } from "hono"; +import { zValidator } from "@hono/zod-validator"; +import { z } from "zod"; +import { GuestTokenController } from "../controllers/guestTokenController.js"; + +export function setupGuestTokenRoutes( + app: Hono, + controller: GuestTokenController +) { + const guestTokenValidator = zValidator( + "json", + z.object({ + access_token: z.string(), + dashboard_id: 
z.string(),
+    })
+  );
+
+  app.post("/middleware/guest_token", guestTokenValidator, (c) =>
+    controller.generateToken(c)
+  );
+}
diff --git a/Docker-Swarm-deployment/analytics/superset/gra-superset-guesttoken-middlware/src/services/guestTokenService.ts b/Docker-Swarm-deployment/analytics/superset/gra-superset-guesttoken-middlware/src/services/guestTokenService.ts
new file mode 100644
index 000000000..e808ae9e5
--- /dev/null
+++ b/Docker-Swarm-deployment/analytics/superset/gra-superset-guesttoken-middlware/src/services/guestTokenService.ts
@@ -0,0 +1,17 @@
+import { SupersetService } from "./supersetService.js";
+
+export class GuestTokenService {
+  private supersetService: SupersetService;
+
+  constructor(supersetService: SupersetService) {
+    this.supersetService = supersetService;
+  }
+
+  async generateGuestToken(dashboardId: string): Promise<string> {
+    const supersetAccessToken = await this.supersetService.login();
+    return await this.supersetService.getGuestToken(
+      dashboardId,
+      supersetAccessToken
+    );
+  }
+}
diff --git a/Docker-Swarm-deployment/analytics/superset/gra-superset-guesttoken-middlware/src/services/supersetService.ts b/Docker-Swarm-deployment/analytics/superset/gra-superset-guesttoken-middlware/src/services/supersetService.ts
new file mode 100644
index 000000000..e34d9b85b
--- /dev/null
+++ b/Docker-Swarm-deployment/analytics/superset/gra-superset-guesttoken-middlware/src/services/supersetService.ts
@@ -0,0 +1,63 @@
+import axios from "axios";
+import dotenv from "dotenv";
+import { envSchema } from "../envParser.js";
+
+dotenv.config();
+const parsedEnv = envSchema.safeParse(process.env);
+if (!parsedEnv.success) {
+  console.error("Invalid environment variables:", parsedEnv.error.format());
+  process.exit(1);
+}
+
+export class SupersetService {
+  private baseUrl: string;
+
+  constructor(baseUrl: string) {
+    this.baseUrl = baseUrl;
+  }
+
+  async login() {
+    const loginBody = {
+      password: parsedEnv.data?.SUPERSET_ADMIN_PASSWORD,
+      username: parsedEnv.data?.SUPERSET_ADMIN,
+      provider: "db",
+      refresh: true,
+    };
+
+    const { data } = await axios.post(`${this.baseUrl}login`, loginBody, {
+      headers: { "Content-Type": "application/json" },
+    });
+
+    return data.access_token;
+  }
+
+  async getGuestToken(dashboardId: string, accessToken: string) {
+    const guestTokenBody = {
+      resources: [
+        {
+          type: "dashboard",
+          id: dashboardId,
+        },
+      ],
+      rls: [],
+      user: {
+        username: "",
+        first_name: "",
+        last_name: "",
+      },
+    };
+
+    const { data } = await axios.post(
+      `${this.baseUrl}guest_token/`,
+      guestTokenBody,
+      {
+        headers: {
+          "Content-Type": "application/json",
+          Authorization: `Bearer ${accessToken}`,
+        },
+      }
+    );
+
+    return data.token;
+  }
+}
diff --git a/Docker-Swarm-deployment/analytics/superset/gra-superset-guesttoken-middlware/src/types.ts b/Docker-Swarm-deployment/analytics/superset/gra-superset-guesttoken-middlware/src/types.ts
new file mode 100644
index 000000000..b78485cb8
--- /dev/null
+++ b/Docker-Swarm-deployment/analytics/superset/gra-superset-guesttoken-middlware/src/types.ts
@@ -0,0 +1,13 @@
+export interface EnvConfig {
+  SUPERSET_URL: string;
+  APP_PORT: number;
+}
+
+export interface TokenPayload {
+  email: string;
+}
+
+export interface GuestTokenRequest {
+  access_token: string;
+  dashboard_id: string;
+}
diff --git a/Docker-Swarm-deployment/analytics/superset/gra-superset-guesttoken-middlware/superset-middleware-stack.yaml b/Docker-Swarm-deployment/analytics/superset/gra-superset-guesttoken-middlware/superset-middleware-stack.yaml
new file mode 100644
index 
000000000..a837cf817 --- /dev/null +++ b/Docker-Swarm-deployment/analytics/superset/gra-superset-guesttoken-middlware/superset-middleware-stack.yaml @@ -0,0 +1,31 @@ +version: '3.8' +services: + superset-middleware: + image: ghcr.io/datakaveri/superset-middleware:v4 + build: + context: . + dockerfile: Dockerfile + env_file: + - .env + deploy: + replicas: 1 + resources: + limits: + cpus: "0.5" + memory: "512M" + reservations: + cpus: "0.25" + memory: "256M" + restart_policy: + condition: on-failure + placement: + constraints: + - "node.labels.superset-node==true" + + networks: + - overlay-net + +networks: + overlay-net: + external: true + driver: overlay diff --git a/Docker-Swarm-deployment/analytics/superset/gra-superset-guesttoken-middlware/tsconfig.json b/Docker-Swarm-deployment/analytics/superset/gra-superset-guesttoken-middlware/tsconfig.json new file mode 100644 index 000000000..7719c236c --- /dev/null +++ b/Docker-Swarm-deployment/analytics/superset/gra-superset-guesttoken-middlware/tsconfig.json @@ -0,0 +1,14 @@ +{ + "compilerOptions": { + "target": "ESNext", + "module": "NodeNext", + "strict": true, + "verbatimModuleSyntax": true, + "skipLibCheck": true, + "types": [ + "node" + ], + "jsx": "react-jsx", + "jsxImportSource": "hono/jsx", + } +} \ No newline at end of file diff --git a/Docker-Swarm-deployment/analytics/superset/superset-stack.yaml b/Docker-Swarm-deployment/analytics/superset/superset-stack.yaml index 5c43a9a3e..48ed56790 100644 --- a/Docker-Swarm-deployment/analytics/superset/superset-stack.yaml +++ b/Docker-Swarm-deployment/analytics/superset/superset-stack.yaml @@ -1,8 +1,7 @@ version: '3.9' - services: redis: - image: redis:7 + image: redis:7.4 container_name: superset_cache restart: unless-stopped deploy: @@ -10,22 +9,50 @@ services: restart_policy: condition: any max_attempts: 5 + placement: + constraints: + - "node.labels.superset-node==true" + resources: + limits: + cpus: '2' + memory: 6G + reservations: + cpus: '1' + memory: 3G volumes: - redis:/data + networks: + - overlay-net + configs: + - source: redis-conf + target: /usr/local/etc/redis/redis.conf + superset_init: - image: ghcr.io/datakaveri/superset:4.0.2-1 + image: ghcr.io/datakaveri/superset:4.0.2-8 container_name: superset_init env_file: - .env volumes: - superset_home:/app/superset_home - command: ["/app/docker/docker-init.sh"] + configs: + - source: client-secret + target: /app/docker/pythonpath_dev/client_secret.json + mode: 0444 + - source: requirements + target: /app/docker/requirements-local.txt + mode: 0444 networks: - overlay-net + deploy: + replicas: 1 + restart_policy: + condition: on-failure + command: ["/app/docker/docker-init.sh"] + superset: - image: ghcr.io/datakaveri/superset:4.0.2-1 + image: ghcr.io/datakaveri/superset:4.0.2-8 container_name: superset restart: unless-stopped ports: @@ -33,9 +60,7 @@ services: env_file: - .env depends_on: - - superset_init - environment: - - SUPERSET_DATABASE_URL=clickhousedb://default:911d8hni65@tasks.clickhouse:8123/default # Connection URL + - superset_init volumes: - superset_home:/app/superset_home configs: @@ -44,6 +69,15 @@ services: mode: 0444 uid: "1000" gid: "1000" + - source: client-secret + target: /app/docker/pythonpath_dev/client_secret.json + mode: 0444 + - source: superset_config + target: /app/docker/pythonpath_dev/superset_config.py + mode: 0444 + - source: superset_conf + target: /app/superset/config.py + mode: 0444 command: ["/app/docker/docker-bootstrap.sh", "app-gunicorn"] networks: - overlay-net @@ -52,6 +86,16 @@ services: 
restart_policy: condition: any max_attempts: 5 + placement: + constraints: + - "node.labels.superset-node==true" + resources: + limits: + cpus: '4' + memory: 14G + reservations: + cpus: '1' + memory: 3G logging: driver: "json-file" options: @@ -61,18 +105,32 @@ services: superset-worker: - image: ghcr.io/datakaveri/superset:4.0.2-1 + image: ghcr.io/datakaveri/superset:4.0.2-8 container_name: superset_worker env_file: - .env # default restart: unless-stopped volumes: - superset_home:/app/superset_home + configs: + - source: client-secret + target: /app/docker/pythonpath_dev/client_secret.json + mode: 0444 deploy: replicas: 1 restart_policy: condition: any max_attempts: 5 + placement: + constraints: + - "node.labels.superset-node==true" + resources: + limits: + cpus: '1' + memory: 6G + reservations: + cpus: '0.5' + memory: 2G healthcheck: test: [ @@ -85,18 +143,32 @@ services: superset-worker-beat: - image: ghcr.io/datakaveri/superset:4.0.2-1 + image: ghcr.io/datakaveri/superset:4.0.2-8 container_name: superset_worker_beat env_file: - .env # default restart: unless-stopped volumes: - superset_home:/app/superset_home + configs: + - source: client-secret + target: /app/docker/pythonpath_dev/client_secret.json + mode: 0444 deploy: replicas: 1 restart_policy: condition: any max_attempts: 5 + placement: + constraints: + - "node.labels.superset-node==true" + resources: + limits: + cpus: '1' + memory: 6G + reservations: + cpus: '0.5' + memory: 2G healthcheck: disable: true networks: @@ -104,15 +176,23 @@ services: command: ["/app/docker/docker-bootstrap.sh", "beat"] + volumes: superset_home: redis: networks: - overlay-net: - external: true - driver: overlay + overlay-net: + external: true + driver: overlay configs: requirements: file: ./docker/requirements-local.txt - + redis-conf: + file: ./redis.conf + client-secret: + file: ./secrets/client-secret.json + superset_config: + file: ./docker/pythonpath_dev/superset_config.py + superset_conf: + file: ./superset/config.py diff --git a/Docker-Swarm-deployment/analytics/superset/superset/config.py b/Docker-Swarm-deployment/analytics/superset/superset/config.py new file mode 100644 index 000000000..fcfde68a3 --- /dev/null +++ b/Docker-Swarm-deployment/analytics/superset/superset/config.py @@ -0,0 +1,1931 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +"""The main config file for Superset + +All configuration in this file can be overridden by providing a superset_config +in your PYTHONPATH as there is a ``from superset_config import *`` +at the end of this file. 
+""" + +# mypy: ignore-errors +# pylint: disable=too-many-lines +from __future__ import annotations + +import importlib.util +import json +import logging +import os +import re +import sys +from collections import OrderedDict +from contextlib import contextmanager +from datetime import timedelta +from email.mime.multipart import MIMEMultipart +from importlib.resources import files +from typing import Any, Callable, Iterator, Literal, TYPE_CHECKING, TypedDict + +import click +import pkg_resources +from celery.schedules import crontab +from flask import Blueprint +from flask_appbuilder.security.manager import AUTH_DB +from flask_caching.backends.base import BaseCache +from pandas import Series +from pandas._libs.parsers import STR_NA_VALUES +from sqlalchemy.engine.url import URL +from sqlalchemy.orm.query import Query + +from superset.advanced_data_type.plugins.internet_address import internet_address +from superset.advanced_data_type.plugins.internet_port import internet_port +from superset.advanced_data_type.types import AdvancedDataType +from superset.constants import CHANGE_ME_SECRET_KEY +from superset.jinja_context import BaseTemplateProcessor +from superset.key_value.types import JsonKeyValueCodec +from superset.stats_logger import DummyStatsLogger +from superset.superset_typing import CacheConfig +from superset.tasks.types import ExecutorType +from superset.utils import core as utils +from superset.utils.core import is_test, NO_TIME_RANGE, parse_boolean_string +from superset.utils.encrypt import SQLAlchemyUtilsAdapter +from superset.utils.log import DBEventLogger +from superset.utils.logging_configurator import DefaultLoggingConfigurator + +logger = logging.getLogger(__name__) + +if TYPE_CHECKING: + from flask_appbuilder.security.sqla import models + from sqlglot import Dialect, Dialects + + from superset.connectors.sqla.models import SqlaTable + from superset.models.core import Database + from superset.models.dashboard import Dashboard + from superset.models.slice import Slice + +# Realtime stats logger, a StatsD implementation exists +STATS_LOGGER = DummyStatsLogger() + +# By default will log events to the metadata database with `DBEventLogger` +# Note that you can use `StdOutEventLogger` for debugging +# Note that you can write your own event logger by extending `AbstractEventLogger` +# https://github.com/apache/superset/blob/master/superset/utils/log.py +EVENT_LOGGER = DBEventLogger() + +SUPERSET_LOG_VIEW = True + +BASE_DIR = pkg_resources.resource_filename("superset", "") +if "SUPERSET_HOME" in os.environ: + DATA_DIR = os.environ["SUPERSET_HOME"] +else: + DATA_DIR = os.path.expanduser("~/.superset") + +# --------------------------------------------------------- +# Superset specific config +# --------------------------------------------------------- +VERSION_INFO_FILE = str(files("superset") / "static/version_info.json") +PACKAGE_JSON_FILE = str(files("superset") / "static/assets/package.json") + + +# Multiple favicons can be specified here. The "href" property +# is mandatory, but "sizes," "type," and "rel" are optional. 
+# For example: +# { +# "href":path/to/image.png", +# "sizes": "16x16", +# "type": "image/png" +# "rel": "icon" +# }, +FAVICONS = [{"href": "/static/assets/images/favicon.png"}] + + +def _try_json_readversion(filepath: str) -> str | None: + try: + with open(filepath) as f: + return json.load(f).get("version") + except Exception: # pylint: disable=broad-except + return None + + +def _try_json_readsha(filepath: str, length: int) -> str | None: + try: + with open(filepath) as f: + return json.load(f).get("GIT_SHA")[:length] + except Exception: # pylint: disable=broad-except + return None + + +# +# If True, we will skip the call to load the logger config found in alembic.init +# +ALEMBIC_SKIP_LOG_CONFIG = False + +# Depending on the context in which this config is loaded, the +# version_info.json file may or may not be available, as it is +# generated on install via setup.py. In the event that we're +# actually running Superset, we will have already installed, +# therefore it WILL exist. When unit tests are running, however, +# it WILL NOT exist, so we fall back to reading package.json +VERSION_STRING = _try_json_readversion(VERSION_INFO_FILE) or _try_json_readversion( + PACKAGE_JSON_FILE +) + +VERSION_SHA_LENGTH = 8 +VERSION_SHA = _try_json_readsha(VERSION_INFO_FILE, VERSION_SHA_LENGTH) + +# Build number is shown in the About section if available. This +# can be replaced at build time to expose build information. +BUILD_NUMBER = None + +# default viz used in chart explorer & SQL Lab explore +DEFAULT_VIZ_TYPE = "table" + +# default row limit when requesting chart data +ROW_LIMIT = 50000 +# default row limit when requesting samples from datasource in explore view +SAMPLES_ROW_LIMIT = 1000 +# default row limit for native filters +NATIVE_FILTER_DEFAULT_ROW_LIMIT = 1000 +# max rows retrieved by filter select auto complete +FILTER_SELECT_ROW_LIMIT = 10000 +# default time filter in explore +# values may be "Last day", "Last week", " : now", etc. +DEFAULT_TIME_FILTER = NO_TIME_RANGE + +# This is an important setting, and should be lower than your +# [load balancer / proxy / envoy / kong / ...] timeout settings. +# You should also make sure to configure your WSGI server +# (gunicorn, nginx, apache, ...) timeout setting to be <= to this setting +SUPERSET_WEBSERVER_TIMEOUT = int(timedelta(minutes=1).total_seconds()) + +# this 2 settings are used by dashboard period force refresh feature +# When user choose auto force refresh frequency +# < SUPERSET_DASHBOARD_PERIODICAL_REFRESH_LIMIT +# they will see warning message in the Refresh Interval Modal. +# please check PR #9886 +SUPERSET_DASHBOARD_PERIODICAL_REFRESH_LIMIT = 0 +SUPERSET_DASHBOARD_PERIODICAL_REFRESH_WARNING_MESSAGE = None + +SUPERSET_DASHBOARD_POSITION_DATA_LIMIT = 65535 +CUSTOM_SECURITY_MANAGER = None +SQLALCHEMY_TRACK_MODIFICATIONS = False +# --------------------------------------------------------- + +# Your App secret key. Make sure you override it on superset_config.py +# or use `SUPERSET_SECRET_KEY` environment variable. +# Use a strong complex alphanumeric string and use a tool to help you generate +# a sufficiently random sequence, ex: openssl rand -base64 42" +SECRET_KEY = os.environ.get("SUPERSET_SECRET_KEY") or CHANGE_ME_SECRET_KEY + +# The SQLAlchemy connection string. 
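+# (Illustrative sketch only: in this Swarm deployment the metadata database is
+# expected to be supplied via the environment; the host name "db" and the
+# credentials below are placeholders, not values taken from this stack.)
+# SQLALCHEMY_DATABASE_URI = "postgresql+psycopg2://superset:superset@db:5432/superset"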
+SQLALCHEMY_DATABASE_URI = ( + f"""sqlite:///{os.path.join(DATA_DIR, "superset.db")}?check_same_thread=false""" +) + +# SQLALCHEMY_DATABASE_URI = 'mysql://myapp@localhost/myapp' +# SQLALCHEMY_DATABASE_URI = 'postgresql://root:password@localhost/myapp' + +# This config is exposed through flask-sqlalchemy, and can be used to set your metadata +# database connection settings. You can use this to set arbitrary connection settings +# that may be specific to the database engine you are using. +# Note that you can use this to set the isolation level of your database, as in +# `SQLALCHEMY_ENGINE_OPTIONS = {"isolation_level": "READ COMMITTED"}` +# Also note that we recommend READ COMMITTED for regular operation. +# Find out more here https://flask-sqlalchemy.palletsprojects.com/en/3.1.x/config/ +SQLALCHEMY_ENGINE_OPTIONS = {} + +# In order to hook up a custom password store for all SQLALCHEMY connections +# implement a function that takes a single argument of type 'sqla.engine.url', +# returns a password and set SQLALCHEMY_CUSTOM_PASSWORD_STORE. +# +# e.g.: +# def lookup_password(url): +# return 'secret' +# SQLALCHEMY_CUSTOM_PASSWORD_STORE = lookup_password +SQLALCHEMY_CUSTOM_PASSWORD_STORE = None + +# +# The EncryptedFieldTypeAdapter is used whenever we're building SqlAlchemy models +# which include sensitive fields that should be app-encrypted BEFORE sending +# to the DB. +# +# Note: the default impl leverages SqlAlchemyUtils' EncryptedType, which defaults +# to AesEngine that uses AES-128 under the covers using the app's SECRET_KEY +# as key material. Do note that AesEngine allows for queryability over the +# encrypted fields. +# +# To change the default engine you need to define your own adapter: +# +# e.g.: +# +# class AesGcmEncryptedAdapter( +# AbstractEncryptedFieldAdapter +# ): +# def create( +# self, +# app_config: Optional[Dict[str, Any]], +# *args: List[Any], +# **kwargs: Optional[Dict[str, Any]], +# ) -> TypeDecorator: +# if app_config: +# return EncryptedType( +# *args, app_config["SECRET_KEY"], engine=AesGcmEngine, **kwargs +# ) +# raise Exception("Missing app_config kwarg") +# +# +# SQLALCHEMY_ENCRYPTED_FIELD_TYPE_ADAPTER = AesGcmEncryptedAdapter +SQLALCHEMY_ENCRYPTED_FIELD_TYPE_ADAPTER = ( # pylint: disable=invalid-name + SQLAlchemyUtilsAdapter +) + +# Extends the default SQLGlot dialects with additional dialects +SQLGLOT_DIALECTS_EXTENSIONS: dict[str, Dialects | type[Dialect]] = {} + +# The limit of queries fetched for query search +QUERY_SEARCH_LIMIT = 1000 + +# Flask-WTF flag for CSRF TODO: Disable CSRF +WTF_CSRF_ENABLED = False + +# Add endpoints that need to be exempt from CSRF protection +WTF_CSRF_EXEMPT_LIST = [ + "superset.views.core.log", + "superset.views.core.explore_json", + "superset.charts.data.api.data", + "superset.dashboards.api.cache_dashboard_screenshot", +] + +# Whether to run the web server in debug mode or not +DEBUG = parse_boolean_string(os.environ.get("FLASK_DEBUG")) +FLASK_USE_RELOAD = True + +# Enable profiling of Python calls. Turn this on and append ``?_instrument=1`` +# to the page to see the call stack. +PROFILING = False + +# Superset allows server-side python stacktraces to be surfaced to the +# user when this feature is on. This may have security implications +# and it's more secure to turn it off in production settings. +SHOW_STACKTRACE = False + +# Use all X-Forwarded headers when ENABLE_PROXY_FIX is True. +# When proxying to a different port, set "x_port" to 0 to avoid downstream issues. 
+ENABLE_PROXY_FIX = False +PROXY_FIX_CONFIG = {"x_for": 1, "x_proto": 1, "x_host": 1, "x_port": 1, "x_prefix": 1} + +# Configuration for scheduling queries from SQL Lab. +SCHEDULED_QUERIES: dict[str, Any] = {} + +# FAB Rate limiting: this is a security feature for preventing DDOS attacks. The +# feature is on by default to make Superset secure by default, but you should +# fine tune the limits to your needs. You can read more about the different +# parameters here: https://flask-limiter.readthedocs.io/en/stable/configuration.html +RATELIMIT_ENABLED = os.environ.get("SUPERSET_ENV") == "production" +RATELIMIT_APPLICATION = "50 per second" +AUTH_RATE_LIMITED = True +AUTH_RATE_LIMIT = "5 per second" +# A storage location conforming to the scheme in storage-scheme. See the limits +# library for allowed values: https://limits.readthedocs.io/en/stable/storage.html +# RATELIMIT_STORAGE_URI = "redis://host:port" +# A callable that returns the unique identity of the current request. +# RATELIMIT_REQUEST_IDENTIFIER = flask.Request.endpoint + +# ------------------------------ +# GLOBALS FOR APP Builder +# ------------------------------ +# Uncomment to setup Your App name +APP_NAME = "Dashboards | Ghana Revenue Authority" + +# Specify the App icon +APP_ICON = "/static/assets/images/GRA.png" + +# Specify where clicking the logo would take the user' +# Default value of None will take you to '/superset/welcome' +# You can also specify a relative URL e.g. '/superset/welcome' or '/dashboards/list' +# or you can specify a full URL e.g. 'https://foo.bar' +LOGO_TARGET_PATH = None + +# Specify tooltip that should appear when hovering over the App Icon/Logo +LOGO_TOOLTIP = "" + +# Specify any text that should appear to the right of the logo +LOGO_RIGHT_TEXT: Callable[[], str] | str = "" + +# Enables SWAGGER UI for superset openapi spec +# ex: http://localhost:8080/swagger/v1 +FAB_API_SWAGGER_UI = True + +# ---------------------------------------------------- +# AUTHENTICATION CONFIG +# ---------------------------------------------------- +# The authentication type +# AUTH_OID : Is for OpenID +# AUTH_DB : Is for database (username/password) +# AUTH_LDAP : Is for LDAP +# AUTH_REMOTE_USER : Is for using REMOTE_USER from web server +AUTH_TYPE = AUTH_DB + +# Uncomment to setup Full admin role name +# AUTH_ROLE_ADMIN = 'Admin' + +# Uncomment to setup Public role name, no authentication needed +# AUTH_ROLE_PUBLIC = 'Public' + +# Will allow user self registration +# AUTH_USER_REGISTRATION = True + +# The default user self registration role +# AUTH_USER_REGISTRATION_ROLE = "Public" + +# When using LDAP Auth, setup the LDAP server +# AUTH_LDAP_SERVER = "ldap://ldapserver.new" + +# Uncomment to setup OpenID providers example for OpenID authentication +# OPENID_PROVIDERS = [ +# { 'name': 'Yahoo', 'url': 'https://open.login.yahoo.com/' }, +# { 'name': 'Flickr', 'url': 'https://www.flickr.com/' }, +# ] +# --------------------------------------------------- +# Roles config +# --------------------------------------------------- +# Grant public role the same set of permissions as for a selected builtin role. +# This is useful if one wants to enable anonymous users to view +# dashboards. Explicit grant on specific datasets is still required. 
+PUBLIC_ROLE_LIKE: str | None = None + +# --------------------------------------------------- +# Babel config for translations +# --------------------------------------------------- +# Setup default language +BABEL_DEFAULT_LOCALE = "en" +# Your application default translation path +BABEL_DEFAULT_FOLDER = "superset/translations" +# The allowed translation for your app +LANGUAGES = { + "en": {"flag": "us", "name": "English"}, + "es": {"flag": "es", "name": "Spanish"}, + "it": {"flag": "it", "name": "Italian"}, + "fr": {"flag": "fr", "name": "French"}, + "zh": {"flag": "cn", "name": "Chinese"}, + "zh_TW": {"flag": "tw", "name": "Traditional Chinese"}, + "ja": {"flag": "jp", "name": "Japanese"}, + "de": {"flag": "de", "name": "German"}, + "pt": {"flag": "pt", "name": "Portuguese"}, + "pt_BR": {"flag": "br", "name": "Brazilian Portuguese"}, + "ru": {"flag": "ru", "name": "Russian"}, + "ko": {"flag": "kr", "name": "Korean"}, + "sk": {"flag": "sk", "name": "Slovak"}, + "sl": {"flag": "si", "name": "Slovenian"}, + "nl": {"flag": "nl", "name": "Dutch"}, + "uk": {"flag": "uk", "name": "Ukranian"}, +} +# Turning off i18n by default as translation in most languages are +# incomplete and not well maintained. +LANGUAGES = {} + + +# Override the default d3 locale format +# Default values are equivalent to +# D3_FORMAT = { +# "decimal": ".", # - decimal place string (e.g., "."). +# "thousands": ",", # - group separator string (e.g., ","). +# "grouping": [3], # - array of group sizes (e.g., [3]), cycled as needed. +# "currency": ["$", ""] # - currency prefix/suffix strings (e.g., ["$", ""]) +# } +# https://github.com/d3/d3-format/blob/main/README.md#formatLocale +class D3Format(TypedDict, total=False): + decimal: str + thousands: str + grouping: list[int] + currency: list[str] + + +D3_FORMAT: D3Format = {} + + +# Override the default d3 locale for time format +# Default values are equivalent to +# D3_TIME_FORMAT = { +# "dateTime": "%x, %X", +# "date": "%-m/%-d/%Y", +# "time": "%-I:%M:%S %p", +# "periods": ["AM", "PM"], +# "days": ["Sunday", "Monday", "Tuesday", "Wednesday", +# "Thursday", "Friday", "Saturday"], +# "shortDays": ["Sun", "Mon", "Tue", "Wed", "Thu", "Fri", "Sat"], +# "months": ["January", "February", "March", "April", +# "May", "June", "July", "August", +# "September", "October", "November", "December"], +# "shortMonths": ["Jan", "Feb", "Mar", "Apr", +# "May", "Jun", "Jul", "Aug", +# "Sep", "Oct", "Nov", "Dec"] +# } +# https://github.com/d3/d3-time-format/tree/main#locales +class D3TimeFormat(TypedDict, total=False): + date: str + dateTime: str + time: str + periods: list[str] + days: list[str] + shortDays: list[str] + months: list[str] + shortMonths: list[str] + + +D3_TIME_FORMAT: D3TimeFormat = {} + +CURRENCIES = ["USD", "EUR", "GBP", "INR", "MXN", "JPY", "CNY"] + +# --------------------------------------------------- +# Feature flags +# --------------------------------------------------- +# Feature flags that are set by default go here. 
Their values can be +# overwritten by those specified under FEATURE_FLAGS in superset_config.py +# For example, DEFAULT_FEATURE_FLAGS = { 'FOO': True, 'BAR': False } here +# and FEATURE_FLAGS = { 'BAR': True, 'BAZ': True } in superset_config.py +# will result in combined feature flags of { 'FOO': True, 'BAR': True, 'BAZ': True } +DEFAULT_FEATURE_FLAGS: dict[str, bool] = { + # When using a recent version of Druid that supports JOINs turn this on + "DRUID_JOINS": False, + "DYNAMIC_PLUGINS": False, + # With Superset 2.0, we are updating the default so that the legacy datasource + # editor no longer shows. Currently this is set to false so that the editor + # option does show, but we will be depreciating it. + "DISABLE_LEGACY_DATASOURCE_EDITOR": True, + "ENABLE_TEMPLATE_PROCESSING": False, + # Allow for javascript controls components + # this enables programmers to customize certain charts (like the + # geospatial ones) by inputting javascript in controls. This exposes + # an XSS security vulnerability + "ENABLE_JAVASCRIPT_CONTROLS": False, # deprecated + "KV_STORE": False, # deprecated + # When this feature is enabled, nested types in Presto will be + # expanded into extra columns and/or arrays. This is experimental, + # and doesn't work with all nested types. + "PRESTO_EXPAND_DATA": False, + # Exposes API endpoint to compute thumbnails + "THUMBNAILS": False, + # Enable the endpoints to cache and retrieve dashboard screenshots via webdriver. + # Requires configuring Celery and a cache using THUMBNAIL_CACHE_CONFIG. + "ENABLE_DASHBOARD_SCREENSHOT_ENDPOINTS": False, + # Generate screenshots (PDF or JPG) of dashboards using the web driver. + # When disabled, screenshots are generated on the fly by the browser. + # This feature flag is used by the download feature in the dashboard view. + # It is dependent on ENABLE_DASHBOARD_SCREENSHOT_ENDPOINT being enabled. + "ENABLE_DASHBOARD_DOWNLOAD_WEBDRIVER_SCREENSHOT": False, + "SHARE_QUERIES_VIA_KV_STORE": False, + "TAGGING_SYSTEM": False, + "SQLLAB_BACKEND_PERSISTENCE": True, + "LISTVIEWS_DEFAULT_CARD_VIEW": False, + # When True, this escapes HTML (rather than rendering it) in Markdown components + "ESCAPE_MARKDOWN_HTML": False, + "DASHBOARD_CROSS_FILTERS": True, # deprecated + "DASHBOARD_VIRTUALIZATION": True, + # This feature flag is stil in beta and is not recommended for production use. + "GLOBAL_ASYNC_QUERIES": False, + "EMBEDDED_SUPERSET": False, + # Enables Alerts and reports new implementation + "ALERT_REPORTS": False, + "ALERT_REPORT_TABS": False, + "ALERT_REPORT_SLACK_V2": False, + "DASHBOARD_RBAC": False, + "ENABLE_ADVANCED_DATA_TYPES": False, + # Enabling ALERTS_ATTACH_REPORTS, the system sends email and slack message + # with screenshot and link + # Disables ALERTS_ATTACH_REPORTS, the system DOES NOT generate screenshot + # for report with type 'alert' and sends email and slack message with only link; + # for report with type 'report' still send with email and slack message with + # screenshot and link + "ALERTS_ATTACH_REPORTS": True, + # Allow users to export full CSV of table viz type. + # This could cause the server to run out of memory or compute. + "ALLOW_FULL_CSV_EXPORT": False, + "ALLOW_ADHOC_SUBQUERY": False, + "USE_ANALAGOUS_COLORS": False, + # Apply RLS rules to SQL Lab queries. This requires parsing and manipulating the + # query, and might break queries and/or allow users to bypass RLS. Use with care! 
+ "RLS_IN_SQLLAB": False, + # When impersonating a user, use the email prefix instead of the username + "IMPERSONATE_WITH_EMAIL_PREFIX": False, + # Enable caching per impersonation key (e.g username) in a datasource where user + # impersonation is enabled + "CACHE_IMPERSONATION": False, + # Enable caching per user key for Superset cache (not database cache impersonation) + "CACHE_QUERY_BY_USER": False, + # Enable sharing charts with embedding + "EMBEDDABLE_CHARTS": True, + "DRILL_TO_DETAIL": True, + "DRILL_BY": True, + "DATAPANEL_CLOSED_BY_DEFAULT": False, + "HORIZONTAL_FILTER_BAR": False, + # The feature is off by default, and currently only supported in Presto and Postgres, + # and Bigquery. + # It also needs to be enabled on a per-database basis, by adding the key/value pair + # `cost_estimate_enabled: true` to the database `extra` attribute. + "ESTIMATE_QUERY_COST": False, + # Allow users to enable ssh tunneling when creating a DB. + # Users must check whether the DB engine supports SSH Tunnels + # otherwise enabling this flag won't have any effect on the DB. + "SSH_TUNNELING": False, + "AVOID_COLORS_COLLISION": True, + # Do not show user info in the menu + "MENU_HIDE_USER_INFO": False, + # Allows users to add a ``superset://`` DB that can query across databases. This is + # an experimental feature with potential security and performance risks, so use with + # caution. If the feature is enabled you can also set a limit for how much data is + # returned from each database in the ``SUPERSET_META_DB_LIMIT`` configuration value + # in this file. + "ENABLE_SUPERSET_META_DB": False, + # Set to True to replace Selenium with Playwright to execute reports and thumbnails. + # Unlike Selenium, Playwright reports support deck.gl visualizations + # Enabling this feature flag requires installing "playwright" pip package + "PLAYWRIGHT_REPORTS_AND_THUMBNAILS": False, + # Set to True to enable experimental chart plugins + "CHART_PLUGINS_EXPERIMENTAL": False, + # Regardless of database configuration settings, force SQLLAB to run async using Celery + "SQLLAB_FORCE_RUN_ASYNC": False, + # Set to True to to enable factory resent CLI command + "ENABLE_FACTORY_RESET_COMMAND": False, + # Whether Superset should use Slack avatars for users. + # If on, you'll want to add "https://avatars.slack-edge.com" to the list of allowed + # domains in your TALISMAN_CONFIG + "SLACK_ENABLE_AVATARS": False, +} + +# ------------------------------ +# SSH Tunnel +# ------------------------------ +# Allow users to set the host used when connecting to the SSH Tunnel +# as localhost and any other alias (0.0.0.0) +# ---------------------------------------------------------------------- +# | +# -------------+ | +----------+ +# LOCAL | | | REMOTE | :22 SSH +# CLIENT | <== SSH ========> | SERVER | :8080 web service +# -------------+ | +----------+ +# | +# FIREWALL (only port 22 is open) + +# ---------------------------------------------------------------------- +SSH_TUNNEL_MANAGER_CLASS = "superset.extensions.ssh.SSHManager" +SSH_TUNNEL_LOCAL_BIND_ADDRESS = "127.0.0.1" +#: Timeout (seconds) for tunnel connection (open_channel timeout) +SSH_TUNNEL_TIMEOUT_SEC = 10.0 +#: Timeout (seconds) for transport socket (``socket.settimeout``) +SSH_TUNNEL_PACKET_TIMEOUT_SEC = 1.0 + + +# Feature flags may also be set via 'SUPERSET_FEATURE_' prefixed environment vars. 
+DEFAULT_FEATURE_FLAGS.update( + { + k[len("SUPERSET_FEATURE_") :]: parse_boolean_string(v) + for k, v in os.environ.items() + if re.search(r"^SUPERSET_FEATURE_\w+", k) + } +) + +# This is merely a default. +FEATURE_FLAGS: dict[str, bool] = {} + +# A function that receives a dict of all feature flags +# (DEFAULT_FEATURE_FLAGS merged with FEATURE_FLAGS) +# can alter it, and returns a similar dict. Note the dict of feature +# flags passed to the function is a deepcopy of the dict in the config, +# and can therefore be mutated without side-effect +# +# GET_FEATURE_FLAGS_FUNC can be used to implement progressive rollouts, +# role-based features, or a full on A/B testing framework. +# +# from flask import g, request +# def GET_FEATURE_FLAGS_FUNC(feature_flags_dict: Dict[str, bool]) -> Dict[str, bool]: +# if hasattr(g, "user") and g.user.is_active: +# feature_flags_dict['some_feature'] = g.user and g.user.get_id() == 5 +# return feature_flags_dict +GET_FEATURE_FLAGS_FUNC: Callable[[dict[str, bool]], dict[str, bool]] | None = None +# A function that receives a feature flag name and an optional default value. +# Has a similar utility to GET_FEATURE_FLAGS_FUNC but it's useful to not force the +# evaluation of all feature flags when just evaluating a single one. +# +# Note that the default `get_feature_flags` will evaluate each feature with this +# callable when the config key is set, so don't use both GET_FEATURE_FLAGS_FUNC +# and IS_FEATURE_ENABLED_FUNC in conjunction. +IS_FEATURE_ENABLED_FUNC: Callable[[str, bool | None], bool] | None = None +# A function that expands/overrides the frontend `bootstrap_data.common` object. +# Can be used to implement custom frontend functionality, +# or dynamically change certain configs. +# +# Values in `bootstrap_data.common` should have these characteristics: +# - They are not specific to a page the user is visiting +# - They do not contain secrets +# +# Takes as a parameter the common bootstrap payload before transformations. +# Returns a dict containing data that should be added or overridden to the payload. 
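+# For example (illustrative sketch; the "deployment" key is hypothetical and
+# would replace the no-op lambda below):
+# def COMMON_BOOTSTRAP_OVERRIDES_FUNC(data: dict[str, Any]) -> dict[str, Any]:
+#     # add a non-secret, page-independent marker to bootstrap_data.common
+#     return {"deployment": "swarm"}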
+COMMON_BOOTSTRAP_OVERRIDES_FUNC: Callable[ # noqa: E731 + [dict[str, Any]], dict[str, Any] +] = lambda data: {} + +# EXTRA_CATEGORICAL_COLOR_SCHEMES is used for adding custom categorical color schemes +# example code for "My custom warm to hot" color scheme +# EXTRA_CATEGORICAL_COLOR_SCHEMES = [ +# { +# "id": 'myVisualizationColors', +# "description": '', +# "label": 'My Visualization Colors', +# "isDefault": True, +# "colors": +# ['#006699', '#009DD9', '#5AAA46', '#44AAAA', '#DDAA77', '#7799BB', '#88AA77', +# '#552288', '#5AAA46', '#CC7788', '#EEDD55', '#9977BB', '#BBAA44', '#DDCCDD'] +# }] + +# This is merely a default +EXTRA_CATEGORICAL_COLOR_SCHEMES: list[dict[str, Any]] = [] + +# THEME_OVERRIDES is used for adding custom theme to superset +# example code for "My theme" custom scheme +# THEME_OVERRIDES = { +# "borderRadius": 4, +# "colors": { +# "primary": { +# "base": 'red', +# }, +# "secondary": { +# "base": 'green', +# }, +# "grayscale": { +# "base": 'orange', +# } +# } +# } + +THEME_OVERRIDES: dict[str, Any] = {} + +# EXTRA_SEQUENTIAL_COLOR_SCHEMES is used for adding custom sequential color schemes +# EXTRA_SEQUENTIAL_COLOR_SCHEMES = [ +# { +# "id": 'warmToHot', +# "description": '', +# "isDiverging": True, +# "label": 'My custom warm to hot', +# "isDefault": True, +# "colors": +# ['#552288', '#5AAA46', '#CC7788', '#EEDD55', '#9977BB', '#BBAA44', '#DDCCDD', +# '#006699', '#009DD9', '#5AAA46', '#44AAAA', '#DDAA77', '#7799BB', '#88AA77'] +# }] + +# This is merely a default +EXTRA_SEQUENTIAL_COLOR_SCHEMES: list[dict[str, Any]] = [] + +# --------------------------------------------------- +# Thumbnail config (behind feature flag) +# --------------------------------------------------- +# By default, thumbnails are rendered per user, and will fall back to the Selenium +# user for anonymous users. Similar to Alerts & Reports, thumbnails +# can be configured to always be rendered as a fixed user. See +# `superset.tasks.types.ExecutorType` for a full list of executor options. +# To always use a fixed user account, use the following configuration: +# THUMBNAIL_EXECUTE_AS = [ExecutorType.SELENIUM] +THUMBNAIL_SELENIUM_USER: str | None = "admin" +THUMBNAIL_EXECUTE_AS = [ExecutorType.CURRENT_USER, ExecutorType.SELENIUM] + +# By default, thumbnail digests are calculated based on various parameters in the +# chart/dashboard metadata, and in the case of user-specific thumbnails, the +# username. To specify a custom digest function, use the following config parameters +# to define callbacks that receive +# 1. the model (dashboard or chart) +# 2. the executor type (e.g. ExecutorType.SELENIUM) +# 3. the executor's username (note, this is the executor as defined by +# `THUMBNAIL_EXECUTE_AS`; the executor is only equal to the currently logged in +# user if the executor type is equal to `ExecutorType.CURRENT_USER`) +# and return the final digest string: +THUMBNAIL_DASHBOARD_DIGEST_FUNC: ( + None | (Callable[[Dashboard, ExecutorType, str], str]) +) = None +THUMBNAIL_CHART_DIGEST_FUNC: Callable[[Slice, ExecutorType, str], str] | None = None + +THUMBNAIL_CACHE_CONFIG: CacheConfig = { + "CACHE_TYPE": "NullCache", + "CACHE_NO_NULL_WARNING": True, +} + +# Time before selenium times out after trying to locate an element on the page and wait +# for that element to load for a screenshot. +SCREENSHOT_LOCATE_WAIT = int(timedelta(seconds=10).total_seconds()) +# Time before selenium times out after waiting for all DOM class elements named +# "loading" are gone. 
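+# (Sketch: dashboards that render slowly may need a larger window, e.g.
+# SCREENSHOT_LOAD_WAIT = int(timedelta(minutes=2).total_seconds()); the
+# default below is one minute.)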
+SCREENSHOT_LOAD_WAIT = int(timedelta(minutes=1).total_seconds()) +# Selenium destroy retries +SCREENSHOT_SELENIUM_RETRIES = 5 +# Give selenium an headstart, in seconds +SCREENSHOT_SELENIUM_HEADSTART = 3 +# Wait for the chart animation, in seconds +SCREENSHOT_SELENIUM_ANIMATION_WAIT = 5 +# Replace unexpected errors in screenshots with real error messages +SCREENSHOT_REPLACE_UNEXPECTED_ERRORS = False +# Max time to wait for error message modal to show up, in seconds +SCREENSHOT_WAIT_FOR_ERROR_MODAL_VISIBLE = 5 +# Max time to wait for error message modal to close, in seconds +SCREENSHOT_WAIT_FOR_ERROR_MODAL_INVISIBLE = 5 +# Event that Playwright waits for when loading a new page +# Possible values: "load", "commit", "domcontentloaded", "networkidle" +# Docs: https://playwright.dev/python/docs/api/class-page#page-goto-option-wait-until +SCREENSHOT_PLAYWRIGHT_WAIT_EVENT = "load" +# Default timeout for Playwright browser context for all operations +SCREENSHOT_PLAYWRIGHT_DEFAULT_TIMEOUT = int( + timedelta(seconds=30).total_seconds() * 1000 +) + +# --------------------------------------------------- +# Image and file configuration +# --------------------------------------------------- +# The file upload folder, when using models with files +UPLOAD_FOLDER = BASE_DIR + "/app/static/uploads/" +UPLOAD_CHUNK_SIZE = 4096 + +# The image upload folder, when using models with images +IMG_UPLOAD_FOLDER = BASE_DIR + "/app/static/uploads/" + +# The image upload url, when using models with images +IMG_UPLOAD_URL = "/static/uploads/" +# Setup image size default is (300, 200, True) +# IMG_SIZE = (300, 200, True) + +# Default cache timeout, applies to all cache backends unless specifically overridden in +# each cache config. +CACHE_DEFAULT_TIMEOUT = int(timedelta(days=1).total_seconds()) + +# Default cache for Superset objects +CACHE_CONFIG: CacheConfig = {"CACHE_TYPE": "NullCache"} + +# Cache for datasource metadata and query results +DATA_CACHE_CONFIG: CacheConfig = {"CACHE_TYPE": "NullCache"} + +# Cache for dashboard filter state. `CACHE_TYPE` defaults to `SupersetMetastoreCache` +# that stores the values in the key-value table in the Superset metastore, as it's +# required for Superset to operate correctly, but can be replaced by any +# `Flask-Caching` backend. +FILTER_STATE_CACHE_CONFIG: CacheConfig = { + "CACHE_TYPE": "SupersetMetastoreCache", + "CACHE_DEFAULT_TIMEOUT": int(timedelta(days=90).total_seconds()), + # Should the timeout be reset when retrieving a cached value? + "REFRESH_TIMEOUT_ON_RETRIEVAL": True, + # The following parameter only applies to `MetastoreCache`: + # How should entries be serialized/deserialized? + "CODEC": JsonKeyValueCodec(), +} + +# Cache for explore form data state. `CACHE_TYPE` defaults to `SupersetMetastoreCache` +# that stores the values in the key-value table in the Superset metastore, as it's +# required for Superset to operate correctly, but can be replaced by any +# `Flask-Caching` backend. +EXPLORE_FORM_DATA_CACHE_CONFIG: CacheConfig = { + "CACHE_TYPE": "SupersetMetastoreCache", + "CACHE_DEFAULT_TIMEOUT": int(timedelta(days=7).total_seconds()), + # Should the timeout be reset when retrieving a cached value? + "REFRESH_TIMEOUT_ON_RETRIEVAL": True, + # The following parameter only applies to `MetastoreCache`: + # How should entries be serialized/deserialized? 
+ "CODEC": JsonKeyValueCodec(), +} + +# store cache keys by datasource UID (via CacheKey) for custom processing/invalidation +STORE_CACHE_KEYS_IN_METADATA_DB = False + +# CORS Options +ENABLE_CORS = False +CORS_OPTIONS: dict[Any, Any] = {} + +# Sanitizes the HTML content used in markdowns to allow its rendering in a safe manner. +# Disabling this option is not recommended for security reasons. If you wish to allow +# valid safe elements that are not included in the default sanitization schema, use the +# HTML_SANITIZATION_SCHEMA_EXTENSIONS configuration. +HTML_SANITIZATION = True + +# Use this configuration to extend the HTML sanitization schema. +# By default we use the GitHub schema defined in +# https://github.com/syntax-tree/hast-util-sanitize/blob/main/lib/schema.js +# For example, the following configuration would allow the rendering of the +# style attribute for div elements and the ftp protocol in hrefs: +# HTML_SANITIZATION_SCHEMA_EXTENSIONS = { +# "attributes": { +# "div": ["style"], +# }, +# "protocols": { +# "href": ["ftp"], +# } +# } +# Be careful when extending the default schema to avoid XSS attacks. +HTML_SANITIZATION_SCHEMA_EXTENSIONS: dict[str, Any] = {} + +# Chrome allows up to 6 open connections per domain at a time. When there are more +# than 6 slices in dashboard, a lot of time fetch requests are queued up and wait for +# next available socket. PR #5039 is trying to allow domain sharding for Superset, +# and this feature will be enabled by configuration only (by default Superset +# doesn't allow cross-domain request). +SUPERSET_WEBSERVER_DOMAINS = None + +# Allowed format types for upload on Database view +EXCEL_EXTENSIONS = {"xlsx", "xls"} +CSV_EXTENSIONS = {"csv", "tsv", "txt"} +COLUMNAR_EXTENSIONS = {"parquet", "zip"} +ALLOWED_EXTENSIONS = {*EXCEL_EXTENSIONS, *CSV_EXTENSIONS, *COLUMNAR_EXTENSIONS} + +# Optional maximum file size in bytes when uploading a CSV +CSV_UPLOAD_MAX_SIZE = None + +# CSV Options: key/value pairs that will be passed as argument to DataFrame.to_csv +# method. +# note: index option should not be overridden +CSV_EXPORT = {"encoding": "utf-8"} + +# Excel Options: key/value pairs that will be passed as argument to DataFrame.to_excel +# method. +# note: index option should not be overridden +EXCEL_EXPORT: dict[str, Any] = {} + +# --------------------------------------------------- +# Time grain configurations +# --------------------------------------------------- +# List of time grains to disable in the application (see list of builtin +# time grains in superset/db_engine_specs/base.py). +# For example: to disable 1 second time grain: +# TIME_GRAIN_DENYLIST = ['PT1S'] +TIME_GRAIN_DENYLIST: list[str] = [] + +# Additional time grains to be supported using similar definitions as in +# superset/db_engine_specs/base.py. +# For example: To add a new 2 second time grain: +# TIME_GRAIN_ADDONS = {'PT2S': '2 second'} +TIME_GRAIN_ADDONS: dict[str, str] = {} + +# Implementation of additional time grains per engine. +# The column to be truncated is denoted `{col}` in the expression. +# For example: To implement 2 second time grain on clickhouse engine: +# TIME_GRAIN_ADDON_EXPRESSIONS = { +# 'clickhouse': { +# 'PT2S': 'toDateTime(intDiv(toUInt32(toDateTime({col})), 2)*2)' +# } +# } +TIME_GRAIN_ADDON_EXPRESSIONS: dict[str, dict[str, str]] = {} + +# Map of custom time grains and artificial join column producers used +# when generating the join key between results and time shifts. 
+# See superset/common/query_context_processor.get_aggregated_join_column +# +# Example of a join column producer that aggregates by fiscal year +# def join_producer(row: Series, column_index: int) -> str: +# return row[index].strftime("%F") +# +# TIME_GRAIN_JOIN_COLUMN_PRODUCERS = {"P1F": join_producer} +TIME_GRAIN_JOIN_COLUMN_PRODUCERS: dict[str, Callable[[Series, int], str]] = {} + +# --------------------------------------------------- +# List of viz_types not allowed in your environment +# For example: Disable pivot table and treemap: +# VIZ_TYPE_DENYLIST = ['pivot_table', 'treemap'] +# --------------------------------------------------- + +VIZ_TYPE_DENYLIST: list[str] = [] + +# -------------------------------------------------- +# Modules, datasources and middleware to be registered +# -------------------------------------------------- +DEFAULT_MODULE_DS_MAP = OrderedDict( + [ + ("superset.connectors.sqla.models", ["SqlaTable"]), + ] +) +ADDITIONAL_MODULE_DS_MAP: dict[str, list[str]] = {} +ADDITIONAL_MIDDLEWARE: list[Callable[..., Any]] = [] + +# 1) https://docs.python-guide.org/writing/logging/ +# 2) https://docs.python.org/2/library/logging.config.html + +# Default configurator will consume the LOG_* settings below +LOGGING_CONFIGURATOR = DefaultLoggingConfigurator() + +# Console Log Settings + +LOG_FORMAT = "%(asctime)s:%(levelname)s:%(name)s:%(message)s" +LOG_LEVEL = logging.DEBUG if DEBUG else logging.INFO + +# --------------------------------------------------- +# Enable Time Rotate Log Handler +# --------------------------------------------------- +# LOG_LEVEL = DEBUG, INFO, WARNING, ERROR, CRITICAL + +ENABLE_TIME_ROTATE = False +TIME_ROTATE_LOG_LEVEL = logging.DEBUG if DEBUG else logging.INFO +FILENAME = os.path.join(DATA_DIR, "superset.log") +ROLLOVER = "midnight" +INTERVAL = 1 +BACKUP_COUNT = 30 + +# Custom logger for auditing queries. This can be used to send ran queries to a +# structured immutable store for auditing purposes. The function is called for +# every query ran, in both SQL Lab and charts/dashboards. +# def QUERY_LOGGER( +# database, +# query, +# schema=None, +# client=None, +# security_manager=None, +# log_params=None, +# ): +# pass +QUERY_LOGGER = None + +# Set this API key to enable Mapbox visualizations +MAPBOX_API_KEY = os.environ.get("MAPBOX_API_KEY", "") + +# Maximum number of rows returned for any analytical database query +SQL_MAX_ROW = 100000 + +# Maximum number of rows displayed in SQL Lab UI +# Is set to avoid out of memory/localstorage issues in browsers. Does not affect +# exported CSVs +DISPLAY_MAX_ROW = 10000 + +# Default row limit for SQL Lab queries. Is overridden by setting a new limit in +# the SQL Lab UI +DEFAULT_SQLLAB_LIMIT = 1000 + +# The limit for the Superset Meta DB when the feature flag ENABLE_SUPERSET_META_DB is on +SUPERSET_META_DB_LIMIT: int | None = 1000 + +# Adds a warning message on sqllab save query and schedule query modals. +SQLLAB_SAVE_WARNING_MESSAGE = None +SQLLAB_SCHEDULE_WARNING_MESSAGE = None + +# Max payload size (MB) for SQL Lab to prevent browser hangs with large results. 
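+# For example (sketch): SQLLAB_PAYLOAD_MAX_MB = 100 would cap SQL Lab result
+# payloads at roughly 100 MB; None below leaves them unlimited.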
+SQLLAB_PAYLOAD_MAX_MB = None + +# Force refresh while auto-refresh in dashboard +DASHBOARD_AUTO_REFRESH_MODE: Literal["fetch", "force"] = "force" +# Dashboard auto refresh intervals +DASHBOARD_AUTO_REFRESH_INTERVALS = [ + [0, "Don't refresh"], + [10, "10 seconds"], + [30, "30 seconds"], + [60, "1 minute"], + [300, "5 minutes"], + [1800, "30 minutes"], + [3600, "1 hour"], + [21600, "6 hours"], + [43200, "12 hours"], + [86400, "24 hours"], +] + +# This is used as a workaround for the alerts & reports scheduler task to get the time +# celery beat triggered it, see https://github.com/celery/celery/issues/6974 for details +CELERY_BEAT_SCHEDULER_EXPIRES = timedelta(weeks=1) + +# Default celery config is to use SQLA as a broker, in a production setting +# you'll want to use a proper broker as specified here: +# https://docs.celeryq.dev/en/stable/getting-started/backends-and-brokers/index.html + + +class CeleryConfig: # pylint: disable=too-few-public-methods + broker_url = "sqla+sqlite:///celerydb.sqlite" + imports = ( + "superset.sql_lab", + "superset.tasks.scheduler", + "superset.tasks.thumbnails", + "superset.tasks.cache", + ) + result_backend = "db+sqlite:///celery_results.sqlite" + worker_prefetch_multiplier = 1 + task_acks_late = False + task_annotations = { + "sql_lab.get_sql_results": { + "rate_limit": "100/s", + }, + } + beat_schedule = { + "reports.scheduler": { + "task": "reports.scheduler", + "schedule": crontab(minute="*", hour="*"), + "options": {"expires": int(CELERY_BEAT_SCHEDULER_EXPIRES.total_seconds())}, + }, + "reports.prune_log": { + "task": "reports.prune_log", + "schedule": crontab(minute=0, hour=0), + }, + # Uncomment to enable pruning of the query table + # "prune_query": { + # "task": "prune_query", + # "schedule": crontab(minute=0, hour=0, day_of_month=1), + # "options": {"retention_period_days": 180}, + # }, + } + + +CELERY_CONFIG: type[CeleryConfig] = CeleryConfig + +# Set celery config to None to disable all the above configuration +# CELERY_CONFIG = None + +# Additional static HTTP headers to be served by your Superset server. Note +# Flask-Talisman applies the relevant security HTTP headers. +# +# DEFAULT_HTTP_HEADERS: sets default values for HTTP headers. These may be overridden +# within the app +# OVERRIDE_HTTP_HEADERS: sets override values for HTTP headers. These values will +# override anything set within the app +DEFAULT_HTTP_HEADERS: dict[str, Any] = {} +OVERRIDE_HTTP_HEADERS: dict[str, Any] = {} +HTTP_HEADERS: dict[str, Any] = {} + +# The db id here results in selecting this one as a default in SQL Lab +DEFAULT_DB_ID = None + +# Timeout duration for SQL Lab synchronous queries +SQLLAB_TIMEOUT = int(timedelta(seconds=30).total_seconds()) + +# Timeout duration for SQL Lab query validation +SQLLAB_VALIDATION_TIMEOUT = int(timedelta(seconds=10).total_seconds()) + +# SQLLAB_DEFAULT_DBID +SQLLAB_DEFAULT_DBID = None + +# The MAX duration a query can run for before being killed by celery. +SQLLAB_ASYNC_TIME_LIMIT_SEC = int(timedelta(hours=6).total_seconds()) + +# Some databases support running EXPLAIN queries that allow users to estimate +# query costs before they run. These EXPLAIN queries should have a small +# timeout. +SQLLAB_QUERY_COST_ESTIMATE_TIMEOUT = int(timedelta(seconds=10).total_seconds()) + +# Timeout duration for SQL Lab fetching query results by the resultsKey. +# 0 means no timeout. 
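+# For example (sketch):
+# SQLLAB_QUERY_RESULT_TIMEOUT = int(timedelta(minutes=10).total_seconds())
+# would stop waiting after ten minutes; 0 below disables the timeout.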
+SQLLAB_QUERY_RESULT_TIMEOUT = 0 + +# The cost returned by the databases is a relative value; in order to map the cost to +# a tangible value you need to define a custom formatter that takes into consideration +# your specific infrastructure. For example, you could analyze queries a posteriori by +# running EXPLAIN on them, and compute a histogram of relative costs to present the +# cost as a percentile, this step is optional as every db engine spec has its own +# query cost formatter, but it you wanna customize it you can define it inside the config: + +# def postgres_query_cost_formatter( +# result: List[Dict[str, Any]] +# ) -> List[Dict[str, str]]: +# # 25, 50, 75% percentiles +# percentile_costs = [100.0, 1000.0, 10000.0] +# +# out = [] +# for row in result: +# relative_cost = row["Total cost"] +# percentile = bisect.bisect_left(percentile_costs, relative_cost) + 1 +# out.append({ +# "Relative cost": relative_cost, +# "Percentile": str(percentile * 25) + "%", +# }) +# +# return out +# +# QUERY_COST_FORMATTERS_BY_ENGINE: {"postgresql": postgres_query_cost_formatter} +QUERY_COST_FORMATTERS_BY_ENGINE: dict[ + str, Callable[[list[dict[str, Any]]], list[dict[str, Any]]] +] = {} + +# Flag that controls if limit should be enforced on the CTA (create table as queries). +SQLLAB_CTAS_NO_LIMIT = False + +# This allows you to define custom logic around the "CREATE TABLE AS" or CTAS feature +# in SQL Lab that defines where the target schema should be for a given user. +# Database `CTAS Schema` has a precedence over this setting. +# Example below returns a username and CTA queries will write tables into the schema +# name `username` +# SQLLAB_CTAS_SCHEMA_NAME_FUNC = lambda database, user, schema, sql: user.username +# This is move involved example where depending on the database you can leverage data +# available to assign schema for the CTA query: +# def compute_schema_name(database: Database, user: User, schema: str, sql: str) -> str: +# if database.name == 'mysql_payments_slave': +# return 'tmp_superset_schema' +# if database.name == 'presto_gold': +# return user.username +# if database.name == 'analytics': +# if 'analytics' in [r.name for r in user.roles]: +# return 'analytics_cta' +# else: +# return f'tmp_{schema}' +# Function accepts database object, user object, schema name and sql that will be run. +SQLLAB_CTAS_SCHEMA_NAME_FUNC: ( + None | (Callable[[Database, models.User, str, str], str]) +) = None + +# If enabled, it can be used to store the results of long-running queries +# in SQL Lab by using the "Run Async" button/feature +RESULTS_BACKEND: BaseCache | None = None + +# Use PyArrow and MessagePack for async query results serialization, +# rather than JSON. This feature requires additional testing from the +# community before it is fully adopted, so this config option is provided +# in order to disable should breaking issues be discovered. +RESULTS_BACKEND_USE_MSGPACK = True + +# The S3 bucket where you want to store your external hive tables created +# from CSV files. For example, 'companyname-superset' +CSV_TO_HIVE_UPLOAD_S3_BUCKET = None + +# The directory within the bucket specified above that will +# contain all the external tables +CSV_TO_HIVE_UPLOAD_DIRECTORY = "EXTERNAL_HIVE_TABLES/" + + +# Function that creates upload directory dynamically based on the +# database used, user and schema provided. 
+def CSV_TO_HIVE_UPLOAD_DIRECTORY_FUNC( # pylint: disable=invalid-name + database: Database, + user: models.User, # pylint: disable=unused-argument + schema: str | None, +) -> str: + # Note the final empty path enforces a trailing slash. + return os.path.join( + CSV_TO_HIVE_UPLOAD_DIRECTORY, str(database.id), schema or "", "" + ) + + +# The namespace within hive where the tables created from +# uploading CSVs will be stored. +UPLOADED_CSV_HIVE_NAMESPACE: str | None = None + + +# Function that computes the allowed schemas for the CSV uploads. +# Allowed schemas will be a union of schemas_allowed_for_file_upload +# db configuration and a result of this function. +def allowed_schemas_for_csv_upload( # pylint: disable=unused-argument + database: Database, + user: models.User, +) -> list[str]: + return [UPLOADED_CSV_HIVE_NAMESPACE] if UPLOADED_CSV_HIVE_NAMESPACE else [] + + +ALLOWED_USER_CSV_SCHEMA_FUNC = allowed_schemas_for_csv_upload + +# Values that should be treated as nulls for the csv uploads. +CSV_DEFAULT_NA_NAMES = list(STR_NA_VALUES) + +# A dictionary of items that gets merged into the Jinja context for +# SQL Lab. The existing context gets updated with this dictionary, +# meaning values for existing keys get overwritten by the content of this +# dictionary. Exposing functionality through JINJA_CONTEXT_ADDONS has security +# implications as it opens a window for a user to execute untrusted code. +# It's important to make sure that the objects exposed (as well as objects attached +# to those objects) are harmless. We recommend only exposing simple/pure functions that +# return native types. +JINJA_CONTEXT_ADDONS: dict[str, Callable[..., Any]] = {} + +# A dictionary of macro template processors (by engine) that gets merged into global +# template processors. The existing template processors get updated with this +# dictionary, which means the existing keys get overwritten by the content of this +# dictionary. The customized addons don't necessarily need to use Jinja templating +# language. This allows you to define custom logic to process templates on a per-engine +# basis. Example value = `{"presto": CustomPrestoTemplateProcessor}` +CUSTOM_TEMPLATE_PROCESSORS: dict[str, type[BaseTemplateProcessor]] = {} + +# Roles that are controlled by the API / Superset and should not be changed +# by humans. +ROBOT_PERMISSION_ROLES = ["Public", "Gamma", "Alpha", "Admin", "sql_lab"] + +CONFIG_PATH_ENV_VAR = "SUPERSET_CONFIG_PATH" + +# If a callable is specified, it will be called at app startup while passing +# a reference to the Flask app. This can be used to alter the Flask app +# in whatever way. +# example: FLASK_APP_MUTATOR = lambda x: x.before_request = f +FLASK_APP_MUTATOR = None + +# smtp server configuration +SMTP_HOST = "localhost" +SMTP_STARTTLS = True +SMTP_SSL = False +SMTP_USER = "superset" +SMTP_PORT = 25 +SMTP_PASSWORD = "superset" +SMTP_MAIL_FROM = "superset@superset.com" +# If True creates a default SSL context with ssl.Purpose.CLIENT_AUTH using the +# default system root CA certificates. 
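+# (Sketch: SMTP_SSL_SERVER_AUTH = True is only useful when SMTP_STARTTLS or
+# SMTP_SSL is enabled and the relay presents a certificate signed by a system
+# root CA; the relay itself is an assumption, not part of this stack.)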
+SMTP_SSL_SERVER_AUTH = False +ENABLE_CHUNK_ENCODING = False + +# Whether to bump the logging level to ERROR on the flask_appbuilder package +# Set to False if/when debugging FAB related issues like +# permission management +SILENCE_FAB = True + +FAB_ADD_SECURITY_VIEWS = True +FAB_ADD_SECURITY_PERMISSION_VIEW = False +FAB_ADD_SECURITY_VIEW_MENU_VIEW = False +FAB_ADD_SECURITY_PERMISSION_VIEWS_VIEW = False + +# The link to a page containing common errors and their resolutions +# It will be appended at the bottom of sql_lab errors. +TROUBLESHOOTING_LINK = "" + +# CSRF token timeout, set to None for a token that never expires +WTF_CSRF_TIME_LIMIT = int(timedelta(weeks=1).total_seconds()) + +# This link should lead to a page with instructions on how to gain access to a +# Datasource. It will be placed at the bottom of permissions errors. +PERMISSION_INSTRUCTIONS_LINK = "" + +# Integrate external Blueprints to the app by passing them to your +# configuration. These blueprints will get integrated in the app +BLUEPRINTS: list[Blueprint] = [] + +# Provide a callable that receives a tracking_url and returns another +# URL. This is used to translate internal Hadoop job tracker URL +# into a proxied one + + +# Transform SQL query tracking url for Hive and Presto engines. You may also +# access information about the query itself by adding a second parameter +# to your transformer function, e.g.: +# TRACKING_URL_TRANSFORMER = ( +# lambda url, query: url if is_fresh(query) else None +# ) +# pylint: disable-next=unnecessary-lambda-assignment +TRACKING_URL_TRANSFORMER = lambda url: url # noqa: E731 + + +# customize the polling time of each engine +DB_POLL_INTERVAL_SECONDS: dict[str, int] = {} + +# Interval between consecutive polls when using Presto Engine +# See here: https://github.com/dropbox/PyHive/blob/8eb0aeab8ca300f3024655419b93dad926c1a351/pyhive/presto.py#L93 # pylint: disable=line-too-long,useless-suppression +PRESTO_POLL_INTERVAL = int(timedelta(seconds=1).total_seconds()) + +# Allow list of custom authentications for each DB engine. +# Example: +# from your.module import AuthClass +# from another.extra import auth_method +# +# ALLOWED_EXTRA_AUTHENTICATIONS: Dict[str, Dict[str, Callable[..., Any]]] = { +# "trino": { +# "custom_auth": AuthClass, +# "another_auth_method": auth_method, +# }, +# } +ALLOWED_EXTRA_AUTHENTICATIONS: dict[str, dict[str, Callable[..., Any]]] = {} + +# The id of a template dashboard that should be copied to every new user +DASHBOARD_TEMPLATE_ID = None + + +# A context manager that wraps the call to `create_engine`. This can be used for many +# things, such as chrooting to prevent 3rd party drivers to access the filesystem, or +# setting up custom configuration for database drivers. +@contextmanager +def engine_context_manager( # pylint: disable=unused-argument + database: Database, + catalog: str | None, + schema: str | None, +) -> Iterator[None]: + yield None + + +ENGINE_CONTEXT_MANAGER = engine_context_manager + +# A callable that allows altering the database connection URL and params +# on the fly, at runtime. This allows for things like impersonation or +# arbitrary logic. For instance you can wire different users to +# use different connection parameters, or pass their email address as the +# username. The function receives the connection uri object, connection +# params, the username, and returns the mutated uri and params objects. 
+# Example: +# def DB_CONNECTION_MUTATOR(uri, params, username, security_manager, source): +# user = security_manager.find_user(username=username) +# if user and user.email: +# uri.username = user.email +# return uri, params +# +# Note that the returned uri and params are passed directly to sqlalchemy's +# as such `create_engine(url, **params)` +DB_CONNECTION_MUTATOR = None + + +# A callable that is invoked for every invocation of DB Engine Specs +# which allows for custom validation of the engine URI. +# See: superset.db_engine_specs.base.BaseEngineSpec.validate_database_uri +# Example: +# def DB_ENGINE_URI_VALIDATOR(sqlalchemy_uri: URL): +# if not : +# raise Exception("URI invalid") +# +DB_SQLA_URI_VALIDATOR: Callable[[URL], None] | None = None + +# A set of disallowed SQL functions per engine. This is used to restrict the use of +# unsafe SQL functions in SQL Lab and Charts. The keys of the dictionary are the engine +# names, and the values are sets of disallowed functions. +DISALLOWED_SQL_FUNCTIONS: dict[str, set[str]] = { + "postgresql": { + "database_to_xml", + "inet_client_addr", + "inet_server_addr", + "query_to_xml", + "query_to_xml_and_xmlschema", + "table_to_xml", + "table_to_xml_and_xmlschema", + "version", + }, + "clickhouse": {"url"}, + "mysql": {"version"}, +} + + +# A function that intercepts the SQL to be executed and can alter it. +# A common use case for this is around adding some sort of comment header to the SQL +# with information such as the username and worker node information +# +# def SQL_QUERY_MUTATOR( +# sql, +# security_manager=security_manager, +# database=database, +# ): +# dttm = datetime.now().isoformat() +# return f"-- [SQL LAB] {user_name} {dttm}\n{sql}" +# +# NOTE: For backward compatibility, you can unpack any of the above arguments in your +# function definition, but keep the **kwargs as the last argument to allow new args +# to be added later without any errors. +# NOTE: whatever you in this function DOES NOT affect the cache key, so ideally this function +# is "functional", as in deterministic from its input. +def SQL_QUERY_MUTATOR( # pylint: disable=invalid-name,unused-argument + sql: str, **kwargs: Any +) -> str: + return sql + + +# A variable that chooses whether to apply the SQL_QUERY_MUTATOR before or after splitting the input query +# It allows for using the SQL_QUERY_MUTATOR function for more than comments +# Usage: If you want to apply a change to every statement to a given query, set MUTATE_AFTER_SPLIT = True +# An example use case is if data has role based access controls, and you want to apply +# a SET ROLE statement alongside every user query. Changing this variable maintains +# functionality for both the SQL_Lab and Charts. +MUTATE_AFTER_SPLIT = False + + +# This allows for a user to add header data to any outgoing emails. For example, +# if you need to include metadata in the header or you want to change the specifications +# of the email title, header, or sender. +def EMAIL_HEADER_MUTATOR( # pylint: disable=invalid-name,unused-argument + msg: MIMEMultipart, **kwargs: Any +) -> MIMEMultipart: + return msg + + +# Define a list of usernames to be excluded from all dropdown lists of users +# Owners, filters for created_by, etc. +# The users can also be excluded by overriding the get_exclude_users_from_lists method +# in security manager +EXCLUDE_USERS_FROM_LISTS: list[str] | None = None + +# For database connections, this dictionary will remove engines from the available +# list/dropdown if you do not want these dbs to show as available. 
+# The available list is generated by the drivers installed, and some engines have
+# multiple drivers.
+# e.g., DBS_AVAILABLE_DENYLIST: Dict[str, Set[str]] = {"databricks": {"pyhive", "pyodbc"}}
+DBS_AVAILABLE_DENYLIST: dict[str, set[str]] = {}
+
+# This auth provider is used by background (offline) tasks that need to access
+# protected resources. Can be overridden by end users in order to support
+# custom auth mechanisms
+MACHINE_AUTH_PROVIDER_CLASS = "superset.utils.machine_auth.MachineAuthProvider"
+
+# ---------------------------------------------------
+# Alerts & Reports
+# ---------------------------------------------------
+# Used for Alerts/Reports (feature flag ALERT_REPORTS) to set the sliding cron
+# window size; should be synced with the celery beat config minus 1 second
+ALERT_REPORTS_CRON_WINDOW_SIZE = 59
+ALERT_REPORTS_WORKING_TIME_OUT_KILL = True
+# Which user to attempt to execute Alerts/Reports as. By default,
+# execute as the primary owner of the alert/report (giving priority to the last
+# modifier and then the creator if either is contained within the list of owners,
+# otherwise the first owner will be used).
+#
+# To first try to execute as the creator in the owners list (if present), then fall
+# back to the creator, then the last modifier in the owners list (if present), then the
+# last modifier, then an owner and finally `THUMBNAIL_SELENIUM_USER`, set as follows:
+# ALERT_REPORTS_EXECUTE_AS = [
+#     ExecutorType.CREATOR_OWNER,
+#     ExecutorType.CREATOR,
+#     ExecutorType.MODIFIER_OWNER,
+#     ExecutorType.MODIFIER,
+#     ExecutorType.OWNER,
+#     ExecutorType.SELENIUM,
+# ]
+ALERT_REPORTS_EXECUTE_AS: list[ExecutorType] = [ExecutorType.OWNER]
+# if ALERT_REPORTS_WORKING_TIME_OUT_KILL is True, set a celery hard timeout
+# equal to working timeout + ALERT_REPORTS_WORKING_TIME_OUT_LAG
+ALERT_REPORTS_WORKING_TIME_OUT_LAG = int(timedelta(seconds=10).total_seconds())
+# if ALERT_REPORTS_WORKING_TIME_OUT_KILL is True, set a celery soft timeout
+# equal to working timeout + ALERT_REPORTS_WORKING_SOFT_TIME_OUT_LAG
+ALERT_REPORTS_WORKING_SOFT_TIME_OUT_LAG = int(timedelta(seconds=1).total_seconds())
+# Default values used when a user creates an alert
+ALERT_REPORTS_DEFAULT_WORKING_TIMEOUT = 3600
+ALERT_REPORTS_DEFAULT_RETENTION = 90
+ALERT_REPORTS_DEFAULT_CRON_VALUE = "0 0 * * *"  # every day
+# If set to true no notification is sent, the worker will just log a message.
+# Useful for debugging
+ALERT_REPORTS_NOTIFICATION_DRY_RUN = False
+# Max tries to run queries to prevent false errors caused by transient errors
+# being returned to users. Set to a value >1 to enable retries.
+ALERT_REPORTS_QUERY_EXECUTION_MAX_TRIES = 1
+# Custom width for screenshots
+ALERT_REPORTS_MIN_CUSTOM_SCREENSHOT_WIDTH = 600
+ALERT_REPORTS_MAX_CUSTOM_SCREENSHOT_WIDTH = 2400
+# Set a minimum interval threshold between executions (for each Alert/Report)
+# Value should be an integer, e.g.
int(timedelta(minutes=5).total_seconds())
+# You can also assign a function to the config that returns the expected integer
+ALERT_MINIMUM_INTERVAL = int(timedelta(minutes=0).total_seconds())
+REPORT_MINIMUM_INTERVAL = int(timedelta(minutes=0).total_seconds())
+
+# A custom prefix to use on all Alerts & Reports emails
+EMAIL_REPORTS_SUBJECT_PREFIX = "[Report] "
+
+# The text for call-to-action link in Alerts & Reports emails
+EMAIL_REPORTS_CTA = "Explore in Superset"
+
+# Slack API token for the superset reports, either string or callable
+SLACK_API_TOKEN: Callable[[], str] | str | None = None
+SLACK_PROXY = None
+
+# The webdriver to use for generating reports. Use one of the following:
+# firefox:
+#   Requires: geckodriver and firefox installations
+#   Limitations: can be buggy at times
+# chrome:
+#   Requires: headless chrome
+#   Limitations: unable to generate screenshots of elements
+WEBDRIVER_TYPE = "firefox"
+
+# Window size - this will impact the rendering of the data
+WEBDRIVER_WINDOW = {
+    "dashboard": (1600, 2000),
+    "slice": (3000, 1200),
+    "pixel_density": 1,
+}
+
+# An optional override to the default auth hook used to provide auth to the offline
+# webdriver (when using Selenium) or browser context (when using Playwright - see
+# PLAYWRIGHT_REPORTS_AND_THUMBNAILS feature flag)
+WEBDRIVER_AUTH_FUNC = None
+
+# Any config options to be passed as-is to the webdriver
+WEBDRIVER_CONFIGURATION: dict[Any, Any] = {"service_log_path": "/dev/null"}
+
+# Additional args to be passed as arguments to the config object
+# Note: If using Chrome, you'll want to add the "--marionette" arg.
+WEBDRIVER_OPTION_ARGS = ["--headless"]
+
+# The base URL to query for accessing the user interface
+WEBDRIVER_BASEURL = "http://0.0.0.0:8080/"
+# The base URL for the email report hyperlinks.
+WEBDRIVER_BASEURL_USER_FRIENDLY = WEBDRIVER_BASEURL
+# Time selenium will wait for the page to load and render for the email report.
+EMAIL_PAGE_RENDER_WAIT = int(timedelta(seconds=30).total_seconds())
+
+# Send user to a link where they can report bugs
+BUG_REPORT_URL = None
+BUG_REPORT_TEXT = "Report a bug"
+BUG_REPORT_ICON = None  # Recommended size: 16x16
+
+# Send user to a link where they can read more about Superset
+DOCUMENTATION_URL = None
+DOCUMENTATION_TEXT = "Documentation"
+DOCUMENTATION_ICON = None  # Recommended size: 16x16
+
+# What is the Last N days relative in the time selector to:
+# 'today' means it is midnight (00:00:00) in the local timezone
+# 'now' means it is relative to the query issue time
+# If both start and end time are set to now, this will make the time
+# filter a moving window. By only setting the end time to now,
+# start time will be set to midnight, while end will be relative to
+# the query issue time.
+DEFAULT_RELATIVE_START_TIME = "today"
+DEFAULT_RELATIVE_END_TIME = "today"
+
+# Configure which SQL validator to use for each engine
+SQL_VALIDATORS_BY_ENGINE = {
+    "presto": "PrestoDBSQLValidator",
+    "postgresql": "PostgreSQLValidator",
+}
+
+# A list of preferred databases, in order. These databases will be
+# displayed prominently in the "Add Database" dialog. You should
+# use the "engine_name" attribute of the corresponding DB engine spec
+# in `superset/db_engine_specs/`.
+PREFERRED_DATABASES: list[str] = [
+    "PostgreSQL",
+    "Presto",
+    "MySQL",
+    "SQLite",
+    # etc.
+]
+# When adding a new database we try to connect to it. Depending on which parameters are
+# incorrect this could take a couple of minutes, until the SQLAlchemy driver pinging the
+# database times out.
Instead of relying on the driver timeout we can specify a shorter
+# one here.
+TEST_DATABASE_CONNECTION_TIMEOUT = timedelta(seconds=30)
+
+# Details needed for databases that allow users to authenticate using personal
+# OAuth2 tokens. See https://github.com/apache/superset/issues/20300 for more
+# information. The scope and URIs are optional.
+DATABASE_OAUTH2_CLIENTS: dict[str, dict[str, Any]] = {
+    # "Google Sheets": {
+    #     "id": "XXX.apps.googleusercontent.com",
+    #     "secret": "GOCSPX-YYY",
+    #     "scope": " ".join(
+    #         [
+    #             "https://www.googleapis.com/auth/drive.readonly",
+    #             "https://www.googleapis.com/auth/spreadsheets",
+    #             "https://spreadsheets.google.com/feeds",
+    #         ]
+    #     ),
+    #     "authorization_request_uri": "https://accounts.google.com/o/oauth2/v2/auth",
+    #     "token_request_uri": "https://oauth2.googleapis.com/token",
+    # },
+}
+# OAuth2 state is encoded in a JWT using the algorithm below.
+DATABASE_OAUTH2_JWT_ALGORITHM = "HS256"
+# By default the redirect URI points to /api/v1/database/oauth2/ and doesn't have to be
+# specified. If you're running multiple Superset instances you might want to have a
+# proxy handling the redirects, since redirect URIs need to be registered in the OAuth2
+# applications. In that case, the proxy can forward the request to the correct instance
+# by looking at the `default_redirect_uri` attribute in the OAuth2 state object.
+# DATABASE_OAUTH2_REDIRECT_URI = "http://localhost:8088/api/v1/database/oauth2/"
+# Timeout when fetching access and refresh tokens.
+DATABASE_OAUTH2_TIMEOUT = timedelta(seconds=30)
+
+# Enable/disable CSP warning
+CONTENT_SECURITY_POLICY_WARNING = True
+
+# Do you want Talisman enabled?
+TALISMAN_ENABLED = utils.cast_to_boolean(os.environ.get("TALISMAN_ENABLED", True))
+
+# If you want Talisman, how do you want it configured?
+TALISMAN_CONFIG = {
+    "content_security_policy": {
+        "base-uri": ["'self'"],
+        "default-src": ["'self'"],
+        "img-src": [
+            "'self'",
+            "blob:",
+            "data:",
+            "https://apachesuperset.gateway.scarf.sh",
+            "https://static.scarf.sh/",
+            # "https://avatars.slack-edge.com", # Uncomment when SLACK_ENABLE_AVATARS is True
+        ],
+        "worker-src": ["'self'", "blob:"],
+        "connect-src": [
+            "'self'",
+            "https://api.mapbox.com",
+            "https://events.mapbox.com",
+        ],
+        "object-src": "'none'",
+        "style-src": [
+            "'self'",
+            "'unsafe-inline'",
+        ],
+        "script-src": ["'self'", "'strict-dynamic'"],
+    },
+    "content_security_policy_nonce_in": ["script-src"],
+    "force_https": False,
+    "session_cookie_secure": False,
+}
+# React requires `eval` to work correctly in dev mode
+TALISMAN_DEV_CONFIG = {
+    "content_security_policy": {
+        "base-uri": ["'self'"],
+        "default-src": ["'self'"],
+        "img-src": [
+            "'self'",
+            "blob:",
+            "data:",
+            "https://apachesuperset.gateway.scarf.sh",
+            "https://static.scarf.sh/",
+            "https://avatars.slack-edge.com",
+        ],
+        "worker-src": ["'self'", "blob:"],
+        "connect-src": [
+            "'self'",
+            "https://api.mapbox.com",
+            "https://events.mapbox.com",
+        ],
+        "object-src": "'none'",
+        "style-src": [
+            "'self'",
+            "'unsafe-inline'",
+        ],
+        "script-src": ["'self'", "'unsafe-inline'", "'unsafe-eval'"],
+    },
+    "content_security_policy_nonce_in": ["script-src"],
+    "force_https": False,
+    "session_cookie_secure": False,
+}
+
+#
+# Flask session cookie options
+#
+# See https://flask.palletsprojects.com/en/1.1.x/security/#set-cookie-options
+# for details
+#
+SESSION_COOKIE_HTTPONLY = True  # Prevent cookie from being read by frontend JS?
+SESSION_COOKIE_SECURE = False  # Prevent cookie from being transmitted over non-tls?
+SESSION_COOKIE_SAMESITE: Literal["None", "Lax", "Strict"] | None = "Lax" +# Whether to use server side sessions from flask-session or Flask secure cookies +SESSION_SERVER_SIDE = False +# Example config using Redis as the backend for server side sessions +# from flask_session import RedisSessionInterface +# +# SESSION_SERVER_SIDE = True +# SESSION_TYPE = "redis" +# SESSION_REDIS = Redis(host="localhost", port=6379, db=0) +# +# Other possible config options and backends: +# # https://flask-session.readthedocs.io/en/latest/config.html + +# Cache static resources. +SEND_FILE_MAX_AGE_DEFAULT = int(timedelta(days=365).total_seconds()) + +# URI to database storing the example data, points to +# SQLALCHEMY_DATABASE_URI by default if set to `None` +SQLALCHEMY_EXAMPLES_URI = ( + "sqlite:///" + os.path.join(DATA_DIR, "examples.db") + "?check_same_thread=false" +) + +# Optional prefix to be added to all static asset paths when rendering the UI. +# This is useful for hosting assets in an external CDN, for example +STATIC_ASSETS_PREFIX = "" + +# Some sqlalchemy connection strings can open Superset to security risks. +# Typically these should not be allowed. +PREVENT_UNSAFE_DB_CONNECTIONS = True + +# If true all default urls on datasets will be handled as relative URLs by the frontend +PREVENT_UNSAFE_DEFAULT_URLS_ON_DATASET = True + +# Define a list of allowed URLs for dataset data imports (v1). +# Simple example to only allow URLs that belong to certain domains: +# ALLOWED_IMPORT_URL_DOMAINS = [ +# r"^https://.+\.domain1\.com\/?.*", r"^https://.+\.domain2\.com\/?.*" +# ] +DATASET_IMPORT_ALLOWED_DATA_URLS = [r".*"] + +# Path used to store SSL certificates that are generated when using custom certs. +# Defaults to temporary directory. +# Example: SSL_CERT_PATH = "/certs" +SSL_CERT_PATH: str | None = None + +# SQLA table mutator, every time we fetch the metadata for a certain table +# (superset.connectors.sqla.models.SqlaTable), we call this hook +# to allow mutating the object with this callback. +# This can be used to set any properties of the object based on naming +# conventions and such. You can find examples in the tests. + +# pylint: disable-next=unnecessary-lambda-assignment +SQLA_TABLE_MUTATOR = lambda table: table # noqa: E731 + + +# Global async query config options. +# Requires GLOBAL_ASYNC_QUERIES feature flag to be enabled. +GLOBAL_ASYNC_QUERY_MANAGER_CLASS = ( + "superset.async_events.async_query_manager.AsyncQueryManager" +) +GLOBAL_ASYNC_QUERIES_REDIS_CONFIG = { + "port": 6379, + "host": "127.0.0.1", + "password": "", + "db": 0, + "ssl": False, +} +GLOBAL_ASYNC_QUERIES_REDIS_STREAM_PREFIX = "async-events-" +GLOBAL_ASYNC_QUERIES_REDIS_STREAM_LIMIT = 1000 +GLOBAL_ASYNC_QUERIES_REDIS_STREAM_LIMIT_FIREHOSE = 1000000 +GLOBAL_ASYNC_QUERIES_REGISTER_REQUEST_HANDLERS = True +GLOBAL_ASYNC_QUERIES_JWT_COOKIE_NAME = "async-token" +GLOBAL_ASYNC_QUERIES_JWT_COOKIE_SECURE = False +GLOBAL_ASYNC_QUERIES_JWT_COOKIE_SAMESITE: None | (Literal["None", "Lax", "Strict"]) = ( + None +) +GLOBAL_ASYNC_QUERIES_JWT_COOKIE_DOMAIN = None +GLOBAL_ASYNC_QUERIES_JWT_SECRET = "test-secret-change-me" +GLOBAL_ASYNC_QUERIES_TRANSPORT: Literal["polling", "ws"] = "polling" +GLOBAL_ASYNC_QUERIES_POLLING_DELAY = int( + timedelta(milliseconds=500).total_seconds() * 1000 +) +GLOBAL_ASYNC_QUERIES_WEBSOCKET_URL = "ws://127.0.0.1:8080/" + +# Global async queries cache backend configuration options: +# - Set 'CACHE_TYPE' to 'RedisCache' for RedisCacheBackend. 
+# - Set 'CACHE_TYPE' to 'RedisSentinelCache' for RedisSentinelCacheBackend. +# - Set 'CACHE_TYPE' to 'None' to fall back on 'GLOBAL_ASYNC_QUERIES_REDIS_CONFIG'. +GLOBAL_ASYNC_QUERIES_CACHE_BACKEND = { + "CACHE_TYPE": "RedisCache", + "CACHE_REDIS_HOST": "localhost", + "CACHE_REDIS_PORT": 6379, + "CACHE_REDIS_USER": "", + "CACHE_REDIS_PASSWORD": "", + "CACHE_REDIS_DB": 0, + "CACHE_DEFAULT_TIMEOUT": 300, + "CACHE_REDIS_SENTINELS": [("localhost", 26379)], + "CACHE_REDIS_SENTINEL_MASTER": "mymaster", + "CACHE_REDIS_SENTINEL_PASSWORD": None, + "CACHE_REDIS_SSL": False, # True or False + "CACHE_REDIS_SSL_CERTFILE": None, + "CACHE_REDIS_SSL_KEYFILE": None, + "CACHE_REDIS_SSL_CERT_REQS": "required", + "CACHE_REDIS_SSL_CA_CERTS": None, +} + +# Embedded config options +GUEST_ROLE_NAME = "Public" +GUEST_TOKEN_JWT_SECRET = "test-guest-secret-change-me" +GUEST_TOKEN_JWT_ALGO = "HS256" +GUEST_TOKEN_HEADER_NAME = "X-GuestToken" +GUEST_TOKEN_JWT_EXP_SECONDS = 300 # 5 minutes +# Guest token audience for the embedded superset, either string or callable +GUEST_TOKEN_JWT_AUDIENCE: Callable[[], str] | str | None = None + +# A callable that can be supplied to do extra validation of guest token configuration +# for example certain RLS parameters: +# lambda x: len(x['rls']) == 1 and "tenant_id=" in x['rls'][0]['clause'] +# +# Takes the GuestTokenUser dict as an argument +# Return False from the callable to return a HTTP 400 to the user. + +GUEST_TOKEN_VALIDATOR_HOOK = None + +# A SQL dataset health check. Note if enabled it is strongly advised that the callable +# be memoized to aid with performance, i.e., +# +# @cache_manager.cache.memoize(timeout=0) +# def DATASET_HEALTH_CHECK(datasource: SqlaTable) -> Optional[str]: +# if ( +# datasource.sql and +# len(sql_parse.ParsedQuery(datasource.sql, strip_comments=True).tables) == 1 +# ): +# return ( +# "This virtual dataset queries only one table and therefore could be " +# "replaced by querying the table directly." +# ) +# +# return None +# +# Within the FLASK_APP_MUTATOR callable, i.e., once the application and thus cache have +# been initialized it is also necessary to add the following logic to blow the cache for +# all datasources if the callback function changed. +# +# def FLASK_APP_MUTATOR(app: Flask) -> None: +# name = "DATASET_HEALTH_CHECK" +# func = app.config[name] +# code = func.uncached.__code__.co_code +# +# if cache_manager.cache.get(name) != code: +# cache_manager.cache.delete_memoized(func) +# cache_manager.cache.set(name, code, timeout=0) +# +DATASET_HEALTH_CHECK: Callable[[SqlaTable], str] | None = None + +# the advanced data type key should correspond to that set in the column metadata +ADVANCED_DATA_TYPES: dict[str, AdvancedDataType] = { + "internet_address": internet_address, + "port": internet_port, +} + +# By default, the Welcome page features all charts and dashboards the user has access +# to. This can be changed to show only examples, or a custom view +# by providing the title and a FAB filter: +# WELCOME_PAGE_LAST_TAB = ( +# "Xyz", +# [{"col": 'created_by', "opr": 'rel_o_m', "value": 10}], +# ) +WELCOME_PAGE_LAST_TAB: Literal["examples", "all"] | tuple[str, list[dict[str, Any]]] = ( + "all" +) + +# Max allowed size for a zipped file +ZIPPED_FILE_MAX_SIZE = 100 * 1024 * 1024 # 100MB +# Max allowed compression ratio for a zipped file +ZIP_FILE_MAX_COMPRESS_RATIO = 200.0 + +# Configuration for environment tag shown on the navbar. Setting 'text' to '' will hide the tag. 
+# 'color' can either be a hex color code, or a dot-indexed theme color (e.g. error.base)
+ENVIRONMENT_TAG_CONFIG = {
+    "variable": "SUPERSET_ENV",
+    "values": {
+        "debug": {
+            "color": "error.base",
+            "text": "flask-debug",
+        },
+        "development": {
+            "color": "error.base",
+            "text": "Development",
+        },
+        "production": {
+            "color": "",
+            "text": "",
+        },
+    },
+}
+
+
+# Extra related query filters make it possible to limit which objects are shown
+# in the UI. For example, to only show "admin" or users starting with the letter "b" in
+# the "Owners" dropdowns, you could add the following in your config:
+# def user_filter(query: Query, *args, **kwargs):
+#     from superset import security_manager
+#
+#     user_model = security_manager.user_model
+#     filters = [
+#         user_model.username == "admin",
+#         user_model.username.ilike("b%"),
+#     ]
+#     return query.filter(or_(*filters))
+#
+# EXTRA_RELATED_QUERY_FILTERS = {"user": user_filter}
+#
+# Similarly, to restrict the roles in the "Roles" dropdown you can provide a custom
+# filter callback for the "role" key.
+class ExtraRelatedQueryFilters(TypedDict, total=False):
+    role: Callable[[Query], Query]
+    user: Callable[[Query], Query]
+
+
+EXTRA_RELATED_QUERY_FILTERS: ExtraRelatedQueryFilters = {}
+
+
+# Extra dynamic query filters make it possible to limit which objects are shown
+# in the UI before any other filtering is applied. Useful, for example, when
+# filtering using feature flags along with the regular role filters
+# that get applied by default in our base_filters.
+# For example, to only show a database starting with the letter "b"
+# in the "Database Connections" list, you could add the following in your config:
+# def initial_database_filter(query: Query, *args, **kwargs):
+#     from superset.models.core import Database
+#
+#     filter = Database.database_name.startswith('b')
+#     return query.filter(filter)
+#
+# EXTRA_DYNAMIC_QUERY_FILTERS = {"databases": initial_database_filter}
+class ExtraDynamicQueryFilters(TypedDict, total=False):
+    databases: Callable[[Query], Query]
+
+
+EXTRA_DYNAMIC_QUERY_FILTERS: ExtraDynamicQueryFilters = {}
+
+
+# -------------------------------------------------------------------
+# * WARNING: STOP EDITING HERE *
+# -------------------------------------------------------------------
+# Don't add config values below this line since local configs won't be
+# able to override them.
+if CONFIG_PATH_ENV_VAR in os.environ:
+    # Explicitly import config module that is not necessarily in pythonpath; useful
+    # for case where app is being executed via pex.
+    cfg_path = os.environ[CONFIG_PATH_ENV_VAR]
+    try:
+        module = sys.modules[__name__]
+        spec = importlib.util.spec_from_file_location("superset_config", cfg_path)
+        override_conf = importlib.util.module_from_spec(spec)
+        spec.loader.exec_module(override_conf)
+        for key in dir(override_conf):
+            if key.isupper():
+                setattr(module, key, getattr(override_conf, key))
+
+        click.secho(f"Loaded your LOCAL configuration at [{cfg_path}]", fg="cyan")
+    except Exception:
+        logger.exception(
+            "Failed to import config for %s=%s", CONFIG_PATH_ENV_VAR, cfg_path
+        )
+        raise
+elif importlib.util.find_spec("superset_config") and not is_test():
+    try:
+        # pylint: disable=import-error,wildcard-import,unused-wildcard-import
+        import superset_config
+        from superset_config import *  # noqa: F403, F401
+
+        click.secho(
+            f"Loaded your LOCAL configuration at [{superset_config.__file__}]",
+            fg="cyan",
+        )
+    except Exception:
+        logger.exception("Found but failed to import local superset_config")
+        raise
\ No newline at end of file

From bf434065ebe51e2a084d5905d640cb51763677de Mon Sep 17 00:00:00 2001
From: Srini 
Date: Thu, 12 Dec 2024 16:45:28 +0530
Subject: [PATCH 08/12] add example secret directory

---
 .../analytics/superset/secrets/client-secret.json | 13 +++++++++++++
 1 file changed, 13 insertions(+)
 create mode 100644 Docker-Swarm-deployment/analytics/superset/secrets/client-secret.json

diff --git a/Docker-Swarm-deployment/analytics/superset/secrets/client-secret.json b/Docker-Swarm-deployment/analytics/superset/secrets/client-secret.json
new file mode 100644
index 000000000..1721bdee3
--- /dev/null
+++ b/Docker-Swarm-deployment/analytics/superset/secrets/client-secret.json
@@ -0,0 +1,13 @@
+{
+  "<client-name>": {
+    "issuer": "https://<keycloak-host>/auth/realms/<realm>",
+    "auth_uri": "https://<keycloak-host>/auth/realms/<realm>/protocol/openid-connect/auth",
+    "client_id": "keycloak_client_id",
+    "client_secret": "<client-secret>",
+    "post_logout_redirect_uri": ["<post-logout-redirect-uri>"],
+    "userinfo_uri": "https://<keycloak-host>/auth/realms/<realm>/protocol/openid-connect/userinfo",
+    "token_uri": "auth_token_uri",
+    "token_introspection_uri": "https://<keycloak-host>/auth/realms/<realm>/protocol/openid-connect/token/introspect"
+  }
+}
+
From c98e7123ade9c175bb33c239966263e12adbb951 Mon Sep 17 00:00:00 2001
From: Srini 
Date: Thu, 12 Dec 2024 16:54:12 +0530
Subject: [PATCH 09/12] update ignore files

---
 .../.dockerignore |  4 ---
 .../.gitignore    | 28 -------------------
 2 files changed, 32 deletions(-)
 delete mode 100644 Docker-Swarm-deployment/analytics/superset/gra-superset-guesttoken-middlware/.dockerignore
 delete mode 100644 Docker-Swarm-deployment/analytics/superset/gra-superset-guesttoken-middlware/.gitignore

diff --git a/Docker-Swarm-deployment/analytics/superset/gra-superset-guesttoken-middlware/.dockerignore b/Docker-Swarm-deployment/analytics/superset/gra-superset-guesttoken-middlware/.dockerignore
deleted file mode 100644
index 3b37ed8e7..000000000
--- a/Docker-Swarm-deployment/analytics/superset/gra-superset-guesttoken-middlware/.dockerignore
+++ /dev/null
@@ -1,4 +0,0 @@
-Dockerfile
-.env
-superset*
-

diff --git a/Docker-Swarm-deployment/analytics/superset/gra-superset-guesttoken-middlware/.gitignore b/Docker-Swarm-deployment/analytics/superset/gra-superset-guesttoken-middlware/.gitignore
deleted file mode 100644
index 36fabb6cb..000000000
--- a/Docker-Swarm-deployment/analytics/superset/gra-superset-guesttoken-middlware/.gitignore
+++ /dev/null
@@ -1,28 +0,0 @@
-# dev
-.yarn/
-!.yarn/releases
-.vscode/*
-!.vscode/launch.json
-!.vscode/*.code-snippets
-.idea/workspace.xml
-.idea/usage.statistics.xml
-.idea/shelf
-
-# deps
-node_modules/
-
-# 
env -.env -.env.production - -# logs -logs/ -*.log -npm-debug.log* -yarn-debug.log* -yarn-error.log* -pnpm-debug.log* -lerna-debug.log* - -# misc -.DS_Store From 42a7f777ae7b252b9b2d059927e1e4a8ae0b8202 Mon Sep 17 00:00:00 2001 From: Srini Date: Thu, 12 Dec 2024 16:57:21 +0530 Subject: [PATCH 10/12] update stack file --- .../analytics/superset/{ => secrets}/.env | 0 .../analytics/superset/superset-stack.yaml | 8 ++++---- 2 files changed, 4 insertions(+), 4 deletions(-) rename Docker-Swarm-deployment/analytics/superset/{ => secrets}/.env (100%) diff --git a/Docker-Swarm-deployment/analytics/superset/.env b/Docker-Swarm-deployment/analytics/superset/secrets/.env similarity index 100% rename from Docker-Swarm-deployment/analytics/superset/.env rename to Docker-Swarm-deployment/analytics/superset/secrets/.env diff --git a/Docker-Swarm-deployment/analytics/superset/superset-stack.yaml b/Docker-Swarm-deployment/analytics/superset/superset-stack.yaml index 48ed56790..a7ab9bba3 100644 --- a/Docker-Swarm-deployment/analytics/superset/superset-stack.yaml +++ b/Docker-Swarm-deployment/analytics/superset/superset-stack.yaml @@ -32,7 +32,7 @@ services: image: ghcr.io/datakaveri/superset:4.0.2-8 container_name: superset_init env_file: - - .env + - ./secrets/.env volumes: - superset_home:/app/superset_home configs: @@ -58,7 +58,7 @@ services: ports: - "8088:8088" env_file: - - .env + - ./secrets/.env depends_on: - superset_init volumes: @@ -108,7 +108,7 @@ services: image: ghcr.io/datakaveri/superset:4.0.2-8 container_name: superset_worker env_file: - - .env # default + - ./secrets/.env # default restart: unless-stopped volumes: - superset_home:/app/superset_home @@ -146,7 +146,7 @@ services: image: ghcr.io/datakaveri/superset:4.0.2-8 container_name: superset_worker_beat env_file: - - .env # default + - ./secrets/.env # default restart: unless-stopped volumes: - superset_home:/app/superset_home From 7a17913d905068b0e8b1b73ed4ee8d7fcc2a0d63 Mon Sep 17 00:00:00 2001 From: Srini <106464244+SRINI2410@users.noreply.github.com> Date: Thu, 12 Dec 2024 17:00:13 +0530 Subject: [PATCH 11/12] Update README.md --- .../analytics/superset/README.md | 15 +++++---------- 1 file changed, 5 insertions(+), 10 deletions(-) diff --git a/Docker-Swarm-deployment/analytics/superset/README.md b/Docker-Swarm-deployment/analytics/superset/README.md index cf61fdca1..ed8f33c7b 100644 --- a/Docker-Swarm-deployment/analytics/superset/README.md +++ b/Docker-Swarm-deployment/analytics/superset/README.md @@ -1,18 +1,13 @@ ## Getting started with Superset(visualization tool) using docker swarm Deploy -To be begin with, in order to deploy superset stack first we need to pass appropriate environment variables to customize superset and to establishes connection with backend components. - -#### Setting up environment variables: - -- Create `.env` (hidden) file at the same directory level as your docker stack file. -- Copy `superset_env_file` content into `.env` file and replace placeholders with actual values. 
- 

#### To deploy superset dashboard:
```sh
docker stack deploy -c superset-stack.yaml superset
```
#### To deploy superset middleware:
```sh
docker stack deploy -c ssuperset-middleware-stack.yaml superset-middleware
```
#### To check the status:
```sh
docker service ls
```

rv2yw340gsd0 superset_superset_init replicated 0/1

The **superset_superset_init** service will shut down once it has completed its bootstrap operations.

From e1f2ffc034faccd7d6c8fd96327461a5d71f12d0 Mon Sep 17 00:00:00 2001
From: Srini <106464244+SRINI2410@users.noreply.github.com>
Date: Thu, 12 Dec 2024 17:14:35 +0530
Subject: [PATCH 12/12] Update README.md

---
 Docker-Swarm-deployment/analytics/superset/README.md | 5 ++++-
 1 file changed, 4 insertions(+), 1 deletion(-)

diff --git a/Docker-Swarm-deployment/analytics/superset/README.md b/Docker-Swarm-deployment/analytics/superset/README.md
index ed8f33c7b..58e68173b 100644
--- a/Docker-Swarm-deployment/analytics/superset/README.md
+++ b/Docker-Swarm-deployment/analytics/superset/README.md
@@ -6,7 +6,7 @@ docker stack deploy -c superset-stack.yaml superset
 ```
 #### To deploy superset middleware:
 ```sh
-docker stack deploy -c ssuperset-middleware-stack.yaml superset-middleware
+docker stack deploy -c superset-middleware-stack.yaml superset-middleware
 ```
 #### To check the status:
@@ -23,3 +23,6 @@ rv2yw340gsd0 superset_superset_init replicated 0/1
 The **superset_superset_init** service will shut down once it has completed its bootstrap operations.
+##### NOTE:
+1. To install custom Python modules, add them to the docker/requirements-local.txt file and redeploy the stack.
+2. Replace all placeholders in the .env files under secrets/ and gra-superset-guesttoken-middleware/.
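For reference, a minimal `secrets/.env` might look like the sketch below. This is illustrative only: the variable names follow the upstream `apache/superset` Docker conventions and are assumptions here — the authoritative list is whatever `superset_env_file` defines — and every value shown is a placeholder to be replaced.

```sh
# Illustrative sketch of secrets/.env — placeholder values, not real settings
DATABASE_DIALECT=postgresql
DATABASE_HOST=<postgres-host>   # metadata database used by superset-init
DATABASE_PORT=5432
DATABASE_DB=superset
DATABASE_USER=<db-user>
DATABASE_PASSWORD=<db-password>
REDIS_HOST=<redis-host>         # broker/result backend for the Celery worker and beat services
REDIS_PORT=6379
SUPERSET_ENV=production
SUPERSET_SECRET_KEY=<generate one, e.g. with `openssl rand -base64 42`>
```

Keeping these values under `secrets/` rather than beside the stack file matches the `env_file: ./secrets/.env` paths the services reference in `superset-stack.yaml`.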